Merge pull request #10 from juribeparada/devel

StripmapProc: fix typo
LT1AB
Heresh Fattahi 2019-02-20 13:22:10 -08:00 committed by GitHub
commit ce0e4ca80c
7 changed files with 881 additions and 1 deletion


@@ -138,7 +138,7 @@ def runPreprocessor(self):
         self._insar.saveProduct(slave.frame, self._insar.slaveRawProduct)
     else:
-        print('Master data is in SLC format. Adding _slc to output name.')
+        print('Slave data is in SLC format. Adding _slc to output name.')
         iszerodop = isZeroDopplerSLC(self.slaveSensorName)
         sensor.output = os.path.join(dirname + '_slc', os.path.basename(dirname)+'.slc')

docs/Doxyfile Normal file

@@ -0,0 +1,273 @@
# Doxyfile 1.5.7
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
DOXYFILE_ENCODING = UTF-8
PROJECT_NAME = "ISCE"
PROJECT_NUMBER = "TBD"
OUTPUT_DIRECTORY =
CREATE_SUBDIRS = NO
OUTPUT_LANGUAGE = English
BRIEF_MEMBER_DESC = YES
REPEAT_BRIEF = YES
ABBREVIATE_BRIEF = "The $name class" \
"The $name widget" \
"The $name file" \
is \
provides \
specifies \
contains \
represents \
a \
an \
the
ALWAYS_DETAILED_SEC = NO
INLINE_INHERITED_MEMB = NO
FULL_PATH_NAMES = YES
STRIP_FROM_PATH =
STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = NO
QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 8
ALIASES =
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = YES
OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_OUTPUT_VHDL = NO
BUILTIN_STL_SUPPORT = NO
CPP_CLI_SUPPORT = NO
SIP_SUPPORT = NO
IDL_PROPERTY_SUPPORT = YES
DISTRIBUTE_GROUP_DOC = NO
SUBGROUPING = YES
TYPEDEF_HIDES_STRUCT = NO
SYMBOL_CACHE_SIZE = 0
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
EXTRACT_ALL = YES
EXTRACT_PRIVATE = YES
EXTRACT_STATIC = YES
EXTRACT_LOCAL_CLASSES = YES
EXTRACT_LOCAL_METHODS = YES
EXTRACT_ANON_NSPACES = NO
HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
HIDE_FRIEND_COMPOUNDS = NO
HIDE_IN_BODY_DOCS = NO
INTERNAL_DOCS = NO
CASE_SENSE_NAMES = YES
HIDE_SCOPE_NAMES = YES
SHOW_INCLUDE_FILES = YES
INLINE_INFO = YES
SORT_MEMBER_DOCS = YES
SORT_BRIEF_DOCS = NO
SORT_GROUP_NAMES = NO
SORT_BY_SCOPE_NAME = NO
GENERATE_TODOLIST = YES
GENERATE_TESTLIST = YES
GENERATE_BUGLIST = YES
GENERATE_DEPRECATEDLIST= YES
ENABLED_SECTIONS =
MAX_INITIALIZER_LINES = 30
SHOW_USED_FILES = YES
SHOW_DIRECTORIES = NO
SHOW_FILES = YES
SHOW_NAMESPACES = YES
FILE_VERSION_FILTER =
LAYOUT_FILE =
#---------------------------------------------------------------------------
# configuration options related to warning and progress messages
#---------------------------------------------------------------------------
QUIET = NO
WARNINGS = YES
WARN_IF_UNDOCUMENTED = YES
WARN_IF_DOC_ERROR = YES
WARN_NO_PARAMDOC = NO
WARN_FORMAT = "$file:$line: $text"
WARN_LOGFILE =
#---------------------------------------------------------------------------
# configuration options related to the input files
#---------------------------------------------------------------------------
INPUT = ..
INPUT_ENCODING = UTF-8
FILE_PATTERNS = *.c \
*.cc \
*.cpp \
*.c++ \
*.h \
*.hh \
*.inc \
*.py \
*.f90 \
*.f \
*.F \
*.vhd \
mainpage.txt
RECURSIVE = YES
EXCLUDE =
EXCLUDE_SYMLINKS = NO
EXCLUDE_PATTERNS =
EXCLUDE_SYMBOLS =
EXAMPLE_PATH =
EXAMPLE_PATTERNS = *
EXAMPLE_RECURSIVE = NO
IMAGE_PATH = .
INPUT_FILTER =
FILTER_PATTERNS =
FILTER_SOURCE_FILES = NO
#---------------------------------------------------------------------------
# configuration options related to source browsing
#---------------------------------------------------------------------------
SOURCE_BROWSER = YES
INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = NO
REFERENCED_BY_RELATION = YES
REFERENCES_RELATION = YES
REFERENCES_LINK_SOURCE = YES
USE_HTAGS = YES
VERBATIM_HEADERS = YES
#---------------------------------------------------------------------------
# configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
COLS_IN_ALPHA_INDEX = 5
IGNORE_PREFIX =
#---------------------------------------------------------------------------
# configuration options related to the HTML output
#---------------------------------------------------------------------------
GENERATE_HTML = YES
HTML_OUTPUT = html
HTML_FILE_EXTENSION = .html
HTML_HEADER =
HTML_FOOTER =
HTML_STYLESHEET =
HTML_ALIGN_MEMBERS = YES
HTML_DYNAMIC_SECTIONS = NO
GENERATE_DOCSET = NO
DOCSET_FEEDNAME = "Doxygen generated docs"
DOCSET_BUNDLE_ID = org.doxygen.Project
GENERATE_HTMLHELP = YES
CHM_FILE =
HHC_LOCATION =
GENERATE_CHI = YES
CHM_INDEX_ENCODING =
BINARY_TOC = YES
TOC_EXPAND = YES
GENERATE_QHP = NO
QCH_FILE =
QHP_NAMESPACE = org.doxygen.Project
QHP_VIRTUAL_FOLDER = doc
QHG_LOCATION =
DISABLE_INDEX = NO
ENUM_VALUES_PER_LINE = 4
GENERATE_TREEVIEW = YES
TREEVIEW_WIDTH = 250
FORMULA_FONTSIZE = 10
#---------------------------------------------------------------------------
# configuration options related to the LaTeX output
#---------------------------------------------------------------------------
GENERATE_LATEX = YES
LATEX_OUTPUT = latex
LATEX_CMD_NAME = latex
MAKEINDEX_CMD_NAME = makeindex
COMPACT_LATEX = NO
PAPER_TYPE = a4wide
EXTRA_PACKAGES =
LATEX_HEADER =
PDF_HYPERLINKS = YES
USE_PDFLATEX = YES
LATEX_BATCHMODE = NO
LATEX_HIDE_INDICES = NO
#---------------------------------------------------------------------------
# configuration options related to the RTF output
#---------------------------------------------------------------------------
GENERATE_RTF = NO
RTF_OUTPUT = rtf
COMPACT_RTF = NO
RTF_HYPERLINKS = NO
RTF_STYLESHEET_FILE =
RTF_EXTENSIONS_FILE =
#---------------------------------------------------------------------------
# configuration options related to the man page output
#---------------------------------------------------------------------------
GENERATE_MAN = NO
MAN_OUTPUT = man
MAN_EXTENSION = .3
MAN_LINKS = NO
#---------------------------------------------------------------------------
# configuration options related to the XML output
#---------------------------------------------------------------------------
GENERATE_XML = NO
XML_OUTPUT = xml
XML_SCHEMA =
XML_DTD =
XML_PROGRAMLISTING = YES
#---------------------------------------------------------------------------
# configuration options for the AutoGen Definitions output
#---------------------------------------------------------------------------
GENERATE_AUTOGEN_DEF = NO
#---------------------------------------------------------------------------
# configuration options related to the Perl module output
#---------------------------------------------------------------------------
GENERATE_PERLMOD = NO
PERLMOD_LATEX = NO
PERLMOD_PRETTY = YES
PERLMOD_MAKEVAR_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the preprocessor
#---------------------------------------------------------------------------
ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = NO
EXPAND_ONLY_PREDEF = NO
SEARCH_INCLUDES = YES
INCLUDE_PATH =
INCLUDE_FILE_PATTERNS =
PREDEFINED =
EXPAND_AS_DEFINED =
SKIP_FUNCTION_MACROS = YES
#---------------------------------------------------------------------------
# Configuration::additions related to external references
#---------------------------------------------------------------------------
TAGFILES =
GENERATE_TAGFILE =
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
PERL_PATH = /usr/bin/perl
#---------------------------------------------------------------------------
# Configuration options related to the dot tool
#---------------------------------------------------------------------------
CLASS_DIAGRAMS = YES
MSCGEN_PATH =
HIDE_UNDOC_RELATIONS = YES
HAVE_DOT = YES
DOT_FONTNAME = FreeSans
DOT_FONTPATH =
CLASS_GRAPH = YES
COLLABORATION_GRAPH = YES
GROUP_GRAPHS = YES
UML_LOOK = YES
TEMPLATE_RELATIONS = NO
INCLUDE_GRAPH = YES
INCLUDED_BY_GRAPH = YES
CALL_GRAPH = NO
CALLER_GRAPH = NO
GRAPHICAL_HIERARCHY = YES
DIRECTORY_GRAPH = YES
DOT_IMAGE_FORMAT = png
DOT_PATH =
DOTFILE_DIRS =
DOT_GRAPH_MAX_NODES = 50
MAX_DOT_GRAPH_DEPTH = 1000
DOT_TRANSPARENT = NO
DOT_MULTI_TARGETS = NO
GENERATE_LEGEND = YES
DOT_CLEANUP = YES
#---------------------------------------------------------------------------
# Configuration::additions related to the search engine
#---------------------------------------------------------------------------
SEARCHENGINE = NO
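For reference, the documentation described by this Doxyfile is built by running doxygen inside the docs/ directory, since INPUT = .. points one level up at the source tree. The helper below is only a sketch: it assumes doxygen (and Graphviz dot, required by HAVE_DOT = YES) are installed and on the PATH.

# Sketch: build the ISCE API documentation with the Doxyfile above.
# Assumes doxygen and Graphviz 'dot' are installed and on the PATH, and that
# this is executed from the docs/ directory so that INPUT = .. resolves to
# the repository root.
import subprocess
subprocess.run(['doxygen', 'Doxyfile'], check=True)
print('HTML output written to html/ (HTML_OUTPUT = html)')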


@@ -0,0 +1,82 @@
#!/usr/bin/env python3
import isce
from isceobj.XmlUtil import FastXML as xml
if __name__ == '__main__':
'''
Example demonstrating automated generation of insarApp.xml for
COSMO-SkyMed raw data.
'''
#####Initialize a component named insar
insar = xml.Component('insar')
####Python dictionaries become components
####Master info
master = {}
master['hdf5'] = 'master.h5'
master['output'] = 'master.raw'
####Slave info
slave = {}
slave['hdf5'] = 'slave.h5'
slave['output'] = 'slave.raw'
#####Set sub-component
insar['master'] = master
insar['slave'] = slave
####Set properties
insar['doppler method'] = 'useDEFAULT'
insar['sensor name'] = 'COSMO_SKYMED'
insar['range looks'] = 4
insar['azimuth looks'] = 4
#####Catalog example
insar['dem'] = xml.Catalog('dem.xml')
####Components include a writeXML method
insar.writeXML('insarApp.xml', root='insarApp')
"""
The output should be of the form:
<insarApp>
<component name="insar">
<component name="master">
<property name="hdf5">
<value>master.h5</value>
</property>
<property name="output">
<value>master.raw</value>
</property>
</component>
<component name="slave">
<property name="hdf5">
<value>slave.h5</value>
</property>
<property name="output">
<value>slave.raw</value>
</property>
</component>
<property name="doppler method">
<value>useDEFAULT</value>
</property>
<property name="sensor name">
<value>COSMO_SKYMED</value>
</property>
<property name="range looks">
<value>4</value>
</property>
<property name="azimuth looks">
<value>4</value>
</property>
<component name="dem">
<catalog>dem.xml</catalog>
</component>
</component>
</insarApp>
"""


@@ -0,0 +1,95 @@
#!/usr/bin/env python3
import isce
import logging
import isceobj
import mroipac
import argparse
from mroipac.ampcor.Ampcor import Ampcor
import numpy as np
def cmdLineParser():
parser = argparse.ArgumentParser(description='Simple ampcor driver')
parser.add_argument('-m', dest='master', type=str,
help='Master image with ISCE XML file', required=True)
parser.add_argument('-b1', dest='band1', type=int,
help='Band number of master image', default=0)
parser.add_argument('-s', dest='slave', type=str,
help='Slave image with ISCE XML file', required=True)
parser.add_argument('-b2', dest='band2', type=int,
help='Band number of slave image', default=0)
parser.add_argument('-o', dest='outfile', default= 'offsets.txt',
type=str, help='Output ASCII file')
return parser.parse_args()
#Start of the main program
if __name__ == '__main__':
logging.info("Calculate offset between two using ampcor")
#Parse command line
inps = cmdLineParser()
####Create master image object
masterImg = isceobj.createImage() #Empty image
masterImg.load(inps.master +'.xml') #Load from XML file
masterImg.setAccessMode('read') #Set it up for reading
masterImg.createImage() #Create File
#####Create slave image object
slaveImg = isceobj.createImage() #Empty image
slaveImg.load(inps.slave +'.xml') #Load it from XML file
slaveImg.setAccessMode('read') #Set it up for reading
slaveImg.createImage() #Create File
####Stage 1: Initialize
objAmpcor = Ampcor(name='my_ampcor')
objAmpcor.configure()
####Default values used if not provided in my_ampcor.xml
coarseAcross = 0
coarseDown = 0
####Get file types
if masterImg.getDataType().upper().startswith('C'):
objAmpcor.setImageDataType1('complex')
else:
objAmpcor.setImageDataType1('real')
if slaveImg.getDataType().upper().startswith('C'):
objAmpcor.setImageDataType2('complex')
else:
objAmpcor.setImageDataType2('real')
#####Stage 2: No ports for ampcor
### Any parameters can be controlled through my_ampcor.xml
### Stage 3: Set values as needed
####Only set these values if user does not define it in my_ampcor.xml
if objAmpcor.acrossGrossOffset is None:
objAmpcor.acrossGrossOffset = coarseAcross
if objAmpcor.downGrossOffset is None:
objAmpcor.downGrossOffset = coarseDown
logging.info('Across Gross Offset = %d'%(objAmpcor.acrossGrossOffset))
logging.info('Down Gross Offset = %d'%(objAmpcor.downGrossOffset))
####Stage 4: Call the main method
objAmpcor.ampcor(masterImg,slaveImg)
###Close the input images; ampcor no longer needs them
masterImg.finalizeImage()
slaveImg.finalizeImage()
######Stage 5: Get required data out of the processing run
offField = objAmpcor.getOffsetField()
logging.info('Number of returned offsets : %d'%(len(offField._offsets)))
####Write output to an ascii file
field = np.array(offField.unpackOffsets())
np.savetxt(inps.outfile, field, delimiter=" ", fmt='%5.6f')
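As a quick sanity check, the ASCII offsets written above can be read back with numpy. This is a sketch only; the per-column meaning follows OffsetField.unpackOffsets() and is treated here as an assumption, so verify the ordering against your ISCE version before using individual columns.

# Sketch: reload the offsets written by the driver above and summarize them.
# The column meaning (location/offset pairs plus SNR) is an assumption;
# check OffsetField.unpackOffsets() in your ISCE version.
import numpy as np
offsets = np.atleast_2d(np.loadtxt('offsets.txt'))
print('Offsets read back: %d rows, %d columns' % offsets.shape)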


@@ -0,0 +1,181 @@
#!/usr/bin/env python3
###Our usual import statements
import numpy as np
import isce
import isceobj
from stdproc.model.enu2los.ENU2LOS import ENU2LOS
import argparse
####Method to load pickle information
####from an insarApp run
def load_pickle(step='topo'):
'''Load the insarProc pickle for the given processing step ('topo' by default).'''
import pickle as cPickle   # Python 3: the old cPickle module is now pickle
insarObj = cPickle.load(open('PICKLE/{0}'.format(step),'rb'))
return insarObj
###Create dummy model file if needed
###Use this for simple testing
###Modify values as per your test dataset
def createDummyModel():
'''Creates a model image.'''
wid = 401
lgt = 401
startLat = 20.0
deltaLat = -0.025
startLon = -156.0
deltaLon = 0.025
data = np.zeros((lgt,3*wid), dtype=np.float32)
###East only
# data[:,0::3] = 1.0
###North only
# data[:,1::3] = 1.0
###Up only
data[:,2::3] = 1.0
data.tofile('model.enu')
print('Creating model object')
objModel = isceobj.createDemImage()
objModel.setFilename('model.enu')
objModel.setWidth(wid)
objModel.scheme = 'BIP'
objModel.setAccessMode('read')
objModel.imageType='bip'
objModel.dataType='FLOAT'
objModel.bands = 3
dictProp = {'REFERENCE':'WGS84','Coordinate1': \
{'size':wid,'startingValue':startLon,'delta':deltaLon}, \
'Coordinate2':{'size':lgt,'startingValue':startLat, \
'delta':deltaLat},'FILE_NAME':'model.enu'}
objModel.init(dictProp)
objModel.renderHdr()
###cmd Line Parser
def cmdLineParser():
parser = argparse.ArgumentParser(description="Project ENU deformation to LOS in radar coordinates")
parser.add_argument('-m','--model', dest='model', type=str,
required=True,
help='Input 3 channel FLOAT model file with DEM like info')
parser.add_argument('-o','--output', dest='output', type=str,
default='enu2los.rdr', help='Output 1 channel LOS file')
return parser.parse_args()
###The main program
if __name__ == '__main__':
###Parse command line
inps = cmdLineParser()
###For testing only
# createDummyModel()
####Load model image
print('Creating model image')
modelImg = isceobj.createDemImage()
modelImg.load(inps.model +'.xml') ##From cmd line
if (modelImg.bands !=3 ):
raise Exception('Model input file should be a 3 band image.')
modelImg.setAccessMode('read')
modelImg.createImage()
####Get geocoded information
startLon = modelImg.coord1.coordStart
deltaLon = modelImg.coord1.coordDelta
startLat = modelImg.coord2.coordStart
deltaLat = modelImg.coord2.coordDelta
####Load geometry information from pickle file.
iObj = load_pickle()
topo = iObj.getTopo() #Get info for the dem in radar coords
####Get the wavelength information.
###This is available in multiple locations within insarProc
#wvl = iObj.getMasterFrame().getInstrument().getRadarWavelength()
wvl = topo.radarWavelength
####Pixel-by-pixel Latitude image
print('Creating lat image')
objLat = isceobj.createImage()
objLat.load(topo.latFilename+'.xml')
objLat.setAccessMode('read')
objLat.createImage()
####Pixel-by-pixel Longitude image
print('Creating lon image')
objLon = isceobj.createImage()
objLon.load(topo.lonFilename+'.xml')
objLon.setAccessMode('read')
objLon.createImage()
#####Pixel-by-pixel LOS information
print('Creating LOS image')
objLos = isceobj.createImage()
objLos.load(topo.losFilename +'.xml')
objLos.setAccessMode('read')
objLos.createImage()
###Check if dimensions are the same
for img in (objLon, objLos):
if (img.width != objLat.width) or (img.length != objLat.length):
raise Exception('Lat, Lon and LOS files are not of the same size.')
####Create an output object
print ('Creating output image')
objOut = isceobj.createImage()
objOut.initImage(inps.output, 'write', objLat.width, type='FLOAT')
objOut.createImage()
print('Actual processing')
####The actual processing
#Stage 1: Construction
converter = ENU2LOS()
converter.configure()
#Stage 2: No ports for enu2los
#Stage 3: Set values
converter.setWidth(objLat.width) ###Radar coords width
converter.setNumberLines(objLat.length) ###Radar coords length
converter.setGeoWidth(modelImg.width) ###Geo coords width
converter.setGeoNumberLines(modelImg.length) ###Geo coords length
###Set up geo information
converter.setStartLatitude(startLat)
converter.setStartLongitude(startLon)
converter.setDeltaLatitude(deltaLat)
converter.setDeltaLongitude(deltaLon)
####Set up output scaling
converter.setScaleFactor(1.0) ###Change if ENU not in meters
converter.setWavelength(4*np.pi) ###Wavelength for conversion to radians
converter.enu2los(modelImage = modelImg,
latImage = objLat,
lonImage = objLon,
losImage = objLos,
outImage = objOut)
#Step 4: Close the images
modelImg.finalizeImage()
objLat.finalizeImage()
objLon.finalizeImage()
objLos.finalizeImage()
objOut.finalizeImage()
objOut.renderHdr() ###Create output XML file
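To eyeball the projected result, the single-band FLOAT output can be read straight back with numpy. The dimensions below are hypothetical placeholders; take the real width and length from the enu2los.rdr.xml header that renderHdr() just wrote.

# Sketch: inspect the LOS projection written above. WIDTH and LENGTH are
# placeholders -- read the real values from enu2los.rdr.xml.
import numpy as np
WIDTH, LENGTH = 1000, 2000
los = np.fromfile('enu2los.rdr', dtype=np.float32).reshape((LENGTH, WIDTH))
print('LOS projection range: %f to %f' % (los.min(), los.max()))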

docs/dev/Example3_orbits.py Normal file

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
import numpy as np
import isce
import isceobj
import stdproc
import copy
from iscesys.StdOEL.StdOELPy import create_writer
from isceobj.Orbit.Orbit import Orbit
###Load data from an insarApp run
###Load orbit2sch by default
def load_pickle(step='orbit2sch'):
import pickle as cPickle   # Python 3: the old cPickle module is now pickle
insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb'))
return insarObj
if __name__ == '__main__':
##### Load insarProc object
print('Loading original and interpolated WGS84 state vectors')
iObj = load_pickle(step='mocomppath')
####Make a copy of the peg point data
peg = copy.copy(iObj.peg)
#####Copy the original state vectors
#####These are the 10-15 vectors provided
#####with the sensor data in WGS84 coords
origOrbit = copy.copy(iObj.masterFrame.getOrbit())
print('From Original Metadata - WGS84')
print('Number of state vectors: %d'%len(origOrbit._stateVectors))
print('Time interval: %s %s'%(str(origOrbit._minTime),
str(origOrbit._maxTime)))
#####Line-by-line WGS84 interpolated orbit
#####This was done using Hermite polynomials
xyzOrbit = copy.copy(iObj.masterOrbit)
print('Line-by-Line XYZ interpolated')
print('Number of state vectors: %d'%len(xyzOrbit._stateVectors))
print('Time interval: %s %s'%(str(xyzOrbit._minTime),
str(xyzOrbit._maxTime)))
####Delete the insarProc object from "mocomppath"
del iObj
####Note:
####insarApp converts WGS84 orbits to SCH orbits
####during the orbit2sch step
######Line-by-line SCH orbit
######These were generated by converting
######Line-by-Line WGS84 orbits
print('Loading interpolated SCH orbits')
iObj = load_pickle('orbit2sch')
####Copy the peg information needed for conversion
pegHavg = copy.copy(iObj.averageHeight)
planet = copy.copy(iObj.planet)
###Copy the orbits
schOrbit = copy.copy(iObj.masterOrbit)
del iObj
print('Line-by-Line SCH interpolated')
print('Number of state vectors: %d'%len(schOrbit._stateVectors))
print('Time interval: %s %s'%(str(schOrbit._minTime),
str(schOrbit._maxTime)))
######Now convert the original state vectors to SCH coordinates
###stdWriter logging mechanism for some fortran modules
stdWriter = create_writer("log","",True,filename='orb.log')
print('*********************')
orbSch = stdproc.createOrbit2sch(averageHeight=pegHavg)
orbSch.setStdWriter(stdWriter)
orbSch(planet=planet, orbit=origOrbit, peg=peg)
print('*********************')
schOrigOrbit = copy.copy(orbSch.orbit)
del orbSch
print('Original WGS84 vectors to SCH')
print('Number of state vectors: %d'%len(schOrigOrbit._stateVectors))
print('Time interval: %s %s'%(str(schOrigOrbit._minTime),
str(schOrigOrbit._maxTime)))
print(str(schOrigOrbit._stateVectors[0]))
####Line-by-line interpolation of SCH orbits
####Using SCH orbits as inputs
pulseOrbit = Orbit()
pulseOrbit.configure()
#######Loop over and compare against interpolated SCH
for svOld in xyzOrbit._stateVectors:
####Get time from Line-by-Line WGS84
####And interpolate SCH orbit at those epochs
####SCH interpolation using simple linear interpolation
####WGS84 interpolation would use keyword method="hermite"
svNew = schOrigOrbit.interpolate(svOld.getTime())
pulseOrbit.addStateVector(svNew)
####Clear some variables
del xyzOrbit
del origOrbit
del schOrigOrbit
#####We compare the two interpolation schemes
####Orig WGS84 -> Line-by-line WGS84 -> Line-by-line SCH
####Orig WGS84 -> Orig SCH -> Line-by-line SCH
###Get the orbit information into Arrays
(told,xold,vold,relold) = schOrbit._unpackOrbit()
(tnew,xnew,vnew,relnew) = pulseOrbit._unpackOrbit()
xdiff = np.array(xold) - np.array(xnew)
vdiff = np.array(vold) - np.array(vnew)
print('Position Difference stats')
print('L1 mean in meters')
print(np.mean(np.abs(xdiff), axis=0))
print('')
print('RMS in meters')
print(np.sqrt(np.mean(xdiff*xdiff, axis=0)))
print('Velocity Difference stats')
print('L1 mean in meters/sec')
print(np.mean(np.abs(vdiff), axis=0))
print(' ')
print('RMS in meters/sec')
print(np.sqrt(np.mean(vdiff*vdiff, axis=0)))
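The same Orbit objects can also be sampled at an arbitrary epoch, not just at the pulse times. The snippet below is a sketch: orb stands in for any of the orbits above (schOrbit, for instance), and, per the comments earlier, WGS84 orbits would pass method='hermite' while SCH orbits can stay with linear interpolation; treat the exact signature as an assumption and check Orbit.interpolate in your ISCE version.

# Sketch: interpolate an orbit at an arbitrary epoch. 'orb' is a placeholder
# for any Orbit instance from above (e.g. schOrbit). WGS84 orbits would use
# method='hermite' as noted in the comments; SCH orbits use linear.
import datetime
orb = schOrbit
epoch = orb._minTime + datetime.timedelta(seconds=2.5)
sv = orb.interpolate(epoch, method='linear')
print('Position: %s' % str(sv.getPosition()))
print('Velocity: %s' % str(sv.getVelocity()))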

docs/dev/Example4_gdal.py Normal file

@@ -0,0 +1,113 @@
#!/usr/bin/env python3
import numpy as np
import argparse
from osgeo import gdal
import isce
import isceobj
import os
def cmdLineParse():
'''
Parse command line.
'''
parser = argparse.ArgumentParser(description='Convert GeoTiff to ISCE file')
parser.add_argument('-i','--input', dest='infile', type=str,
required=True, help='Input GeoTiff file. If a tar archive is also provided, the extracted GeoTiff is written to this file name.')
parser.add_argument('-o','--output', dest='outfile', type=str,
required=True, help='Output flat binary file with ISCE XML header')
parser.add_argument('-t','--tar', dest='tarfile', type=str,
default=None, help='Optional input tar archive. If provided, Band 8 is extracted to file name provided with input option.')
return parser.parse_args()
def dumpTiff(infile, outfile):
'''
Read geotiff tags.
'''
###Uses gdal bindings to read geotiff files
data = {}
ds = gdal.Open(infile)
data['width'] = ds.RasterXSize
data['length'] = ds.RasterYSize
gt = ds.GetGeoTransform()
data['minx'] = gt[0]
data['miny'] = gt[3] + data['width'] * gt[4] + data['length']*gt[5]
data['maxx'] = gt[0] + data['width'] * gt[1] + data['length']*gt[2]
data['maxy'] = gt[3]
data['deltax'] = gt[1]
data['deltay'] = gt[5]
data['reference'] = ds.GetProjectionRef()
band = ds.GetRasterBand(1)
inArr = band.ReadAsArray(0,0, data['width'], data['length'])
inArr.astype(np.float32).tofile(outfile)
return data
def extractBand8(intarfile, destfile):
'''
Extracts Band 8 of downloaded Tar file from EarthExplorer
'''
import tarfile
import shutil
fid = tarfile.open(intarfile)
fileList = fid.getmembers()
###Find the band 8 file
src = None
for kk in fileList:
if kk.name.endswith('B8.TIF'):
src = kk
if src is None:
raise Exception('Band 8 TIF file not found in tar archive')
print('Extracting: %s'%(src.name))
####Create source and target file Ids.
srcid = fid.extractfile(src)
destid = open(destfile,'wb')
##Copy content
shutil.copyfileobj(srcid, destid)
fid.close()
destid.close()
if __name__ == '__main__':
####Parse cmd line
inps = cmdLineParse()
####If input tar file is given
if inps.tarfile is not None:
extractBand8(inps.tarfile, inps.infile)
print('Dumping image to file')
meta = dumpTiff(inps.infile, inps.outfile)
# print(meta)
####Create an ISCE XML header for the landsat image
img = isceobj.createDemImage()
img.setFilename(inps.outfile)
img.setDataType('FLOAT')
dictProp = {
'REFERENCE' : meta['reference'],
'Coordinate1': {
'size': meta['width'],
'startingValue' : meta['minx'],
'delta': meta['deltax']
},
'Coordinate2': {
'size' : meta['length'],
'startingValue' : meta['maxy'],
'delta': meta['deltay']
},
'FILE_NAME' : inps.outfile
}
img.init(dictProp)
img.renderHdr()
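As with the other examples, the written raster can be loaded back through the XML header produced by renderHdr(); the short check below is only a sketch and assumes the script above ran to completion in the same session.

# Sketch: reload the image through its ISCE XML header and confirm the
# dimensions match the GeoTiff metadata returned by dumpTiff().
check = isceobj.createDemImage()
check.load(inps.outfile + '.xml')
print('Width : %d (expected %d)' % (check.width, meta['width']))
print('Length: %d (expected %d)' % (check.length, meta['length']))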