Merge branch 'master' of https://github.com/isce-framework/isce2 into UAVSAR
commit 36a13eef1d
@@ -1,6 +1,6 @@
version: 2
version: 2.1
jobs:
build:
test:
docker:
- image: hysds/pge-base:latest
user: root
@@ -79,5 +79,115 @@ jobs:
topsApp.py --help --steps
stripmapApp.py --help --steps
python3 -c "import isce"

build:
docker:
- image: docker:stable-git
steps:
- checkout
- setup_remote_docker
- run:
name: Install dependencies
command: |
apk add --no-cache \
python-dev py-pip bash pigz build-base libffi-dev openssl-dev
pip install \
docker-compose awscli
- run:
name: Build docker image
command: |
mkdir images
SHA1=$(echo $CIRCLE_SHA1 | cut -c1-7)
echo "export TAG=$SHA1" >> images/env.sh
source images/env.sh
docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile .
cd images
docker save isce/isce2:$TAG > isce2.tar
- persist_to_workspace:
root: images
paths:
- "*"

build-periodically:
docker:
- image: docker:stable-git
steps:
- checkout
- setup_remote_docker
- run:
name: Install dependencies
command: |
apk add --no-cache \
python-dev py-pip bash pigz build-base libffi-dev openssl-dev
pip install \
docker-compose awscli
- run:
name: Build docker image
command: |
mkdir images
echo 'export TAG=$(date -u +%Y%m%d)' >> images/env.sh
source images/env.sh
docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile .
cd images
docker save isce/isce2:$TAG > isce2.tar
- persist_to_workspace:
root: images
paths:
- "*"

deploy:
docker:
- image: docker:stable-git
steps:
- setup_remote_docker
- run:
name: Install dependencies
command: |
apk add --no-cache \
curl file
- attach_workspace:
at: images
- run:
name: Deploy
command: |
cd images
source env.sh
docker load -i isce2.tar
docker tag isce/isce2:$TAG isce/isce2:latest
docker login -u $DOCKER_USER -p $DOCKER_PASS
docker push isce/isce2:$TAG
docker push isce/isce2:latest

workflows:
version: 2
test:
jobs:
- test
build-deploy:
jobs:
- build:
filters:
branches:
only: master
- deploy:
requires:
- build
filters:
branches:
only: master
weekly:
triggers:
- schedule:
cron: "0 7 * * 0"
filters:
branches:
only:
- master
jobs:
- build-periodically:
filters:
branches:
only: master
- deploy:
requires:
- build-periodically
filters:
branches:
only: master
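Note on the config above: the two build jobs differ only in how the image tag is derived. The push-triggered build tags with the first seven characters of the commit SHA, while the weekly build tags with the UTC build date. A minimal Python sketch of the two tag values (the full SHA below is hypothetical):

from datetime import datetime, timezone

circle_sha1 = '36a13eef1d5f2f3a9b0c7d8e4f1a2b3c4d5e6f70'   # hypothetical full commit SHA
sha_tag = circle_sha1[:7]                                   # mirrors: cut -c1-7
date_tag = datetime.now(timezone.utc).strftime('%Y%m%d')    # mirrors: date -u +%Y%m%d
print(sha_tag, date_tag)                                    # e.g. 36a13ee 20190503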
@@ -1,5 +1,7 @@
# ISCE2

[](https://circleci.com/gh/isce-framework/isce2)

This is the Interferometric synthetic aperture radar Scientific Computing
Environment (ISCE). Its initial development was funded by NASA's Earth Science
Technology Office (ESTO) under the Advanced Information Systems Technology
@@ -1053,12 +1053,20 @@ class Orbit(Component):
return tguess, rng


def exportToC(self):
def exportToC(self, reference=None):
from isceobj.Util import combinedlibmodule
orb = []

###Continue usage as usual if no reference is provided
###This wont break the old interface but could cause
###issues at midnight crossing
if reference is None:
reference = self.minTime()

refEpoch = reference.replace(hour=0, minute=0, second=0, microsecond=0)

for sv in self._stateVectors:
tim = DTU.seconds_since_midnight(sv.getTime())
tim = (sv.getTime() - refEpoch).total_seconds()
pos = sv.getPosition()
vel = sv.getVelocity()
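The change above switches the exported state-vector times from seconds since each vector's own midnight to seconds relative to a single reference epoch (midnight of the supplied reference time), so the time axis stays monotonic across a UTC midnight crossing. A minimal sketch of the arithmetic, using hypothetical datetimes rather than ISCE state vectors:

from datetime import datetime

# Hypothetical acquisition start and a state vector shortly after midnight.
sensing_start = datetime(2019, 5, 3, 23, 58, 0)
sv_time = datetime(2019, 5, 4, 0, 2, 0)

# Old behaviour: seconds since that vector's own midnight -> jumps from ~86280 back to 120.
old_tim = (sv_time - sv_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()

# New behaviour: seconds since midnight of the reference day -> stays monotonic.
ref_epoch = sensing_start.replace(hour=0, minute=0, second=0, microsecond=0)
new_tim = (sv_time - ref_epoch).total_seconds()

print(old_tim)   # 120.0
print(new_tim)   # 86520.0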
@@ -34,6 +34,7 @@ from isceobj import Constants as CN
from iscesys.Component.Component import Component, Port
from zerodop.geo2rdr import geo2rdr
from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
from iscesys import DateTimeUtil as DTU
from isceobj.Util import combinedlibmodule
from isceobj.Util.Poly1D import Poly1D
from isceobj.Util.Poly2D import Poly2D
@@ -243,7 +244,7 @@ class Geo2rdr(Component):
azimuthOffAcc = self.azimuthOffsetImage.getImagePointer()

cOrbit = self.orbit.exportToC()
cOrbit = self.orbit.exportToC(reference=self.sensingStart)
geo2rdr.setOrbit_Py(cOrbit)

#####Output cropped DEM for first band
@@ -393,7 +394,7 @@ class Geo2rdr(Component):
geo2rdr.setDopplerAccessor_Py(self.polyDopplerAccessor)
geo2rdr.setPRF_Py(float(self.prf))
geo2rdr.setRadarWavelength_Py(float(self.radarWavelength))
geo2rdr.setSensingStart_Py(float(self.sensingStart))
geo2rdr.setSensingStart_Py(DTU.seconds_since_midnight(self.sensingStart))
geo2rdr.setLength_Py(int(self.length))
geo2rdr.setWidth_Py(int(self.width))
geo2rdr.setNumberRangeLooks_Py(int(self.numberRangeLooks))
@@ -424,9 +425,7 @@ class Geo2rdr(Component):
self.radarWavelength = float(var)

def setSensingStart(self,var):
rtime = datetime.datetime.combine(var.date(), datetime.time(0,0,0))
secs = (var - rtime).total_seconds()
self.sensingStart = float(secs)
self.sensingStart = var

def setLength(self,var):
self.length = int(var)
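With this change setSensingStart keeps the datetime object instead of a pre-computed float; the conversion to seconds happens only at the C boundary, and the same datetime doubles as the reference epoch for exportToC above. A small sketch of that deferred-conversion pattern, with a purely hypothetical component class:

from datetime import datetime

class SensorLike:
    """Hypothetical sketch: store the datetime, convert only when calling the C layer."""
    def set_sensing_start(self, t: datetime):
        self.sensing_start = t          # keep the full datetime, not a float

    @staticmethod
    def seconds_since_midnight(t: datetime) -> float:
        midnight = t.replace(hour=0, minute=0, second=0, microsecond=0)
        return (t - midnight).total_seconds()

    def call_c_layer(self) -> float:
        # conversion deferred to the hand-off, so the datetime stays available
        # for other uses (e.g. as an orbit reference epoch)
        return self.seconds_since_midnight(self.sensing_start)

obj = SensorLike()
obj.set_sensing_start(datetime(2019, 5, 3, 23, 58, 0))
print(obj.call_c_layer())   # 86280.0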
@@ -34,6 +34,7 @@ from isceobj import Constants as CN
from iscesys.Component.Component import Component, Port
from zerodop.geozero import geozero
from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
from iscesys import DateTimeUtil as DTU
from isceobj.Util import combinedlibmodule
from isceobj.Util.Poly1D import Poly1D
import os
@@ -221,7 +222,7 @@ class Geocode(Component):
complexFlag = self.inputImage.dataType.upper().startswith('C')
nBands = self.inputImage.getBands()

cOrbit = self.orbit.exportToC()
cOrbit = self.orbit.exportToC(reference=self.sensingStart)
geozero.setOrbit_Py(cOrbit)

#####Output cropped DEM for first band
@@ -347,7 +348,7 @@ class Geocode(Component):
geozero.setDopplerAccessor_Py(self.polyDopplerAccessor)
geozero.setPRF_Py(float(self.prf))
geozero.setRadarWavelength_Py(float(self.radarWavelength))
geozero.setSensingStart_Py(float(self.sensingStart))
geozero.setSensingStart_Py(DTU.seconds_since_midnight(self.sensingStart))
geozero.setFirstLatitude_Py(float(self.firstLatitude))
geozero.setFirstLongitude_Py(float(self.firstLongitude))
geozero.setDeltaLatitude_Py(float(self.deltaLatitude))
@@ -392,9 +393,7 @@ class Geocode(Component):
self.radarWavelength = float(var)

def setSensingStart(self,var):
rtime = datetime.datetime.combine(var.date(), datetime.time(0,0,0))
secs = (var - rtime).total_seconds()
self.sensingStart = float(secs)
self.sensingStart = var

def setFirstLatitude(self,var):
self.firstLatitude = float(var)
@@ -120,7 +120,7 @@ class Topo(Component):
self.setState()

cOrbit = self.orbit.exportToC()
cOrbit = self.orbit.exportToC(reference=self.sensingStart)
topozero.setOrbit_Py(cOrbit)
topozero.topo_Py(self.demAccessor, self.polyDopplerAccessor, self.slantRangeAccessor)
combinedlibmodule.freeCOrbit(cOrbit)
@@ -98,10 +98,10 @@ def main(factoryFile,package,buildDir):
# import isce
import filecmp
try:
import imp
import importlib
factoryFile = os.path.abspath(factoryFile)
mod = imp.find_module(factoryFile.replace('.py',''))
factModule = imp.load_module(factoryFile.replace('.py',''),mod[0],mod[1],mod[2])
mod = importlib.util.spec_from_file_location('.', factoryFile)
factModule = mod.loader.load_module()
factoriesInfo = factModule.getFactoriesInfo()
nameList = []
for k,v in factoriesInfo.items():
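The replacement above loads the factory file through importlib instead of the deprecated imp module. Note that spec.loader.load_module() still works but is itself deprecated; the fully supported spelling builds the module from the spec. A minimal sketch (load_factory_module and the file name are illustrative, not part of the change):

import importlib.util
import os

def load_factory_module(factory_file):
    # hypothetical helper: load a Python source file given its path
    factory_file = os.path.abspath(factory_file)
    name = os.path.splitext(os.path.basename(factory_file))[0]
    spec = importlib.util.spec_from_file_location(name, factory_file)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module

# usage (hypothetical file name): load_factory_module('Factories.py').getFactoriesInfo()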
@@ -195,14 +195,14 @@ def main(iargs=None):
dateDirs = sorted(glob.glob(os.path.join(inps.inputDir,'2*')))
if inps.outputDir is not None:
f = open(run_unPack,'w')
for dataDir in dateDirs:
AlosFiles = glob.glob(os.path.join(dataDir, 'ALP*'))
for dateDir in dateDirs:
AlosFiles = glob.glob(os.path.join(dateDir, 'ALP*'))
if len(AlosFiles)>0:
acquisitionDate = os.path.basename(dataDir)
acquisitionDate = os.path.basename(dateDir)
slcDir = os.path.join(inps.outputDir, acquisitionDate)
if not os.path.exists(slcDir):
os.makedirs(slcDir)
cmd = 'unpackFrame_ALOS_raw.py -i ' + os.path.abspath(dataDir) + ' -o ' + slcDir
cmd = 'unpackFrame_ALOS_raw.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
IMG_files = glob.glob(os.path.join(AlosFiles[0],'IMG*'))
if inps.fbd2fbs:
#recommended for regular interferometry to use all FBS bandwidth
@@ -171,26 +171,26 @@ def main(iargs=None):
dateDirs = glob.glob(os.path.join(inputDir,'2*'))
if outputDir is not None:
f = open(run_unPack,'w')
for dataDir in dateDirs:
CSKFiles = glob.glob(os.path.join(dataDir, 'CSK*.h5'))
for dateDir in dateDirs:
CSKFiles = glob.glob(os.path.join(dateDir, 'CSK*.h5'))
if len(CSKFiles)>0:
acquisitionDate = os.path.basename(dataDir)
acquisitionDate = os.path.basename(dateDir)
slcDir = os.path.join(outputDir, acquisitionDate)
if not os.path.exists(slcDir):
os.makedirs(slcDir)
cmd = 'unpackFrame_CSK_raw.py -i ' + os.path.abspath(dataDir) + ' -o ' + slcDir
cmd = 'unpackFrame_CSK_raw.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
print (cmd)
f.write(inps.text_cmd + cmd+'\n')

"""
##### FOR now lets ptu all scences in single folder
CSKFiles = glob.glob(os.path.join(dataDir, 'EL*'))
CSKFiles = glob.glob(os.path.join(dateDir, 'EL*'))
if len(CSKFiles)>0:
acquisitionDate = os.path.basename(dataDir)
acquisitionDate = os.path.basename(dateDir)
slcDir = os.path.join(outputDir, acquisitionDate)
if not os.path.exists(slcDir):
os.makedirs(slcDir)
cmd = 'unpackFrame_CSK_raw.py -i ' + os.path.abspath(dataDir) + ' -o ' + slcDir
cmd = 'unpackFrame_CSK_raw.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir

if len(CSKFiles) > 1:
cmd = cmd + ' -m'
@@ -0,0 +1,217 @@
#!/usr/bin/env python3
# Author: David Bekaert
# Zhang Yunjun, adopted from prepRawALOS.py for ALOS2 SM SLC


import os
import glob
import argparse
import shutil
import tarfile
import zipfile
from uncompressFile import uncompressfile


def createParser():
'''
Create command line parser.
'''

parser = argparse.ArgumentParser(description='Prepare ALOS2 slc for processing (unzip/untar files, '
'organize in date folders, generate script to unpack into isce formats).')
parser.add_argument('-i', '--input', dest='inputDir', type=str, required=True,
help='directory with the downloaded SLC data')
parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False,
help='Optional: remove zip/tar/compressed files after unpacking into date structure '
'(default is to keep in archive fo lder)')
parser.add_argument('-o', '--output', dest='outputDir', type=str, required=False,
help='output directory where data needs to be unpacked into isce format (for script generation).')
parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;',
help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;')
return parser
def cmdLineParse(iargs=None):
'''
Command line parser.
'''

parser = createParser()
inps = parser.parse_args(args = iargs)

# parsing required inputs
inps.inputDir = os.path.abspath(inps.inputDir)

# parsing optional inputs
if inps.outputDir:
inps.outputDir = os.path.abspath(inps.outputDir)
return inps


def get_Date(ALOS_folder):
"""Grab acquisition date"""
# will search for different version of workreport to be compatible with ASf, WInSAR etc
workreport_files = ('*workreport','summary.txt')
for workreport_file in workreport_files:
workreports = glob.glob(os.path.join(ALOS_folder,workreport_file))

# if nothing is found return a failure
if len(workreports) > 0:
for workreport in workreports:
template_dict = {}
with open(workreport) as openfile:
for line in openfile:
c = line.split("=")
template_dict[c[0].strip()] = c[1].strip()
acquisitionDate = (str(template_dict['Img_SceneCenterDateTime'][1:9]))
if acquisitionDate:
successflag = True
return successflag, acquisitionDate

# if it reached here it could not find the acqusiitionDate
successflag = False
acquisitionDate = 'FAIL'
return successflag, acquisitionDate
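get_Date above appears to rely on the workreport/summary file storing quoted key=value pairs, so characters [1:9] of Img_SceneCenterDateTime skip the opening quote and yield YYYYMMDD. A short illustration with a made-up line in that assumed format:

line = 'Img_SceneCenterDateTime="20190503 10:21:45.123"\n'   # hypothetical workreport line
key, value = [s.strip() for s in line.split('=')]
print(value[1:9])                                            # 20190503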
def get_ALOS2_name(infile):
"""Get the ALOS2210402970 name from compress file in various format."""
outname = None
fbase = os.path.basename(infile)
if 'ALOS2' in fbase:
fbase = fbase.replace('_','-')
outname = [i for i in fbase.split('-') if 'ALOS2' in i][0]
else:
fext = os.path.splitext(infile)[1]
if fext in ['.tar', '.gz']:
with tarfile.open(infile, 'r') as tar:
file_list = tar.getnames()
elif fext in ['.zip']:
with zipfile.ZipFile(infile, 'r') as z:
file_list = z.namelist()
else:
raise ValueError('unrecognized file extension: {}'.format(fext))
led_file = [i for i in file_list if 'LED' in i][0]
led_file = os.path.basename(led_file)
outname = [i for i in led_file.split('-') if 'ALOS2' in i][0]
return outname
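The string handling in get_ALOS2_name reduces an archive name to the bare ALOS2 scene identifier, falling back to the LED file name inside the archive when the identifier is not in the outer file name. A self-contained illustration of the first branch, using a made-up archive name built around the identifier quoted in the docstring:

fbase = '0000123456_001_ALOS2210402970-190503-FBDR1.1__A.zip'   # hypothetical archive name
fbase = fbase.replace('_', '-')
print([i for i in fbase.split('-') if 'ALOS2' in i][0])         # ALOS2210402970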
def main(iargs=None):
'''
The main driver.
'''

inps = cmdLineParse(iargs)

# filename of the runfile
run_unPack = 'run_unPackALOS2'

# loop over the different folder of ALOS2 zip/tar files and unzip them, make the names consistent
file_exts = (os.path.join(inps.inputDir, '*.zip'),
os.path.join(inps.inputDir, '*.tar'),
os.path.join(inps.inputDir, '*.gz'))
for file_ext in file_exts:
# loop over zip/tar files
for fname in sorted(glob.glob(file_ext)):
## the path to the folder/zip
workdir = os.path.dirname(fname)

## get the output name folder without any extensions
dir_unzip = get_ALOS2_name(fname)
dir_unzip = os.path.join(workdir, dir_unzip)

# loop over two cases (either file or folder):
# if this is a file, try to unzip/untar it
if os.path.isfile(fname):
# unzip the file in the outfolder
successflag_unzip = uncompressfile(fname, dir_unzip)

# put failed files in a seperate directory
if not successflag_unzip:
dir_failed = os.path.join(workdir,'FAILED_FILES')
if not os.path.isdir(dir_failed):
os.makedirs(dir_failed)
cmd = 'mv {} {}'.format(fname, dir_failed)
os.system(cmd)
else:
# check if file needs to be removed or put in archive folder
if inps.rmfile:
os.remove(fname)
print('Deleting: ' + fname)
else:
dir_archive = os.path.join(workdir,'ARCHIVED_FILES')
if not os.path.isdir(dir_archive):
os.makedirs(dir_archive)
cmd = 'mv {} {}'.format(fname, dir_archive)
os.system(cmd)


# loop over the different ALOS folders and make sure the folder names are consistent.
# this step is not needed unless the user has manually unzipped data before.
ALOS_folders = glob.glob(os.path.join(inps.inputDir, 'ALOS2*'))
for ALOS_folder in ALOS_folders:
# in case the user has already unzipped some files
# make sure they are unzipped similar like the uncompressfile code
temp = os.path.basename(ALOS_folder)
parts = temp.split(".")
parts = parts[0].split('-')
ALOS_outfolder_temp = parts[0]
ALOS_outfolder_temp = os.path.join(os.path.dirname(ALOS_folder),ALOS_outfolder_temp)
# check if the folder (ALOS_folder) has a different filename as generated from uncompressFile (ALOS_outfolder_temp)
if not (ALOS_outfolder_temp == ALOS_folder):
# it is different, check if the ALOS_outfolder_temp already exists, if yes, delete the current folder
if os.path.isdir(ALOS_outfolder_temp):
print('Remove ' + ALOS_folder + ' as ' + ALOS_outfolder_temp + ' exists...')
# check if this folder already exist, if so overwrite it
shutil.rmtree(ALOS_folder)


# loop over the different ALOS folders and organize in date folders
ALOS_folders = glob.glob(os.path.join(inps.inputDir, 'ALOS2*'))
for ALOS_folder in ALOS_folders:
# get the date
successflag, imgDate = get_Date(ALOS_folder)

workdir = os.path.dirname(ALOS_folder)
if successflag:
# move the file into the date folder
SLC_dir = os.path.join(workdir,imgDate,'')
if not os.path.isdir(SLC_dir):
os.makedirs(SLC_dir)

# check if the folder already exist in that case overwrite it
ALOS_folder_out = os.path.join(SLC_dir,os.path.basename(ALOS_folder))
if os.path.isdir(ALOS_folder_out):
shutil.rmtree(ALOS_folder_out)
# move the ALOS acqusition folder in the date folder
cmd = 'mv ' + ALOS_folder + ' ' + SLC_dir + '.'
os.system(cmd)

print ('Succes: ' + imgDate)
else:
print('Failed: ' + ALOS_folder)


# now generate the unpacking script for all the date dirs
dateDirs = sorted(glob.glob(os.path.join(inps.inputDir,'2*')))
if inps.outputDir is not None:
f = open(run_unPack,'w')
for dateDir in dateDirs:
AlosFiles = glob.glob(os.path.join(dateDir, 'ALOS2*'))
# if there is at least one frame
if len(AlosFiles)>0:
acquisitionDate = os.path.basename(dateDir)
slcDir = os.path.join(inps.outputDir, acquisitionDate)
if not os.path.exists(slcDir):
os.makedirs(slcDir)
cmd = 'unpackFrame_ALOS2.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
print (cmd)
f.write(inps.text_cmd + cmd+'\n')
f.close()
return


if __name__ == '__main__':

main()
@@ -161,14 +161,14 @@ def main(iargs=None):
dateDirs = glob.glob(os.path.join(inputDir,'2*'))
if outputDir is not None:
f = open(run_unPack,'w')
for dataDir in dateDirs:
RSAT2Files = glob.glob(os.path.join(dataDir, 'imagery_HH.tif'))
for dateDir in dateDirs:
RSAT2Files = glob.glob(os.path.join(dateDir, 'imagery_HH.tif'))
if len(RSAT2Files)>0:
acquisitionDate = os.path.basename(dataDir)
acquisitionDate = os.path.basename(dateDir)
slcDir = os.path.join(outputDir, acquisitionDate)
if not os.path.exists(slcDir):
os.makedirs(slcDir)
cmd = 'unpackFrame_RSAT2.py -i ' + os.path.abspath(dataDir) + ' -o ' + slcDir
cmd = 'unpackFrame_RSAT2.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
print (cmd)
f.write(inps.text_cmd + cmd+'\n')
f.close()
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
import os, imp, sys
import os, sys
import importlib
import argparse
import configparser
@@ -21,7 +22,7 @@ class ConfigParser:
# Setting up and reading config
Config = configparser.ConfigParser()
Config.optionxform = str
Config.readfp(content)
Config.read_file(content)

# Reading the function sequence followed by input parameters
# followed by the function parameters
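readfp() has long been a deprecated alias of read_file(), so the rename above is a drop-in change. A minimal self-contained sketch of the modern call, with a made-up config body:

import configparser
import io

cfg = configparser.ConfigParser()
cfg.optionxform = str                                     # keep option names case-sensitive, as above
cfg.read_file(io.StringIO('[steps]\nRunAll = True\n'))    # hypothetical config content
print(cfg.get('steps', 'RunAll'))                         # True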
@@ -94,14 +95,12 @@ class ConfigParser:
# If any of the following calls raises an exception,
# there's a problem we can't handle -- let the caller handle it.
fp, pathname, description = imp.find_module(name)
spec = importlib.util.find_spec(name)

try:
return imp.load_module(name, fp, pathname, description)
finally:
# Since we may exit via an exception, close fp explicitly.
if fp:
fp.close()
return spec.loader.load_module()
except ImportError:
print('module {} not found'.format(name))

# Check existence of the input file
def check_if_files_exist(Files, ftype='input'):
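The new code resolves the module with importlib.util.find_spec and then calls the loader. A variant that avoids the deprecated load_module() entirely would build the module from the spec, as sketched below (load_by_name is illustrative, not part of the change):

import importlib.util

def load_by_name(name):
    # hypothetical sketch: name-based import without the deprecated load_module()
    spec = importlib.util.find_spec(name)
    if spec is None:
        print('module {} not found'.format(name))
        return None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module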
@@ -14,24 +14,25 @@ def cmdLineParse():
Command line parser.
'''

parser = argparse.ArgumentParser(description='Unpack CSK SLC data and store metadata in pickle file.')
parser = argparse.ArgumentParser(description='Unpack ALOS2 SLC data and store metadata in pickle file.')
parser.add_argument('-i','--input', dest='h5dir', type=str,
required=True, help='Input CSK directory')
required=True, help='Input ALOS2 directory')
parser.add_argument('-o', '--output', dest='slcdir', type=str,
required=True, help='Output SLC directory')
parser.add_argument('-d', '--deskew', dest='deskew', action='store_true',
default=False, help='To read in for deskewing data later')

parser.add_argument('-p', '--polarization', dest='polarization', type=str,
default='HH', help='polarization in case if quad or full pol data exists. Deafult: HH')
return parser.parse_args()


def unpack(hdf5, slcname, deskew=False):
def unpack(hdf5, slcname, deskew=False, polarization='HH'):
'''
Unpack HDF5 to binary SLC file.
'''

imgname = glob.glob(os.path.join(hdf5,'IMG*'))[0]
ldrname = glob.glob(os.path.join(hdf5, 'LED*'))[0]
imgname = glob.glob(os.path.join(hdf5, '*/IMG-{}*'.format(polarization)))[0]
ldrname = glob.glob(os.path.join(hdf5, '*/LED*'))[0]
if not os.path.isdir(slcname):
os.mkdir(slcname)
@@ -94,4 +95,7 @@ if __name__ == '__main__':
if inps.h5dir.endswith('/'):
inps.h5dir = inps.h5dir[:-1]

obj = unpack(inps.h5dir, inps.slcdir, deskew=inps.deskew)
obj = unpack(inps.h5dir, inps.slcdir,
deskew=inps.deskew,
polarization=inps.polarization)
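The new glob patterns look one directory level below the input folder (*/IMG-..., */LED*), which matches the date-folder layout produced by the ALOS2 prep script above, and the polarization string selects a single IMG file when quad-pol data is present. A small sketch of the pattern against a hypothetical layout:

import os

h5dir = '20190503'          # hypothetical date folder containing ALOS2210402970-.../IMG-HH-..., LED-...
polarization = 'HH'
pattern = os.path.join(h5dir, '*/IMG-{}*'.format(polarization))
print(pattern)              # 20190503/*/IMG-HH*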
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
import os, imp, sys
import os, sys
import importlib
import argparse
import configparser
@@ -22,7 +23,7 @@ class ConfigParser:
# Setting up and reading config
Config = configparser.ConfigParser()
Config.optionxform = str
Config.readfp(content)
Config.read_file(content)

# Reading the function sequence followed by input parameters
# followed by the function parameters
@@ -157,14 +158,12 @@ class ConfigParser:
# If any of the following calls raises an exception,
# there's a problem we can't handle -- let the caller handle it.
fp, pathname, description = imp.find_module(name)
spec = importlib.util.find_spec(name)

try:
return imp.load_module(name, fp, pathname, description)
finally:
# Since we may exit via an exception, close fp explicitly.
if fp:
fp.close()
return spec.loader.load_module()
except ImportError:
print('module {} not found'.format(name))

# Check existence of the input file
def check_if_files_exist(Files, ftype='input'):