#!/usr/bin/env python3
########################
# Author: Minyan Zhong, Heresh Fattahi
#
# For geocoding SLCs
########################

import os, sys, glob
import argparse
import configparser
import datetime
import numpy as np

import isce
import isceobj
from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1
from Stack import config, run, sentinelSLC

helpstr = '''

Processor for Sentinel-1 data using the ISCE software.

For a full list of options, try: sentinelApp.py -h

sentinelApp.py generates all configuration and run files required for processing Sentinel-1 TOPS data.

The following are required to start processing:

1) a folder that includes the Sentinel-1 SLCs,
2) a DEM (Digital Elevation Model),
3) a folder that includes the precise orbits (use dloadOrbits.py to download or update your orbit folder),
4) a folder of Sentinel-1 auxiliary files (used for correcting the Elevation Antenna Pattern),
5) a bounding box as South North West East.

Note that sentinelApp.py does not process any data. It only prepares the configuration files and the run files. You then need to execute the generated run files in order. To see what is actually being done, inspect each run file generated by sentinelApp.py: each one contains several commands that are independent of each other and can be executed in parallel. The config files referenced by a run file hold the processing options for that specific command/function.

'''

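# A hypothetical example invocation (the paths and the bounding box below are
# illustrative assumptions, not values from this repository):
#
#   sentinelApp.py -s /data/SLC -o /data/orbits -a /data/aux_cal \
#                  -d /data/dem/demLat_N30_N34_Lon_W120_W115.dem.wgs84 \
#                  -b '31 33 -119 -116' -p vv -r 9 -z 3
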
class customArgparseAction(argparse.Action):
    def __call__(self, parser, args, values, option_string=None):
        '''
        The action to be performed.
        '''
        print(helpstr)
        parser.exit()


def createParser():
    parser = argparse.ArgumentParser(description='Preparing the directory structure and config files for the processing of Sentinel-1 data')

    parser.add_argument('-H', '--hh', nargs=0, action=customArgparseAction,
                        help='Display detailed help information.')

    parser.add_argument('-s', '--slc_directory', dest='slc_dirname', type=str, required=True,
                        help='Directory with all Sentinel-1 SLCs')

    parser.add_argument('-o', '--orbit_directory', dest='orbit_dirname', type=str, required=True,
                        help='Directory with all orbits')

    parser.add_argument('-a', '--aux_directory', dest='aux_dirname', type=str, required=True,
                        help='Directory with all Sentinel-1 auxiliary (aux_cal) files')

    parser.add_argument('-w', '--working_directory', dest='work_dir', type=str, default='./',
                        help='Working directory')

    parser.add_argument('-d', '--dem', dest='dem', type=str, required=True,
                        help='Path to the DEM file')

    parser.add_argument('-n', '--swath_num', dest='swath_num', type=str, default='1 2 3',
                        help='A list of swaths to be processed')

    parser.add_argument('-b', '--bbox', dest='bbox', type=str, default=None,
                        help='Lat/Lon bounding box in SNWE order (South North West East)')

    parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;',
                        help='Text command to be added to the beginning of each line of the run files. Example: source ~/.bash_profile;')

    parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv',
                        help='SAR data polarization')

    parser.add_argument('-u', '--update', dest='update', type=int, default=0,
                        help='re-run (0) or update (1)')

    parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3',
                        help='Number of looks in azimuth for interferogram multi-looking')

    parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9',
                        help='Number of looks in range for interferogram multi-looking')

    return parser


def cmdLineParse(iargs=None):
    parser = createParser()
    inps = parser.parse_args(args=iargs)

    inps.slc_dirname = os.path.abspath(inps.slc_dirname)
    inps.orbit_dirname = os.path.abspath(inps.orbit_dirname)
    inps.aux_dirname = os.path.abspath(inps.aux_dirname)
    inps.work_dir = os.path.abspath(inps.work_dir)
    inps.dem = os.path.abspath(inps.dem)

    return inps


####################################
def get_dates(inps):
    # Given the SLC directory, this function extracts the acquisition dates
    # and prepares a dictionary of Sentinel-1 SLC files such that the keys are
    # acquisition dates and the values are instances of the sentinelSLC class
    # defined in Stack.py.

    if inps.bbox is not None:
        bbox = [float(val) for val in inps.bbox.split()]

    if os.path.isfile(inps.slc_dirname):
        print('reading SAFE files from: ' + inps.slc_dirname)
        SAFE_files = []
        for line in open(inps.slc_dirname):
            SAFE_files.append(str.replace(line, '\n', '').strip())
    else:
        SAFE_files = glob.glob(os.path.join(inps.slc_dirname, 'S1*_IW_SLC*zip'))  # changed to zip file by Minyan Zhong

    if len(SAFE_files) == 0:
        raise Exception('No SAFE file found')
    else:
        print("Number of SAFE files found: " + str(len(SAFE_files)))

    ################################
    # write the list of SAFE files to a text file
    f = open('SAFE_files.txt', 'w')
    for safe in SAFE_files:
        f.write(safe + '\n')
    f.close()

    ################################
    # group the files based on dates
    safe_dict = {}
    for safe in SAFE_files:
        safeObj = sentinelSLC(safe)
        safeObj.get_dates()
        safeObj.get_orbit(inps.orbit_dirname, inps.work_dir)
        if safeObj.date not in safe_dict.keys():
            safe_dict[safeObj.date] = safeObj
        else:
            safe_dict[safeObj.date].safe_file = safe_dict[safeObj.date].safe_file + ' ' + safe

    ################################
    dateList = [key for key in safe_dict.keys()]
    dateList.sort()
    print("*****************************************")
    print("Number of dates : " + str(len(dateList)))
    print("List of dates : ")
    print(dateList)

    ################################
    # get the files covering the bounding box
    S = []
    N = []
    W = []
    E = []
    safe_dict_bbox = {}
    print('date south north west east')
    for date in dateList:
        #safe_dict[date].get_lat_lon()
        safe_dict[date].get_lat_lon_v2()
        #safe_dict[date].get_lat_lon_v3(inps)
        S.append(safe_dict[date].SNWE[0])
        N.append(safe_dict[date].SNWE[1])
        W.append(safe_dict[date].SNWE[2])
        E.append(safe_dict[date].SNWE[3])
        print(date, safe_dict[date].SNWE[0], safe_dict[date].SNWE[1], safe_dict[date].SNWE[2], safe_dict[date].SNWE[3])
        if inps.bbox is not None:
            # keep only the dates whose footprint fully contains the user-specified bbox
            if safe_dict[date].SNWE[0] <= bbox[0] and safe_dict[date].SNWE[1] >= bbox[1] and safe_dict[date].SNWE[2] <= bbox[2] and safe_dict[date].SNWE[3] >= bbox[3]:
                safe_dict_bbox[date] = safe_dict[date]

    print("*****************************************")

    ################################
    print('All dates')
    print(dateList)
    if inps.bbox is not None:
        safe_dict = safe_dict_bbox
        dateList = [key for key in safe_dict.keys()]
        dateList.sort()
        print('dates covering the bbox')
        print(dateList)

    return dateList, safe_dict


def checkCurrentStatus(inps):
    acquisitionDates, safe_dict = get_dates(inps)
    slcDir = os.path.join(inps.work_dir, 'slc')

    if os.path.exists(slcDir):
        slcFiles = glob.glob(os.path.join(slcDir, '*'))
        existed_dates = [os.path.basename(slc) for slc in slcFiles]
        existed_dates.sort()

        if inps.update and len(existed_dates) > 0:
            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
            print('')
            print('Previously processed acquisitions were found: ')
            print(existed_dates)
            print('')
            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')

            acquisitionDates = list(set(acquisitionDates).difference(set(existed_dates)))
            acquisitionDates.sort()

            if len(acquisitionDates) > 0:
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
                print('')
                print('New acquisitions were found and will be processed: ')
                print(acquisitionDates)
                print('')
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
            else:
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
                print('')
                print('No new acquisitions found.')
                print('')
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')

            safe_dict_new = {}
            for d in acquisitionDates:
                safe_dict_new[d] = safe_dict[d]
            safe_dict = safe_dict_new
    else:
        print('No existing processed SLCs were identified. All SLCs will be processed.')

    return acquisitionDates, safe_dict


def slcSimple(inps, acquisitionDates, safe_dict, mergeSLC=False):
    #############################
    i = 0

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_' + str(i))
    runObj.unpackSLC(acquisitionDates, safe_dict)
    runObj.finalize()

    if mergeSLC:
        i += 1
        runObj = run()
        runObj.configure(inps, 'run_' + str(i))
        runObj.mergeSLC(acquisitionDates, virtual='False')
        runObj.finalize()

    return i


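# Under the call pattern used in main() below, slcSimple produces run_1 (unpackSLC)
# and, since mergeSLC is True, run_2 (mergeSLC). A minimal sketch of executing the
# generated runs in order, assuming they are written to <work_dir>/run_files as plain
# shell scripts (an assumption based on the run_files check in main() and on helpstr):
#
#   import subprocess
#   for runFile in sorted(glob.glob(os.path.join(inps.work_dir, 'run_files', 'run_*'))):
#       subprocess.check_call(['bash', runFile])
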
def main(iargs=None):

    inps = cmdLineParse(iargs)

    if os.path.exists(os.path.join(inps.work_dir, 'run_files')):
        print('')
        print('**************************')
        print('run_files folder exists.')
        print(os.path.join(inps.work_dir, 'run_files'), ' already exists.')
        print('Please remove or rename this folder and try again.')
        print('')
        print('**************************')
        sys.exit(1)

    acquisitionDates, safe_dict = checkCurrentStatus(inps)

    slcSimple(inps, acquisitionDates, safe_dict, mergeSLC=True)


if __name__ == "__main__":
    # Main engine
    main()