From e9bd7edeb3ebf2811e92a46a789bb9bd7706a159 Mon Sep 17 00:00:00 2001 From: CunrenLiang <56097947+CunrenLiang@users.noreply.github.com> Date: Mon, 19 Oct 2020 19:42:07 -0700 Subject: [PATCH] new alosStack application --- applications/alos2App.py | 69 + applications/alos2burstApp.py | 60 + .../isceobj/Alos2Proc/Alos2ProcPublic.py | 229 ++- components/isceobj/Alos2Proc/CMakeLists.txt | 2 + components/isceobj/Alos2Proc/Factories.py | 2 + components/isceobj/Alos2Proc/SConscript | 2 +- components/isceobj/Alos2Proc/runBaseline.py | 229 +++ components/isceobj/Alos2Proc/runCoherence.py | 6 +- .../isceobj/Alos2Proc/runDiffInterferogram.py | 4 + components/isceobj/Alos2Proc/runFilt.py | 38 +- .../isceobj/Alos2Proc/runFormInterferogram.py | 4 + .../isceobj/Alos2Proc/runFrameMosaic.py | 177 +-- .../isceobj/Alos2Proc/runFrameOffset.py | 4 + components/isceobj/Alos2Proc/runGeo2Rdr.py | 4 + components/isceobj/Alos2Proc/runGeocode.py | 4 + components/isceobj/Alos2Proc/runIonCorrect.py | 155 ++ components/isceobj/Alos2Proc/runIonFilt.py | 136 +- components/isceobj/Alos2Proc/runIonSubband.py | 50 +- components/isceobj/Alos2Proc/runIonUwrap.py | 125 +- components/isceobj/Alos2Proc/runLook.py | 4 + .../isceobj/Alos2Proc/runPreprocessor.py | 281 +--- components/isceobj/Alos2Proc/runRdr2Geo.py | 4 + .../isceobj/Alos2Proc/runRdrDemOffset.py | 38 +- .../isceobj/Alos2Proc/runRectRangeOffset.py | 4 + components/isceobj/Alos2Proc/runSlcMosaic.py | 58 +- components/isceobj/Alos2Proc/runSlcOffset.py | 5 + .../isceobj/Alos2Proc/runSwathMosaic.py | 141 +- .../isceobj/Alos2Proc/runSwathOffset.py | 4 + .../isceobj/Alos2Proc/runUnwrapSnaphu.py | 38 +- .../isceobj/Alos2burstProc/Factories.py | 2 + .../isceobj/Alos2burstProc/runIonSubband.py | 46 +- .../isceobj/Alos2burstProc/runPreprocessor.py | 195 --- components/isceobj/Sensor/TerraSARX.py | 11 + .../stdproc/resamp_slc/src/resamp_slc.f90 | 4 +- contrib/Snaphu/src/snaphu_cs2.c | 82 +- contrib/stack/README.md | 5 +- contrib/stack/alosStack/Stack.py 
| 426 ++++++ contrib/stack/alosStack/StackPulic.py | 325 +++++ contrib/stack/alosStack/alos2_pairs.py | 86 ++ contrib/stack/alosStack/alosStack.xml | 379 +++++ .../stack/alosStack/alosStack_tutorial.txt | 250 ++++ contrib/stack/alosStack/compute_baseline.py | 186 +++ contrib/stack/alosStack/compute_burst_sync.py | 207 +++ contrib/stack/alosStack/create_cmds.py | 1248 +++++++++++++++++ contrib/stack/alosStack/diff_interferogram.py | 97 ++ .../stack/alosStack/estimate_frame_offset.py | 83 ++ .../stack/alosStack/estimate_slc_offset.py | 392 ++++++ .../stack/alosStack/estimate_swath_offset.py | 86 ++ contrib/stack/alosStack/filt.py | 108 ++ contrib/stack/alosStack/form_interferogram.py | 92 ++ contrib/stack/alosStack/geo2rdr.py | 132 ++ contrib/stack/alosStack/geocode.py | 87 ++ contrib/stack/alosStack/ion_check.py | 124 ++ contrib/stack/alosStack/ion_correct.py | 99 ++ contrib/stack/alosStack/ion_filt.py | 499 +++++++ contrib/stack/alosStack/ion_ls.py | 339 +++++ contrib/stack/alosStack/ion_subband.py | 619 ++++++++ contrib/stack/alosStack/ion_unwrap.py | 113 ++ contrib/stack/alosStack/look_coherence.py | 113 ++ contrib/stack/alosStack/look_geom.py | 130 ++ .../stack/alosStack/mosaic_interferogram.py | 226 +++ contrib/stack/alosStack/mosaic_parameter.py | 167 +++ contrib/stack/alosStack/pair_up.py | 195 +++ contrib/stack/alosStack/plot_baseline.py | 122 ++ contrib/stack/alosStack/radar_dem_offset.py | 116 ++ contrib/stack/alosStack/rdr2geo.py | 92 ++ contrib/stack/alosStack/read_data.py | 301 ++++ contrib/stack/alosStack/rect_range_offset.py | 101 ++ .../stack/alosStack/resample_common_grid.py | 500 +++++++ contrib/stack/alosStack/unwrap_snaphu.py | 101 ++ examples/input_files/alos2/alos2App.xml | 77 +- examples/input_files/alos2/alos2burstApp.xml | 56 +- .../scansar-scansar/1/alos2App.xml | 293 ---- .../scansar-scansar/2/alos2App.xml | 293 ---- .../scansar-scansar/3/alos2App.xml | 293 ---- .../scansar-scansar/4/alos2App.xml | 293 ---- 
.../scansar-scansar_7s/alos2App.xml | 293 ---- .../scansar-scansar_burst/1/alos2burstApp.xml | 279 ---- .../scansar-scansar_burst/2/alos2burstApp.xml | 279 ---- .../scansar-scansar_burst/3/alos2burstApp.xml | 279 ---- .../scansar-scansar_burst/4/alos2burstApp.xml | 279 ---- .../scansar-stripmap/1/alos2App.xml | 293 ---- .../scansar-stripmap/2/alos2App.xml | 293 ---- .../stripmap-stripmap/1/alos2App.xml | 293 ---- .../stripmap-stripmap/2/alos2App.xml | 293 ---- .../stripmap-stripmap/3/alos2App.xml | 293 ---- .../stripmap-stripmap/4/alos2App.xml | 293 ---- .../alos2/example_input_files/test1.sh | 3 + .../alos2/example_input_files/test2.sh | 3 + 89 files changed, 9627 insertions(+), 5214 deletions(-) create mode 100644 components/isceobj/Alos2Proc/runBaseline.py create mode 100644 components/isceobj/Alos2Proc/runIonCorrect.py create mode 100644 contrib/stack/alosStack/Stack.py create mode 100644 contrib/stack/alosStack/StackPulic.py create mode 100644 contrib/stack/alosStack/alos2_pairs.py create mode 100644 contrib/stack/alosStack/alosStack.xml create mode 100644 contrib/stack/alosStack/alosStack_tutorial.txt create mode 100644 contrib/stack/alosStack/compute_baseline.py create mode 100644 contrib/stack/alosStack/compute_burst_sync.py create mode 100644 contrib/stack/alosStack/create_cmds.py create mode 100644 contrib/stack/alosStack/diff_interferogram.py create mode 100644 contrib/stack/alosStack/estimate_frame_offset.py create mode 100644 contrib/stack/alosStack/estimate_slc_offset.py create mode 100644 contrib/stack/alosStack/estimate_swath_offset.py create mode 100644 contrib/stack/alosStack/filt.py create mode 100644 contrib/stack/alosStack/form_interferogram.py create mode 100644 contrib/stack/alosStack/geo2rdr.py create mode 100644 contrib/stack/alosStack/geocode.py create mode 100644 contrib/stack/alosStack/ion_check.py create mode 100644 contrib/stack/alosStack/ion_correct.py create mode 100644 contrib/stack/alosStack/ion_filt.py create mode 100644 
contrib/stack/alosStack/ion_ls.py create mode 100644 contrib/stack/alosStack/ion_subband.py create mode 100644 contrib/stack/alosStack/ion_unwrap.py create mode 100644 contrib/stack/alosStack/look_coherence.py create mode 100644 contrib/stack/alosStack/look_geom.py create mode 100644 contrib/stack/alosStack/mosaic_interferogram.py create mode 100644 contrib/stack/alosStack/mosaic_parameter.py create mode 100644 contrib/stack/alosStack/pair_up.py create mode 100644 contrib/stack/alosStack/plot_baseline.py create mode 100644 contrib/stack/alosStack/radar_dem_offset.py create mode 100644 contrib/stack/alosStack/rdr2geo.py create mode 100644 contrib/stack/alosStack/read_data.py create mode 100644 contrib/stack/alosStack/rect_range_offset.py create mode 100644 contrib/stack/alosStack/resample_common_grid.py create mode 100644 contrib/stack/alosStack/unwrap_snaphu.py diff --git a/applications/alos2App.py b/applications/alos2App.py index c56b666..5fcd3b5 100755 --- a/applications/alos2App.py +++ b/applications/alos2App.py @@ -117,6 +117,13 @@ WBD = Application.Parameter('wbd', mandatory=False, doc='water body file') +DO_INSAR = Application.Parameter('doInSAR', + public_name='do InSAR', + default = True, + type = bool, + mandatory = False, + doc = 'do InSAR') + USE_VIRTUAL_FILE = Application.Parameter('useVirtualFile', public_name = 'use virtual file', default=True, @@ -331,6 +338,22 @@ SWATH_PHASE_DIFF_SNAP_ION = Application.Parameter('swathPhaseDiffSnapIon', container = list, doc = 'swath phase difference snap to fixed values') +SWATH_PHASE_DIFF_LOWER_ION = Application.Parameter('swathPhaseDiffLowerIon', + public_name = 'swath phase difference of lower band', + default = None, + type = float, + mandatory = False, + container = list, + doc = 'swath phase difference of lower band') + +SWATH_PHASE_DIFF_UPPER_ION = Application.Parameter('swathPhaseDiffUpperIon', + public_name = 'swath phase difference of upper band', + default = None, + type = float, + mandatory = False, + 
container = list, + doc = 'swath phase difference of upper band') + FIT_ION = Application.Parameter('fitIon', public_name = 'apply polynomial fit before filtering ionosphere phase', default = True, @@ -352,6 +375,13 @@ FIT_ADAPTIVE_ION = Application.Parameter('fitAdaptiveIon', mandatory = False, doc = 'apply polynomial fit in adaptive filtering window') +FILT_SECONDARY_ION = Application.Parameter('filtSecondaryIon', + public_name = 'whether do secondary filtering of ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'whether do secondary filtering of ionosphere phase') + FILTERING_WINSIZE_MAX_ION = Application.Parameter('filteringWinsizeMaxIon', public_name='maximum window size for filtering ionosphere phase', default=301, @@ -366,6 +396,20 @@ FILTERING_WINSIZE_MIN_ION = Application.Parameter('filteringWinsizeMinIon', mandatory=False, doc='minimum window size for filtering ionosphere phase') +FILTERING_WINSIZE_SECONDARY_ION = Application.Parameter('filteringWinsizeSecondaryIon', + public_name='window size of secondary filtering of ionosphere phase', + default=5, + type=int, + mandatory=False, + doc='window size of secondary filtering of ionosphere phase') + +FILTER_STD_ION = Application.Parameter('filterStdIon', + public_name = 'standard deviation of ionosphere phase after filtering', + default = None, + type=float, + mandatory = False, + doc = 'standard deviation of ionosphere phase after filtering') + FILTER_SUBBAND_INT = Application.Parameter('filterSubbandInt', public_name = 'filter subband interferogram', default = False, @@ -601,6 +645,7 @@ class Alos2InSAR(Application): DEM, DEM_GEO, WBD, + DO_INSAR, USE_VIRTUAL_FILE, USE_GPU, BURST_SYNCHRONIZATION_THRESHOLD, @@ -631,11 +676,16 @@ class Alos2InSAR(Application): NUMBER_AZIMUTH_LOOKS_ION, MASKED_AREAS_ION, SWATH_PHASE_DIFF_SNAP_ION, + SWATH_PHASE_DIFF_LOWER_ION, + SWATH_PHASE_DIFF_UPPER_ION, FIT_ION, FILT_ION, FIT_ADAPTIVE_ION, + FILT_SECONDARY_ION, FILTERING_WINSIZE_MAX_ION, 
FILTERING_WINSIZE_MIN_ION, + FILTERING_WINSIZE_SECONDARY_ION, + FILTER_STD_ION, FILTER_SUBBAND_INT, FILTER_STRENGTH_SUBBAND_INT, FILTER_WINSIZE_SUBBAND_INT, @@ -775,6 +825,7 @@ class Alos2InSAR(Application): ## Add instance attribute RunWrapper functions, which emulate methods. def _add_methods(self): self.runPreprocessor = Alos2Proc.createPreprocessor(self) + self.runBaseline = Alos2Proc.createBaseline(self) self.runDownloadDem = Alos2Proc.createDownloadDem(self) self.runPrepareSlc = Alos2Proc.createPrepareSlc(self) self.runSlcOffset = Alos2Proc.createSlcOffset(self) @@ -793,6 +844,7 @@ class Alos2InSAR(Application): self.runIonSubband = Alos2Proc.createIonSubband(self) self.runIonUwrap = Alos2Proc.createIonUwrap(self) self.runIonFilt = Alos2Proc.createIonFilt(self) + self.runIonCorrect = Alos2Proc.createIonCorrect(self) self.runFilt = Alos2Proc.createFilt(self) self.runUnwrapSnaphu = Alos2Proc.createUnwrapSnaphu(self) self.runGeocode = Alos2Proc.createGeocode(self) @@ -822,6 +874,13 @@ class Alos2InSAR(Application): ) ) + self.step('baseline', + func=self.runBaseline, + doc=( + """compute baseline, burst synchronization etc""" + ) + ) + self.step('download_dem', func=self.runDownloadDem, doc=( @@ -934,6 +993,12 @@ class Alos2InSAR(Application): ) ) + self.step('ion_correct', func=self.runIonCorrect, + doc=( + """resample ionospheric phase and ionospheric correction""" + ) + ) + self.step('filt', func=self.runFilt, doc=( """filter interferogram""" @@ -995,6 +1060,8 @@ class Alos2InSAR(Application): # Run a preprocessor for the two sets of frames self.runPreprocessor() + self.runBaseline() + self.runDownloadDem() self.runPrepareSlc() @@ -1031,6 +1098,8 @@ class Alos2InSAR(Application): self.runIonFilt() + self.runIonCorrect() + self.runFilt() self.runUnwrapSnaphu() diff --git a/applications/alos2burstApp.py b/applications/alos2burstApp.py index 8470051..43c264f 100755 --- a/applications/alos2burstApp.py +++ b/applications/alos2burstApp.py @@ -321,6 +321,22 @@ 
SWATH_PHASE_DIFF_SNAP_ION = Application.Parameter('swathPhaseDiffSnapIon', container = list, doc = 'swath phase difference snap to fixed values') +SWATH_PHASE_DIFF_LOWER_ION = Application.Parameter('swathPhaseDiffLowerIon', + public_name = 'swath phase difference of lower band', + default = None, + type = float, + mandatory = False, + container = list, + doc = 'swath phase difference of lower band') + +SWATH_PHASE_DIFF_UPPER_ION = Application.Parameter('swathPhaseDiffUpperIon', + public_name = 'swath phase difference of upper band', + default = None, + type = float, + mandatory = False, + container = list, + doc = 'swath phase difference of upper band') + FIT_ION = Application.Parameter('fitIon', public_name = 'apply polynomial fit before filtering ionosphere phase', default = True, @@ -342,6 +358,13 @@ FIT_ADAPTIVE_ION = Application.Parameter('fitAdaptiveIon', mandatory = False, doc = 'apply polynomial fit in adaptive filtering window') +FILT_SECONDARY_ION = Application.Parameter('filtSecondaryIon', + public_name = 'whether do secondary filtering of ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'whether do secondary filtering of ionosphere phase') + FILTERING_WINSIZE_MAX_ION = Application.Parameter('filteringWinsizeMaxIon', public_name='maximum window size for filtering ionosphere phase', default=301, @@ -356,6 +379,20 @@ FILTERING_WINSIZE_MIN_ION = Application.Parameter('filteringWinsizeMinIon', mandatory=False, doc='minimum window size for filtering ionosphere phase') +FILTERING_WINSIZE_SECONDARY_ION = Application.Parameter('filteringWinsizeSecondaryIon', + public_name='window size of secondary filtering of ionosphere phase', + default=5, + type=int, + mandatory=False, + doc='window size of secondary filtering of ionosphere phase') + +FILTER_STD_ION = Application.Parameter('filterStdIon', + public_name = 'standard deviation of ionosphere phase after filtering', + default = None, + type=float, + mandatory = False, + doc = 
'standard deviation of ionosphere phase after filtering') + FILTER_SUBBAND_INT = Application.Parameter('filterSubbandInt', public_name = 'filter subband interferogram', default = False, @@ -566,11 +603,16 @@ class Alos2burstInSAR(Application): NUMBER_AZIMUTH_LOOKS_ION, MASKED_AREAS_ION, SWATH_PHASE_DIFF_SNAP_ION, + SWATH_PHASE_DIFF_LOWER_ION, + SWATH_PHASE_DIFF_UPPER_ION, FIT_ION, FILT_ION, FIT_ADAPTIVE_ION, + FILT_SECONDARY_ION, FILTERING_WINSIZE_MAX_ION, FILTERING_WINSIZE_MIN_ION, + FILTERING_WINSIZE_SECONDARY_ION, + FILTER_STD_ION, FILTER_SUBBAND_INT, FILTER_STRENGTH_SUBBAND_INT, FILTER_WINSIZE_SUBBAND_INT, @@ -704,6 +746,7 @@ class Alos2burstInSAR(Application): ## Add instance attribute RunWrapper functions, which emulate methods. def _add_methods(self): self.runPreprocessor = Alos2burstProc.createPreprocessor(self) + self.runBaseline = Alos2burstProc.createBaseline(self) self.runExtractBurst = Alos2burstProc.createExtractBurst(self) self.runDownloadDem = Alos2burstProc.createDownloadDem(self) self.runCoregGeom = Alos2burstProc.createCoregGeom(self) @@ -723,6 +766,7 @@ class Alos2burstInSAR(Application): self.runIonSubband = Alos2burstProc.createIonSubband(self) self.runIonUwrap = Alos2burstProc.createIonUwrap(self) self.runIonFilt = Alos2burstProc.createIonFilt(self) + self.runIonCorrect = Alos2burstProc.createIonCorrect(self) self.runFilt = Alos2burstProc.createFilt(self) self.runUnwrapSnaphu = Alos2burstProc.createUnwrapSnaphu(self) self.runGeocode = Alos2burstProc.createGeocode(self) @@ -749,6 +793,12 @@ class Alos2burstInSAR(Application): ) ) + self.step('baseline', func=self.runBaseline, + doc=( + """compute baseline, burst synchronization etc""" + ) + ) + self.step('extract_burst', func=self.runExtractBurst, doc=( """extract bursts from full aperture images""" @@ -863,6 +913,12 @@ class Alos2burstInSAR(Application): ) ) + self.step('ion_correct', func=self.runIonCorrect, + doc=( + """resample ionospheric phase and ionospheric correction""" + ) + ) + 
self.step('filt', func=self.runFilt, doc=( """filter interferogram""" @@ -916,6 +972,8 @@ class Alos2burstInSAR(Application): # Run a preprocessor for the two sets of frames self.runPreprocessor() + self.runBaseline() + self.runExtractBurst() self.runDownloadDem() @@ -954,6 +1012,8 @@ class Alos2burstInSAR(Application): self.runIonFilt() + self.runIonCorrect() + self.runFilt() self.runUnwrapSnaphu() diff --git a/components/isceobj/Alos2Proc/Alos2ProcPublic.py b/components/isceobj/Alos2Proc/Alos2ProcPublic.py index edd4fcf..b48dd17 100644 --- a/components/isceobj/Alos2Proc/Alos2ProcPublic.py +++ b/components/isceobj/Alos2Proc/Alos2ProcPublic.py @@ -91,7 +91,7 @@ def create_xml(fileName, width, length, fileType): #image.finalizeImage() -def multilook_v1(data, nalks, nrlks): +def multilook_v1(data, nalks, nrlks, mean=True): ''' doing multiple looking ATTENSION: original array changed after running this function @@ -106,10 +106,13 @@ def multilook_v1(data, nalks, nrlks): for i in range(1, nrlks): data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] += data[0:length2*nalks:nalks, i:width2*nrlks:nrlks] - return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] / nrlks / nalks + if mean: + return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] / nrlks / nalks + else: + return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] -def multilook(data, nalks, nrlks): +def multilook(data, nalks, nrlks, mean=True): ''' doing multiple looking ''' @@ -125,7 +128,10 @@ def multilook(data, nalks, nrlks): for i in range(1, nrlks): data2[:, 0:width2*nrlks:nrlks] += data2[:, i:width2*nrlks:nrlks] - return data2[:, 0:width2*nrlks:nrlks] / nrlks / nalks + if mean: + return data2[:, 0:width2*nrlks:nrlks] / nrlks / nalks + else: + return data2[:, 0:width2*nrlks:nrlks] def cal_coherence_1(inf, win=5): @@ -281,9 +287,9 @@ def reformatGeometricalOffset(rangeOffsetFile, azimuthOffsetFile, reformatedOffs offsetsPlain = offsetsPlain + "{:8d} {:10.3f} {:8d} {:12.3f} {:11.5f} {:11.6f} {:11.6f} 
{:11.6f}\n".format( int(j*rangeStep+1), - float(rgoff[i][j]), + float(rgoff[i][j])*rangeStep, int(i*azimuthStep+1), - float(azoff[i][j]), + float(azoff[i][j])*azimuthStep, float(22.00015), float(0.000273), float(0.002126), @@ -749,7 +755,7 @@ def snaphuUnwrap(track, t, wrapName, corName, unwrapName, nrlks, nalks, costMode return -def snaphuUnwrapOriginal(wrapName, corName, ampName, unwrapName, costMode = 's', initMethod = 'mcf'): +def snaphuUnwrapOriginal(wrapName, corName, ampName, unwrapName, costMode = 's', initMethod = 'mcf', snaphuConfFile = 'snaphu.conf'): ''' unwrap interferogram using original snaphu program ''' @@ -762,7 +768,7 @@ def snaphuUnwrapOriginal(wrapName, corName, ampName, unwrapName, costMode = 's', length = corImg.length #specify coherence file format in configure file - snaphuConfFile = 'snaphu.conf' + #snaphuConfFile = 'snaphu.conf' if corImg.bands == 1: snaphuConf = '''CORRFILEFORMAT FLOAT_DATA CONNCOMPFILE {} @@ -809,7 +815,7 @@ MAXNCOMPS 20'''.format(unwrapName+'.conncomp') return -def getBboxGeo(track): +def getBboxGeo(track, useTrackOnly=False, numberOfSamples=1, numberOfLines=1, numberRangeLooks=1, numberAzimuthLooks=1): ''' get bounding box in geo-coordinate ''' @@ -817,7 +823,15 @@ def getBboxGeo(track): pointingDirection = {'right': -1, 'left' :1} - bboxRdr = getBboxRdr(track) + if useTrackOnly: + import datetime + rangeMin = track.startingRange + (numberRangeLooks-1.0)/2.0*track.rangePixelSize + rangeMax = rangeMin + (numberOfSamples-1) * numberRangeLooks * track.rangePixelSize + azimuthTimeMin = track.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*track.azimuthLineInterval) + azimuthTimeMax = azimuthTimeMin + datetime.timedelta(seconds=(numberOfLines-1) * numberAzimuthLooks * track.azimuthLineInterval) + bboxRdr = [rangeMin, rangeMax, azimuthTimeMin, azimuthTimeMax] + else: + bboxRdr = getBboxRdr(track) rangeMin = bboxRdr[0] rangeMax = bboxRdr[1] @@ -1254,8 +1268,199 @@ def snap(inputValue, fixedValues, 
snapThreshold): return (outputValue, snapped) - - +modeProcParDict = { + 'ALOS-2': { + #All SPT (SBS) modes are the same + 'SBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.015 + }, + #All SM1 (UBS, UBD) modes are the same + 'UBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 3, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 32, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.015 + }, + 'UBD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 3, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 32, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.015 + }, + #All SM2 (HBS, HBD, HBQ) modes are the same + 'HBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.035 + }, + 'HBD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.035 + }, + 'HBQ': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.035 + }, + #All SM3 (FBS, FBD, FBQ) modes are the same + 'FBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.075 + }, + 'FBD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.075 + }, + 'FBQ': { + 'numberRangeLooks1': 
2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.075 + }, + #All WD1 (WBS, WBD) modes are the same + 'WBS': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + }, + 'WBD': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + }, + #All WD1 (WWS, WWD) modes are the same + 'WWS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.075 + }, + 'WWD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.075 + }, + #All WD2 (VBS, VBD) modes are the same + 'VBS': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + }, + 'VBD': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + } + } + } +import numpy as np +filterStdPolyIon = np.array([ 2.31536879e-05, -3.41687763e-03, 1.39904121e-01]) diff --git a/components/isceobj/Alos2Proc/CMakeLists.txt b/components/isceobj/Alos2Proc/CMakeLists.txt index e9fef33..26214cc 100644 --- a/components/isceobj/Alos2Proc/CMakeLists.txt +++ b/components/isceobj/Alos2Proc/CMakeLists.txt @@ -4,6 +4,7 @@ InstallSameDir( Alos2ProcPublic.py Factories.py 
denseOffsetNote.txt + runBaseline.py runCoherence.py runDenseOffset.py runDiffInterferogram.py @@ -16,6 +17,7 @@ InstallSameDir( runGeo2Rdr.py runGeocode.py runGeocodeOffset.py + runIonCorrect.py runIonFilt.py runIonSubband.py runIonUwrap.py diff --git a/components/isceobj/Alos2Proc/Factories.py b/components/isceobj/Alos2Proc/Factories.py index d450c46..74184bc 100644 --- a/components/isceobj/Alos2Proc/Factories.py +++ b/components/isceobj/Alos2Proc/Factories.py @@ -74,6 +74,7 @@ def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None): createPreprocessor = _factory("runPreprocessor") +createBaseline = _factory("runBaseline") createDownloadDem = _factory("runDownloadDem") createPrepareSlc = _factory("runPrepareSlc") createSlcOffset = _factory("runSlcOffset") @@ -92,6 +93,7 @@ createCoherence = _factory("runCoherence") createIonSubband = _factory("runIonSubband") createIonUwrap = _factory("runIonUwrap") createIonFilt = _factory("runIonFilt") +createIonCorrect = _factory("runIonCorrect") createFilt = _factory("runFilt") createUnwrapSnaphu = _factory("runUnwrapSnaphu") createGeocode = _factory("runGeocode") diff --git a/components/isceobj/Alos2Proc/SConscript b/components/isceobj/Alos2Proc/SConscript index 66748bb..2cb882b 100644 --- a/components/isceobj/Alos2Proc/SConscript +++ b/components/isceobj/Alos2Proc/SConscript @@ -40,6 +40,6 @@ project = 'Alos2Proc' install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) -listFiles = ['__init__.py', 'Factories.py', 'Alos2Proc.py', 'Alos2ProcPublic.py', 'runPreprocessor.py', 'runDownloadDem.py', 'runPrepareSlc.py', 'runSlcOffset.py', 'runFormInterferogram.py', 'runSwathOffset.py', 'runSwathMosaic.py', 'runFrameOffset.py', 'runFrameMosaic.py', 'runRdr2Geo.py', 'runGeo2Rdr.py', 'runRdrDemOffset.py', 'runRectRangeOffset.py', 'runDiffInterferogram.py', 'runLook.py', 'runCoherence.py', 'runIonSubband.py', 'runIonUwrap.py', 'runIonFilt.py', 'runFilt.py', 'runUnwrapSnaphu.py', 'runGeocode.py', 
'srtm_no_swbd_tiles.txt', 'srtm_tiles.txt', 'swbd_tiles.txt', 'runSlcMosaic.py', 'runSlcMatch.py', 'runDenseOffset.py', 'runFiltOffset.py', 'runGeocodeOffset.py', 'denseOffsetNote.txt'] +listFiles = ['__init__.py', 'Factories.py', 'Alos2Proc.py', 'Alos2ProcPublic.py', 'runPreprocessor.py', 'runBaseline.py', 'runDownloadDem.py', 'runPrepareSlc.py', 'runSlcOffset.py', 'runFormInterferogram.py', 'runSwathOffset.py', 'runSwathMosaic.py', 'runFrameOffset.py', 'runFrameMosaic.py', 'runRdr2Geo.py', 'runGeo2Rdr.py', 'runRdrDemOffset.py', 'runRectRangeOffset.py', 'runDiffInterferogram.py', 'runLook.py', 'runCoherence.py', 'runIonSubband.py', 'runIonUwrap.py', 'runIonFilt.py', 'runIonCorrect.py', 'runFilt.py', 'runUnwrapSnaphu.py', 'runGeocode.py', 'srtm_no_swbd_tiles.txt', 'srtm_tiles.txt', 'swbd_tiles.txt', 'runSlcMosaic.py', 'runSlcMatch.py', 'runDenseOffset.py', 'runFiltOffset.py', 'runGeocodeOffset.py', 'denseOffsetNote.txt'] envisceobj.Install(install,listFiles) envisceobj.Alias('install',install) diff --git a/components/isceobj/Alos2Proc/runBaseline.py b/components/isceobj/Alos2Proc/runBaseline.py new file mode 100644 index 0000000..cf8951b --- /dev/null +++ b/components/isceobj/Alos2Proc/runBaseline.py @@ -0,0 +1,229 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +import isceobj.Sensor.MultiMode as MultiMode +from isceobj.Planet.Planet import Planet +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +logger = logging.getLogger('isce.alos2insar.runBaseline') + +def runBaseline(self): + '''compute baseline + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = 
self._insar.loadTrack(reference=False) + + + ################################################## + #2. compute burst synchronization + ################################################## + #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights + #in one frame, real unsynchronized time is the same for all swaths + unsynTime = 0 + #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime)) + #synTime = 0 + synPercentage = 0 + + numberOfFrames = len(self._insar.referenceFrames) + numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1 + + for i, frameNumber in enumerate(self._insar.referenceFrames): + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + #using Piyush's code for computing range and azimuth offsets + midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5 + midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf) + llh = referenceTrack.orbit.rdr2geo(midSensingStart, midRange) + slvaz, slvrng = secondaryTrack.orbit.geo2rdr(llh) + ###Translate to offsets + #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same + #range pixel size and prf + rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5 + azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5 + + #compute burst synchronization + #burst parameters for ScanSAR wide mode not estimed yet + if self._insar.modeCombination == 21: + scburstStartLine = (referenceSwath.burstStartTime - 
referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff + #secondary burst start times corresponding to reference burst start times (100% synchronization) + scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \ + scburstStartLine + 100000*referenceSwath.burstCycleLength, \ + referenceSwath.burstCycleLength) + dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines) + #find the difference with minimum absolute value + unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))] + if np.absolute(unsynLines) >= secondarySwath.burstLength: + synLines = 0 + if unsynLines > 0: + unsynLines = secondarySwath.burstLength + else: + unsynLines = -secondarySwath.burstLength + else: + synLines = secondarySwath.burstLength - np.absolute(unsynLines) + + unsynTime += unsynLines / referenceSwath.prf + synPercentage += synLines / referenceSwath.burstLength * 100.0 + + catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runBaseline') + + ############################################################################################ + #illustration of the sign of the number of unsynchronized lines (unsynLines) + #The convention is the same as ampcor offset, that is, + # secondaryLineNumber = referenceLineNumber + unsynLines + # + # |-----------------------| ------------ + # | | ^ + # | | | + # | | | unsynLines < 0 + # | | | + # | | \ / + # | | |-----------------------| + # | | | | + # | | | | + # |-----------------------| | | + # Reference Burst | | + # | | + # | | + # | | + # | | + # |-----------------------| + # Secondary Burst + # + # + ############################################################################################ + + ##burst parameters for ScanSAR wide mode not estimed yet + elif self._insar.modeCombination == 31: + 
#scansar is reference + scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff + #secondary burst start times corresponding to reference burst start times (100% synchronization) + for k in range(-100000, 100000): + saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k + st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf) + if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1: + secondarySwath.burstStartTime = st_burstx + secondarySwath.burstLength = referenceSwath.burstLength + secondarySwath.burstCycleLength = referenceSwath.burstCycleLength + secondarySwath.swathNumber = referenceSwath.swathNumber + break + #unsynLines = 0 + #synLines = referenceSwath.burstLength + #unsynTime += unsynLines / referenceSwath.prf + #synPercentage += synLines / referenceSwath.burstLength * 100.0 + catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runBaseline') + else: + pass + + #overwrite original frame parameter file + if self._insar.modeCombination == 31: + frameDir = 'f{}_{}'.format(i+1, frameNumber) + self._insar.saveProduct(secondaryTrack.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter)) + + #getting average + if self._insar.modeCombination == 21: + unsynTime /= numberOfFrames*numberOfSwaths + synPercentage /= numberOfFrames*numberOfSwaths + elif self._insar.modeCombination == 31: + unsynTime = 0. + synPercentage = 100. + else: + pass + + #record results + if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31): + self._insar.burstUnsynchronizedTime = unsynTime + self._insar.burstSynchronization = synPercentage + catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runBaseline') + + + ################################################## + #3. 
compute baseline + ################################################## + #only compute baseline at four corners and center of the reference track + bboxRdr = getBboxRdr(referenceTrack) + + rangeMin = bboxRdr[0] + rangeMax = bboxRdr[1] + azimuthTimeMin = bboxRdr[2] + azimuthTimeMax = bboxRdr[3] + + azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0) + rangeMid = (rangeMin + rangeMax) / 2.0 + + points = [[azimuthTimeMin, rangeMin], + [azimuthTimeMin, rangeMax], + [azimuthTimeMax, rangeMin], + [azimuthTimeMax, rangeMax], + [azimuthTimeMid, rangeMid]] + + Bpar = [] + Bperp = [] + #modify Piyush's code for computing baslines + refElp = Planet(pname='Earth').ellipsoid + for x in points: + referenceSV = referenceTrack.orbit.interpolate(x[0], method='hermite') + target = referenceTrack.orbit.rdr2geo(x[0], x[1]) + + slvTime, slvrng = secondaryTrack.orbit.geo2rdr(target) + secondarySV = secondaryTrack.orbit.interpolateOrbit(slvTime, method='hermite') + + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + sxyz = np.array(secondarySV.getPosition()) + + #to fix abrupt change near zero in baseline grid. 
JUN-05-2020 + mvelunit = mvel / np.linalg.norm(mvel) + sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa) + + Bpar.append(aa*costheta) + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp.append(direction*perp) + + catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runBaseline') + catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runBaseline') + catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runBaseline') + catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runBaseline') + catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runBaseline') + + catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runBaseline') + catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runBaseline') + catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runBaseline') + catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runBaseline') + catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runBaseline') + + + ################################################## + #4. 
compute bounding box + ################################################## + referenceBbox = getBboxGeo(referenceTrack) + secondaryBbox = getBboxGeo(secondaryTrack) + + catalog.addItem('reference bounding box', referenceBbox, 'runBaseline') + catalog.addItem('secondary bounding box', secondaryBbox, 'runBaseline') + + + catalog.printToLog(logger, "runBaseline") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2Proc/runCoherence.py b/components/isceobj/Alos2Proc/runCoherence.py index 07fae55..335fd04 100644 --- a/components/isceobj/Alos2Proc/runCoherence.py +++ b/components/isceobj/Alos2Proc/runCoherence.py @@ -13,8 +13,12 @@ from isceobj.Alos2Proc.Alos2ProcPublic import runCmd logger = logging.getLogger('isce.alos2insar.runCoherence') def runCoherence(self): - '''Extract images. + '''estimate coherence ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runDiffInterferogram.py b/components/isceobj/Alos2Proc/runDiffInterferogram.py index 0ac3c3a..b43750e 100644 --- a/components/isceobj/Alos2Proc/runDiffInterferogram.py +++ b/components/isceobj/Alos2Proc/runDiffInterferogram.py @@ -15,6 +15,10 @@ logger = logging.getLogger('isce.alos2insar.runDiffInterferogram') def runDiffInterferogram(self): '''Extract images. 
''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runFilt.py b/components/isceobj/Alos2Proc/runFilt.py index 0452943..27bf795 100644 --- a/components/isceobj/Alos2Proc/runFilt.py +++ b/components/isceobj/Alos2Proc/runFilt.py @@ -21,12 +21,24 @@ logger = logging.getLogger('isce.alos2insar.runFilt') def runFilt(self): '''filter interferogram ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() #referenceTrack = self._insar.loadTrack(reference=True) #secondaryTrack = self._insar.loadTrack(reference=False) + filt(self) + + catalog.printToLog(logger, "runFilt") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def filt(self): + insarDir = 'insar' os.makedirs(insarDir, exist_ok=True) os.chdir(insarDir) @@ -150,21 +162,17 @@ def runFilt(self): print('\nmask filtered interferogram using: {}'.format(self._insar.multilookWbdOut)) if self.waterBodyMaskStartingStep=='filt': - if not os.path.exists(self._insar.multilookWbdOut): - catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runFilt') - else: - wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width) - phsig=np.memmap(self._insar.multilookPhsig, dtype='float32', mode='r+', shape=(length, width)) - phsig[np.nonzero(wbd==-1)]=0 - del phsig - filt=np.memmap(self._insar.filteredInterferogram, dtype='complex64', mode='r+', shape=(length, width)) - filt[np.nonzero(wbd==-1)]=0 - del filt - del wbd + #if not os.path.exists(self._insar.multilookWbdOut): + # catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runFilt') + #else: + wbd = np.fromfile(self._insar.multilookWbdOut, 
dtype=np.int8).reshape(length, width) + phsig=np.memmap(self._insar.multilookPhsig, dtype='float32', mode='r+', shape=(length, width)) + phsig[np.nonzero(wbd==-1)]=0 + del phsig + filt=np.memmap(self._insar.filteredInterferogram, dtype='complex64', mode='r+', shape=(length, width)) + filt[np.nonzero(wbd==-1)]=0 + del filt + del wbd os.chdir('../') - - catalog.printToLog(logger, "runFilt") - self._insar.procDoc.addAllFromCatalog(catalog) - diff --git a/components/isceobj/Alos2Proc/runFormInterferogram.py b/components/isceobj/Alos2Proc/runFormInterferogram.py index 0e17417..2095b89 100644 --- a/components/isceobj/Alos2Proc/runFormInterferogram.py +++ b/components/isceobj/Alos2Proc/runFormInterferogram.py @@ -18,6 +18,10 @@ logger = logging.getLogger('isce.alos2insar.runFormInterferogram') def runFormInterferogram(self): '''form interferograms. ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runFrameMosaic.py b/components/isceobj/Alos2Proc/runFrameMosaic.py index 152b6c0..75fac03 100644 --- a/components/isceobj/Alos2Proc/runFrameMosaic.py +++ b/components/isceobj/Alos2Proc/runFrameMosaic.py @@ -17,6 +17,10 @@ logger = logging.getLogger('isce.alos2insar.runFrameMosaic') def runFrameMosaic(self): '''mosaic frames ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() @@ -193,90 +197,107 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num if i == 0: rinfs[i] = inf else: - infImg = isceobj.createImage() - infImg.load(inf+'.xml') - rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i]) - azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i]) + #no need to resample + if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and 
(abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001): + if os.path.isfile(rinfs[i]): + os.remove(rinfs[i]) + os.symlink(inf, rinfs[i]) + #all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like + #4.99999999999... + rangeOffsets2[i] = round(rangeOffsets2[i]) + azimuthOffsets2[i] = round(azimuthOffsets2[i]) - if resamplingMethod == 0: - rect_with_looks(inf, - rinfs[i], - infImg.width, infImg.length, - infImg.width, infImg.length, - 1.0, 0.0, - 0.0, 1.0, - rangeOffsets2Frac, azimuthOffsets2Frac, - 1,1, - 1,1, - 'COMPLEX', - 'Bilinear') - if infImg.getImageType() == 'amp': - create_xml(rinfs[i], infImg.width, infImg.length, 'amp') - else: - create_xml(rinfs[i], infImg.width, infImg.length, 'int') - - elif resamplingMethod == 1: - #decompose amplitude and phase - phaseFile = 'phase' - amplitudeFile = 'amplitude' - data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width) - phase = np.exp(np.complex64(1j) * np.angle(data)) - phase[np.nonzero(data==0)] = 0 - phase.astype(np.complex64).tofile(phaseFile) - amplitude = np.absolute(data) - amplitude.astype(np.float32).tofile(amplitudeFile) - - #resampling - phaseRectFile = 'phaseRect' - amplitudeRectFile = 'amplitudeRect' - rect_with_looks(phaseFile, - phaseRectFile, - infImg.width, infImg.length, - infImg.width, infImg.length, - 1.0, 0.0, - 0.0, 1.0, - rangeOffsets2Frac, azimuthOffsets2Frac, - 1,1, - 1,1, - 'COMPLEX', - 'Sinc') - rect_with_looks(amplitudeFile, - amplitudeRectFile, - infImg.width, infImg.length, - infImg.width, infImg.length, - 1.0, 0.0, - 0.0, 1.0, - rangeOffsets2Frac, azimuthOffsets2Frac, - 1,1, - 1,1, - 'REAL', - 'Bilinear') - - #recombine amplitude and phase - phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(infImg.length, infImg.width) - amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(infImg.length, infImg.width) - (phase*amplitude).astype(np.complex64).tofile(rinfs[i]) - - #tidy 
up - os.remove(phaseFile) - os.remove(amplitudeFile) - os.remove(phaseRectFile) - os.remove(amplitudeRectFile) + infImg = isceobj.createImage() + infImg.load(inf+'.xml') if infImg.getImageType() == 'amp': create_xml(rinfs[i], infImg.width, infImg.length, 'amp') else: create_xml(rinfs[i], infImg.width, infImg.length, 'int') else: - resamp(inf, - rinfs[i], - 'fake', - 'fake', - infImg.width, infImg.length, - frames[i].swaths[0].prf, - frames[i].swaths[0].dopplerVsPixel, - [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - create_xml(rinfs[i], infImg.width, infImg.length, 'slc') + infImg = isceobj.createImage() + infImg.load(inf+'.xml') + rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i]) + azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i]) + + if resamplingMethod == 0: + rect_with_looks(inf, + rinfs[i], + infImg.width, infImg.length, + infImg.width, infImg.length, + 1.0, 0.0, + 0.0, 1.0, + rangeOffsets2Frac, azimuthOffsets2Frac, + 1,1, + 1,1, + 'COMPLEX', + 'Bilinear') + if infImg.getImageType() == 'amp': + create_xml(rinfs[i], infImg.width, infImg.length, 'amp') + else: + create_xml(rinfs[i], infImg.width, infImg.length, 'int') + + elif resamplingMethod == 1: + #decompose amplitude and phase + phaseFile = 'phase' + amplitudeFile = 'amplitude' + data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width) + phase = np.exp(np.complex64(1j) * np.angle(data)) + phase[np.nonzero(data==0)] = 0 + phase.astype(np.complex64).tofile(phaseFile) + amplitude = np.absolute(data) + amplitude.astype(np.float32).tofile(amplitudeFile) + + #resampling + phaseRectFile = 'phaseRect' + amplitudeRectFile = 'amplitudeRect' + rect_with_looks(phaseFile, + phaseRectFile, + infImg.width, infImg.length, + infImg.width, infImg.length, + 1.0, 0.0, + 0.0, 1.0, + rangeOffsets2Frac, azimuthOffsets2Frac, + 1,1, + 1,1, + 'COMPLEX', + 'Sinc') + 
rect_with_looks(amplitudeFile, + amplitudeRectFile, + infImg.width, infImg.length, + infImg.width, infImg.length, + 1.0, 0.0, + 0.0, 1.0, + rangeOffsets2Frac, azimuthOffsets2Frac, + 1,1, + 1,1, + 'REAL', + 'Bilinear') + + #recombine amplitude and phase + phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(infImg.length, infImg.width) + amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(infImg.length, infImg.width) + (phase*amplitude).astype(np.complex64).tofile(rinfs[i]) + + #tidy up + os.remove(phaseFile) + os.remove(amplitudeFile) + os.remove(phaseRectFile) + os.remove(amplitudeRectFile) + if infImg.getImageType() == 'amp': + create_xml(rinfs[i], infImg.width, infImg.length, 'amp') + else: + create_xml(rinfs[i], infImg.width, infImg.length, 'int') + else: + resamp(inf, + rinfs[i], + 'fake', + 'fake', + infImg.width, infImg.length, + frames[i].swaths[0].prf, + frames[i].swaths[0].dopplerVsPixel, + [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + create_xml(rinfs[i], infImg.width, infImg.length, 'slc') #determine output width and length #actually no need to calculate in azimuth direction diff --git a/components/isceobj/Alos2Proc/runFrameOffset.py b/components/isceobj/Alos2Proc/runFrameOffset.py index 4a95e2d..635e94c 100644 --- a/components/isceobj/Alos2Proc/runFrameOffset.py +++ b/components/isceobj/Alos2Proc/runFrameOffset.py @@ -13,6 +13,10 @@ logger = logging.getLogger('isce.alos2insar.runFrameOffset') def runFrameOffset(self): '''estimate frame offsets. 
''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runGeo2Rdr.py b/components/isceobj/Alos2Proc/runGeo2Rdr.py index 6aeb37a..5ff63ce 100644 --- a/components/isceobj/Alos2Proc/runGeo2Rdr.py +++ b/components/isceobj/Alos2Proc/runGeo2Rdr.py @@ -13,6 +13,10 @@ logger = logging.getLogger('isce.alos2insar.runGeo2Rdr') def runGeo2Rdr(self): '''compute range and azimuth offsets ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runGeocode.py b/components/isceobj/Alos2Proc/runGeocode.py index 0269aed..e596c7f 100644 --- a/components/isceobj/Alos2Proc/runGeocode.py +++ b/components/isceobj/Alos2Proc/runGeocode.py @@ -16,6 +16,10 @@ logger = logging.getLogger('isce.alos2insar.runGeocode') def runGeocode(self): '''geocode final products ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runIonCorrect.py b/components/isceobj/Alos2Proc/runIonCorrect.py new file mode 100644 index 0000000..87a455e --- /dev/null +++ b/components/isceobj/Alos2Proc/runIonCorrect.py @@ -0,0 +1,155 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np +import numpy.matlib + +import isceobj + +logger = logging.getLogger('isce.alos2insar.runIonCorrect') + +def runIonCorrect(self): + '''resample original ionosphere and ionospheric correction + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + if not self.doIon: + catalog.printToLog(logger, 
"runIonCorrect") + self._insar.procDoc.addAllFromCatalog(catalog) + return + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + from isceobj.Alos2Proc.runIonSubband import defineIonDir + ionDir = defineIonDir() + subbandPrefix = ['lower', 'upper'] + + ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal']) + os.makedirs(ionCalDir, exist_ok=True) + os.chdir(ionCalDir) + + + ############################################################ + # STEP 3. resample ionospheric phase + ############################################################ + from contrib.alos2proc_f.alos2proc_f import rect + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + from scipy.interpolate import interp1d + import shutil + + ################################################# + #SET PARAMETERS HERE + #interpolation method + interpolationMethod = 1 + ################################################# + + print('\ninterpolate ionosphere') + + ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon) + + ml3 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooks2, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2) + + ionfiltfile = 'filt_ion'+ml2+'.ion' + #ionrectfile = 'filt_ion'+ml3+'.ion' + ionrectfile = self._insar.multilookIon + + img = isceobj.createImage() + img.load(ionfiltfile + '.xml') + width2 = img.width + length2 = img.length + + img = isceobj.createImage() + img.load(os.path.join('../../', ionDir['insar'], self._insar.multilookDifferentialInterferogram) + '.xml') + width3 = img.width + length3 = img.length + + #number of range looks output + nrlo = self._insar.numberRangeLooks1*self._insar.numberRangeLooks2 + #number of range looks input + nrli = self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon + #number of azimuth looks output + nalo = 
self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2 + #number of azimuth looks input + nali = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon + + if (self._insar.numberRangeLooks2 != self._insar.numberRangeLooksIon) or \ + (self._insar.numberAzimuthLooks2 != self._insar.numberAzimuthLooksIon): + #this should be faster using fortran + if interpolationMethod == 0: + rect(ionfiltfile, ionrectfile, + width2,length2, + width3,length3, + nrlo/nrli, 0.0, + 0.0, nalo/nali, + (nrlo-nrli)/(2.0*nrli), + (nalo-nali)/(2.0*nali), + 'REAL','Bilinear') + #finer, but slower method + else: + ionfilt = np.fromfile(ionfiltfile, dtype=np.float32).reshape(length2, width2) + index2 = np.linspace(0, width2-1, num=width2, endpoint=True) + index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli) + ionrect = np.zeros((length3, width3), dtype=np.float32) + for i in range(length2): + f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate") + ionrect[i, :] = f(index3) + + index2 = np.linspace(0, length2-1, num=length2, endpoint=True) + index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali) + for j in range(width3): + f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate") + ionrect[:, j] = f(index3) + ionrect.astype(np.float32).tofile(ionrectfile) + del ionrect + create_xml(ionrectfile, width3, length3, 'float') + + os.rename(ionrectfile, os.path.join('../../insar', ionrectfile)) + os.rename(ionrectfile+'.vrt', os.path.join('../../insar', ionrectfile)+'.vrt') + os.rename(ionrectfile+'.xml', os.path.join('../../insar', ionrectfile)+'.xml') + os.chdir('../../insar') + else: + shutil.copyfile(ionfiltfile, os.path.join('../../insar', ionrectfile)) + os.chdir('../../insar') + create_xml(ionrectfile, width3, length3, 'float') + #now we are in 'insar' + + + ############################################################ + # STEP 4. 
correct interferogram + ############################################################ + from isceobj.Alos2Proc.Alos2ProcPublic import renameFile + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + if self.applyIon: + print('\ncorrect interferogram') + if os.path.isfile(self._insar.multilookDifferentialInterferogramOriginal): + print('original interferogram: {} is already here, do not rename: {}'.format(self._insar.multilookDifferentialInterferogramOriginal, self._insar.multilookDifferentialInterferogram)) + else: + print('renaming {} to {}'.format(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal)) + renameFile(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal) + + cmd = "imageMath.py -e='a*exp(-1.0*J*b)' --a={} --b={} -s BIP -t cfloat -o {}".format( + self._insar.multilookDifferentialInterferogramOriginal, + self._insar.multilookIon, + self._insar.multilookDifferentialInterferogram) + runCmd(cmd) + else: + print('\nionospheric phase estimation finished, but correction of interfeorgram not requested') + + os.chdir('../') + + catalog.printToLog(logger, "runIonCorrect") + self._insar.procDoc.addAllFromCatalog(catalog) + diff --git a/components/isceobj/Alos2Proc/runIonFilt.py b/components/isceobj/Alos2Proc/runIonFilt.py index 53ccf50..1745d3e 100644 --- a/components/isceobj/Alos2Proc/runIonFilt.py +++ b/components/isceobj/Alos2Proc/runIonFilt.py @@ -15,6 +15,10 @@ logger = logging.getLogger('isce.alos2insar.runIonFilt') def runIonFilt(self): '''compute and filter ionospheric phase ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() @@ -110,6 +114,7 @@ def runIonFilt(self): ############################################################ # STEP 2. 
filter ionospheric phase ############################################################ + import scipy.signal as ss ################################################# #SET PARAMETERS HERE @@ -117,22 +122,36 @@ def runIonFilt(self): fit = self.fitIon filt = self.filtIon fitAdaptive = self.fitAdaptiveIon + filtSecondary = self.filtSecondaryIon if (fit == False) and (filt == False): raise Exception('either fit ionosphere or filt ionosphere should be True when doing ionospheric correction\n') #filtering window size size_max = self.filteringWinsizeMaxIon size_min = self.filteringWinsizeMinIon + size_secondary = self.filteringWinsizeSecondaryIon if size_min > size_max: print('\n\nWARNING: minimum window size for filtering ionosphere phase {} > maximum window size {}'.format(size_min, size_max)) print(' re-setting maximum window size to {}\n\n'.format(size_min)) size_max = size_min + if size_secondary % 2 != 1: + size_secondary += 1 + print('window size of secondary filtering of ionosphere phase should be odd, window size changed to {}'.format(size_secondary)) #coherence threshold for fitting a polynomial corThresholdFit = 0.25 #ionospheric phase standard deviation after filtering - std_out0 = 0.1 + if self.filterStdIon is not None: + std_out0 = self.filterStdIon + else: + if referenceTrack.operationMode == secondaryTrack.operationMode: + from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict + std_out0 = modeProcParDict['ALOS-2'][referenceTrack.operationMode]['filterStdIon'] + else: + from isceobj.Alos2Proc.Alos2ProcPublic import filterStdPolyIon + std_out0 = np.polyval(filterStdPolyIon, referenceTrack.frames[0].swaths[0].rangeBandwidth/(1e6)) + #std_out0 = 0.1 ################################################# print('\nfiltering ionosphere') @@ -271,6 +290,12 @@ def runIonFilt(self): #filter the rest of the ionosphere if filt: (ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive) + if filtSecondary: + 
print('applying secondary filtering with window size {}'.format(size_secondary)) + g2d = gaussian(size_secondary, size_secondary/2.0, scale=1.0) + scale = ss.fftconvolve((ion_filt!=0), g2d, mode='same') + ion_filt = (ion_filt!=0) * ss.fftconvolve(ion_filt, g2d, mode='same') / (scale + (scale==0)) + catalog.addItem('standard deviation of filtered ionospheric phase', std_out0, 'runIonFilt') #get final results if (fit == True) and (filt == True): @@ -291,114 +316,7 @@ def runIonFilt(self): window_size_out.astype(np.float32).tofile(windowsizefiltfile) create_xml(windowsizefiltfile, width, length, 'float') - - ############################################################ - # STEP 3. resample ionospheric phase - ############################################################ - from contrib.alos2proc_f.alos2proc_f import rect - from isceobj.Alos2Proc.Alos2ProcPublic import create_xml - from scipy.interpolate import interp1d - import shutil - - ################################################# - #SET PARAMETERS HERE - #interpolation method - interpolationMethod = 1 - ################################################# - - print('\ninterpolate ionosphere') - - ml3 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooks2, - self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2) - - ionfiltfile = 'filt_ion'+ml2+'.ion' - #ionrectfile = 'filt_ion'+ml3+'.ion' - ionrectfile = self._insar.multilookIon - - img = isceobj.createImage() - img.load(ionfiltfile + '.xml') - width2 = img.width - length2 = img.length - - img = isceobj.createImage() - img.load(os.path.join('../../', ionDir['insar'], self._insar.multilookDifferentialInterferogram) + '.xml') - width3 = img.width - length3 = img.length - - #number of range looks output - nrlo = self._insar.numberRangeLooks1*self._insar.numberRangeLooks2 - #number of range looks input - nrli = self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon - #number of azimuth looks output - nalo = 
self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2 - #number of azimuth looks input - nali = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon - - if (self._insar.numberRangeLooks2 != self._insar.numberRangeLooksIon) or \ - (self._insar.numberAzimuthLooks2 != self._insar.numberAzimuthLooksIon): - #this should be faster using fortran - if interpolationMethod == 0: - rect(ionfiltfile, ionrectfile, - width2,length2, - width3,length3, - nrlo/nrli, 0.0, - 0.0, nalo/nali, - (nrlo-nrli)/(2.0*nrli), - (nalo-nali)/(2.0*nali), - 'REAL','Bilinear') - #finer, but slower method - else: - ionfilt = np.fromfile(ionfiltfile, dtype=np.float32).reshape(length2, width2) - index2 = np.linspace(0, width2-1, num=width2, endpoint=True) - index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli) - ionrect = np.zeros((length3, width3), dtype=np.float32) - for i in range(length2): - f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate") - ionrect[i, :] = f(index3) - - index2 = np.linspace(0, length2-1, num=length2, endpoint=True) - index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali) - for j in range(width3): - f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate") - ionrect[:, j] = f(index3) - ionrect.astype(np.float32).tofile(ionrectfile) - del ionrect - create_xml(ionrectfile, width3, length3, 'float') - - os.rename(ionrectfile, os.path.join('../../insar', ionrectfile)) - os.rename(ionrectfile+'.vrt', os.path.join('../../insar', ionrectfile)+'.vrt') - os.rename(ionrectfile+'.xml', os.path.join('../../insar', ionrectfile)+'.xml') - os.chdir('../../insar') - else: - shutil.copyfile(ionfiltfile, os.path.join('../../insar', ionrectfile)) - os.chdir('../../insar') - create_xml(ionrectfile, width3, length3, 'float') - #now we are in 'insar' - - - ############################################################ - # STEP 4. 
correct interferogram - ############################################################ - from isceobj.Alos2Proc.Alos2ProcPublic import renameFile - from isceobj.Alos2Proc.Alos2ProcPublic import runCmd - - if self.applyIon: - print('\ncorrect interferogram') - if os.path.isfile(self._insar.multilookDifferentialInterferogramOriginal): - print('original interferogram: {} is already here, do not rename: {}'.format(self._insar.multilookDifferentialInterferogramOriginal, self._insar.multilookDifferentialInterferogram)) - else: - print('renaming {} to {}'.format(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal)) - renameFile(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal) - - cmd = "imageMath.py -e='a*exp(-1.0*J*b)' --a={} --b={} -s BIP -t cfloat -o {}".format( - self._insar.multilookDifferentialInterferogramOriginal, - self._insar.multilookIon, - self._insar.multilookDifferentialInterferogram) - runCmd(cmd) - else: - print('\nionospheric phase estimation finished, but correction of interfeorgram not requested') - - os.chdir('../') + os.chdir('../../') catalog.printToLog(logger, "runIonFilt") self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/Alos2Proc/runIonSubband.py b/components/isceobj/Alos2Proc/runIonSubband.py index 1991f4f..096f62b 100644 --- a/components/isceobj/Alos2Proc/runIonSubband.py +++ b/components/isceobj/Alos2Proc/runIonSubband.py @@ -14,6 +14,10 @@ logger = logging.getLogger('isce.alos2insar.runIonSubband') def runIonSubband(self): '''create subband interferograms ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() @@ -296,30 +300,38 @@ def runIonSubband(self): #list of input files inputInterferograms = [] inputAmplitudes = [] - phaseDiff = [None] + #phaseDiff = [None] + swathPhaseDiffIon = 
[self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon] + phaseDiff = swathPhaseDiffIon[k] + if swathPhaseDiffIon[k] is None: + phaseDiff = None + else: + phaseDiff = swathPhaseDiffIon[k][i] + phaseDiff.insert(0, None) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): swathDir = 's{}'.format(swathNumber) inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) - #compute phase needed to be compensated using startingRange - if j >= 1: - #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] - #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] - phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ - -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) - phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ - -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) - if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ - referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange: - #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) - #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m), - #it should be OK to do the above. 
- #see results in neom where it meets the above requirement, but there is still phase diff - #to be less risky, we do not input values here - phaseDiff.append(None) - else: - phaseDiff.append(None) + # #compute phase needed to be compensated using startingRange + # if j >= 1: + # #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] + # #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] + # phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ + # referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange: + # #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) + # #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m), + # #it should be OK to do the above. 
+ # #see results in neom where it meets the above requirement, but there is still phase diff + # #to be less risky, we do not input values here + # phaseDiff.append(None) + # else: + # phaseDiff.append(None) #note that frame parameters are updated after mosaicking, here no need to update parameters #mosaic amplitudes diff --git a/components/isceobj/Alos2Proc/runIonUwrap.py b/components/isceobj/Alos2Proc/runIonUwrap.py index 1045da6..55840ce 100644 --- a/components/isceobj/Alos2Proc/runIonUwrap.py +++ b/components/isceobj/Alos2Proc/runIonUwrap.py @@ -4,6 +4,7 @@ # import os +import shutil import logging import datetime import numpy as np @@ -15,6 +16,10 @@ logger = logging.getLogger('isce.alos2insar.runIonUwrap') def runIonUwrap(self): '''unwrap subband interferograms ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() @@ -24,7 +29,17 @@ def runIonUwrap(self): return referenceTrack = self._insar.loadTrack(reference=True) - secondaryTrack = self._insar.loadTrack(reference=False) + #secondaryTrack = self._insar.loadTrack(reference=False) + + ionUwrap(self, referenceTrack) + + os.chdir('../../') + catalog.printToLog(logger, "runIonUwrap") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def ionUwrap(self, referenceTrack, latLonDir=None): + wbdFile = os.path.abspath(self._insar.wbd) from isceobj.Alos2Proc.runIonSubband import defineIonDir @@ -73,8 +88,14 @@ def runIonUwrap(self): #water body if k == 0: - look(os.path.join(fullbandDir, self._insar.latitude), 'lat'+ml2+'.lat', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1) - look(os.path.join(fullbandDir, self._insar.longitude), 'lon'+ml2+'.lon', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1) + if latLonDir is None: + latFile = os.path.join(fullbandDir, self._insar.latitude) + lonFile = os.path.join(fullbandDir, 
self._insar.longitude) + else: + latFile = os.path.join('../../', latLonDir, self._insar.latitude) + lonFile = os.path.join('../../', latLonDir, self._insar.longitude) + look(latFile, 'lat'+ml2+'.lat', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1) + look(lonFile, 'lon'+ml2+'.lon', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1) create_xml('lat'+ml2+'.lat', width2, length2, 'double') create_xml('lon'+ml2+'.lon', width2, length2, 'double') waterBodyRadar('lat'+ml2+'.lat', 'lon'+ml2+'.lon', wbdFile, 'wbd'+ml2+'.wbd') @@ -132,8 +153,9 @@ def runIonUwrap(self): from isceobj.Alos2Proc.Alos2ProcPublic import create_xml from mroipac.icu.Icu import Icu - if self.filterSubbandInt: - for k in range(2): + for k in range(2): + #1. filtering subband interferogram + if self.filterSubbandInt: toBeFiltered = 'tmp.int' if self.removeMagnitudeBeforeFilteringSubbandInt: cmd = "imageMath.py -e='a/(abs(a)+(a==0))' --a={} -o {} -t cfloat -s BSQ".format(subbandPrefix[k]+ml2+'.int', toBeFiltered) @@ -156,45 +178,50 @@ def runIonUwrap(self): os.remove(toBeFiltered + '.vrt') os.remove(toBeFiltered + '.xml') - #create phase sigma for phase unwrapping - #recreate filtered image - filtImage = isceobj.createIntImage() - filtImage.load('filt_'+subbandPrefix[k]+ml2+'.int' + '.xml') - filtImage.setAccessMode('read') - filtImage.createImage() + toBeUsedInPhsig = 'filt_'+subbandPrefix[k]+ml2+'.int' + else: + toBeUsedInPhsig = subbandPrefix[k]+ml2+'.int' - #amplitude image - ampImage = isceobj.createAmpImage() - ampImage.load(subbandPrefix[k]+ml2+'.amp' + '.xml') - ampImage.setAccessMode('read') - ampImage.createImage() + #2. 
create phase sigma for phase unwrapping + #recreate filtered image + filtImage = isceobj.createIntImage() + filtImage.load(toBeUsedInPhsig + '.xml') + filtImage.setAccessMode('read') + filtImage.createImage() - #phase sigma correlation image - phsigImage = isceobj.createImage() - phsigImage.setFilename(subbandPrefix[k]+ml2+'.phsig') - phsigImage.setWidth(width) - phsigImage.dataType='FLOAT' - phsigImage.bands = 1 - phsigImage.setImageType('cor') - phsigImage.setAccessMode('write') - phsigImage.createImage() + #amplitude image + ampImage = isceobj.createAmpImage() + ampImage.load(subbandPrefix[k]+ml2+'.amp' + '.xml') + ampImage.setAccessMode('read') + ampImage.createImage() - icu = Icu(name='insarapp_filter_icu') - icu.configure() - icu.unwrappingFlag = False - icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage) + #phase sigma correlation image + phsigImage = isceobj.createImage() + phsigImage.setFilename(subbandPrefix[k]+ml2+'.phsig') + phsigImage.setWidth(filtImage.width) + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setImageType('cor') + phsigImage.setAccessMode('write') + phsigImage.createImage() - phsigImage.renderHdr() + icu = Icu(name='insarapp_filter_icu') + icu.configure() + icu.unwrappingFlag = False + icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage) - filtImage.finalizeImage() - ampImage.finalizeImage() - phsigImage.finalizeImage() + phsigImage.renderHdr() + + filtImage.finalizeImage() + ampImage.finalizeImage() + phsigImage.finalizeImage() ############################################################ # STEP 4. 
phase unwrapping ############################################################ from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrap + from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrapOriginal for k in range(2): tmid = referenceTrack.sensingStart + datetime.timedelta(seconds=(self._insar.numberAzimuthLooks1-1.0)/2.0*referenceTrack.azimuthLineInterval+ @@ -207,16 +234,24 @@ def runIonUwrap(self): toBeUnwrapped = subbandPrefix[k]+ml2+'.int' coherenceFile = 'diff'+ml2+'.cor' - snaphuUnwrap(referenceTrack, tmid, - toBeUnwrapped, - coherenceFile, - subbandPrefix[k]+ml2+'.unw', - self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, - self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon, - costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) - - - os.chdir('../../') - catalog.printToLog(logger, "runIonUwrap") - self._insar.procDoc.addAllFromCatalog(catalog) + #if shutil.which('snaphu') != None: + #do not use original snaphu now + if False: + print('\noriginal snaphu program found') + print('unwrap {} using original snaphu, rather than that in ISCE'.format(toBeUnwrapped)) + snaphuUnwrapOriginal(toBeUnwrapped, + subbandPrefix[k]+ml2+'.phsig', + subbandPrefix[k]+ml2+'.amp', + subbandPrefix[k]+ml2+'.unw', + costMode = 's', + initMethod = 'mcf', + snaphuConfFile = '{}_snaphu.conf'.format(subbandPrefix[k])) + else: + snaphuUnwrap(referenceTrack, tmid, + toBeUnwrapped, + coherenceFile, + subbandPrefix[k]+ml2+'.unw', + self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon, + costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) diff --git a/components/isceobj/Alos2Proc/runLook.py b/components/isceobj/Alos2Proc/runLook.py index fb419e7..562f50a 100644 --- a/components/isceobj/Alos2Proc/runLook.py +++ b/components/isceobj/Alos2Proc/runLook.py @@ -17,6 +17,10 @@ logger = logging.getLogger('isce.alos2insar.runLook') def runLook(self): 
'''take looks ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runPreprocessor.py b/components/isceobj/Alos2Proc/runPreprocessor.py index d58d070..9104d02 100644 --- a/components/isceobj/Alos2Proc/runPreprocessor.py +++ b/components/isceobj/Alos2Proc/runPreprocessor.py @@ -15,6 +15,7 @@ from isceobj.Planet.Planet import Planet from isceobj.Alos2Proc.Alos2ProcPublic import runCmd from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo +from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict logger = logging.getLogger('isce.alos2insar.runPreprocessor') @@ -110,81 +111,20 @@ def runPreprocessor(self): self._insar.numberRangeLooksIon = self.numberRangeLooksIon self._insar.numberAzimuthLooksIon = self.numberAzimuthLooksIon - if self._insar.numberRangeLooks1 == None: - if referenceMode in ['SBS']: - self._insar.numberRangeLooks1 = 2 - elif referenceMode in ['UBS', 'UBD']: - self._insar.numberRangeLooks1 = 2 - elif referenceMode in ['HBS', 'HBD', 'HBQ']: - self._insar.numberRangeLooks1 = 2 - elif referenceMode in ['FBS', 'FBD', 'FBQ']: - self._insar.numberRangeLooks1 = 2 - elif referenceMode in ['WBS', 'WBD']: - self._insar.numberRangeLooks1 = 1 - elif referenceMode in ['WWS', 'WWD']: - self._insar.numberRangeLooks1 = 2 - elif referenceMode in ['VBS', 'VBD']: - self._insar.numberRangeLooks1 = 1 - else: - raise Exception('unknow acquisition mode') + if self._insar.numberRangeLooks1 is None: + self._insar.numberRangeLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks1'] + if self._insar.numberAzimuthLooks1 is None: + self._insar.numberAzimuthLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks1'] - if self._insar.numberAzimuthLooks1 == None: - if referenceMode in ['SBS']: - self._insar.numberAzimuthLooks1 = 4 - elif 
referenceMode in ['UBS', 'UBD']: - self._insar.numberAzimuthLooks1 = 2 - elif referenceMode in ['HBS', 'HBD', 'HBQ']: - self._insar.numberAzimuthLooks1 = 2 - elif referenceMode in ['FBS', 'FBD', 'FBQ']: - self._insar.numberAzimuthLooks1 = 4 - elif referenceMode in ['WBS', 'WBD']: - self._insar.numberAzimuthLooks1 = 14 - elif referenceMode in ['WWS', 'WWD']: - self._insar.numberAzimuthLooks1 = 14 - elif referenceMode in ['VBS', 'VBD']: - self._insar.numberAzimuthLooks1 = 14 - else: - raise Exception('unknow acquisition mode') + if self._insar.numberRangeLooks2 is None: + self._insar.numberRangeLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks2'] + if self._insar.numberAzimuthLooks2 is None: + self._insar.numberAzimuthLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks2'] - if self._insar.numberRangeLooks2 == None: - if referenceMode in spotlightModes: - self._insar.numberRangeLooks2 = 4 - elif referenceMode in stripmapModes: - self._insar.numberRangeLooks2 = 4 - elif referenceMode in scansarModes: - self._insar.numberRangeLooks2 = 5 - else: - raise Exception('unknow acquisition mode') - - if self._insar.numberAzimuthLooks2 == None: - if referenceMode in spotlightModes: - self._insar.numberAzimuthLooks2 = 4 - elif referenceMode in stripmapModes: - self._insar.numberAzimuthLooks2 = 4 - elif referenceMode in scansarModes: - self._insar.numberAzimuthLooks2 = 2 - else: - raise Exception('unknow acquisition mode') - - if self._insar.numberRangeLooksIon == None: - if referenceMode in spotlightModes: - self._insar.numberRangeLooksIon = 16 - elif referenceMode in stripmapModes: - self._insar.numberRangeLooksIon = 16 - elif referenceMode in scansarModes: - self._insar.numberRangeLooksIon = 40 - else: - raise Exception('unknow acquisition mode') - - if self._insar.numberAzimuthLooksIon == None: - if referenceMode in spotlightModes: - self._insar.numberAzimuthLooksIon = 16 - elif referenceMode in stripmapModes: - 
self._insar.numberAzimuthLooksIon = 16 - elif referenceMode in scansarModes: - self._insar.numberAzimuthLooksIon = 16 - else: - raise Exception('unknow acquisition mode') + if self._insar.numberRangeLooksIon is None: + self._insar.numberRangeLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooksIon'] + if self._insar.numberAzimuthLooksIon is None: + self._insar.numberAzimuthLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooksIon'] #define processing file names @@ -335,201 +275,6 @@ def runPreprocessor(self): self._insar.saveProduct(self.secondary.track, self._insar.secondaryTrackParameter) - ################################################## - #2. compute burst synchronization - ################################################## - #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights - #in one frame, real unsynchronized time is the same for all swaths - unsynTime = 0 - #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime)) - #synTime = 0 - synPercentage = 0 - - numberOfFrames = len(self._insar.referenceFrames) - numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1 - - for i, frameNumber in enumerate(self._insar.referenceFrames): - for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): - referenceSwath = self.reference.track.frames[i].swaths[j] - secondarySwath = self.secondary.track.frames[i].swaths[j] - #using Piyush's code for computing range and azimuth offsets - midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5 - midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf) - llh = self.reference.track.orbit.rdr2geo(midSensingStart, midRange) - slvaz, slvrng = self.secondary.track.orbit.geo2rdr(llh) - ###Translate to 
offsets - #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same - #range pixel size and prf - rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5 - azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5 - - #compute burst synchronization - #burst parameters for ScanSAR wide mode not estimed yet - if self._insar.modeCombination == 21: - scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff - #secondary burst start times corresponding to reference burst start times (100% synchronization) - scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \ - scburstStartLine + 100000*referenceSwath.burstCycleLength, \ - referenceSwath.burstCycleLength) - dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines) - #find the difference with minimum absolute value - unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))] - if np.absolute(unsynLines) >= secondarySwath.burstLength: - synLines = 0 - if unsynLines > 0: - unsynLines = secondarySwath.burstLength - else: - unsynLines = -secondarySwath.burstLength - else: - synLines = secondarySwath.burstLength - np.absolute(unsynLines) - - unsynTime += unsynLines / referenceSwath.prf - synPercentage += synLines / referenceSwath.burstLength * 100.0 - - catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runPreprocessor') - - ############################################################################################ - #illustration of the sign of the number of unsynchronized lines (unsynLines) - #The convention is the 
same as ampcor offset, that is, - # secondaryLineNumber = referenceLineNumber + unsynLines - # - # |-----------------------| ------------ - # | | ^ - # | | | - # | | | unsynLines < 0 - # | | | - # | | \ / - # | | |-----------------------| - # | | | | - # | | | | - # |-----------------------| | | - # Reference Burst | | - # | | - # | | - # | | - # | | - # |-----------------------| - # Secondary Burst - # - # - ############################################################################################ - - ##burst parameters for ScanSAR wide mode not estimed yet - elif self._insar.modeCombination == 31: - #scansar is reference - scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff - #secondary burst start times corresponding to reference burst start times (100% synchronization) - for k in range(-100000, 100000): - saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k - st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf) - if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1: - secondarySwath.burstStartTime = st_burstx - secondarySwath.burstLength = referenceSwath.burstLength - secondarySwath.burstCycleLength = referenceSwath.burstCycleLength - secondarySwath.swathNumber = referenceSwath.swathNumber - break - #unsynLines = 0 - #synLines = referenceSwath.burstLength - #unsynTime += unsynLines / referenceSwath.prf - #synPercentage += synLines / referenceSwath.burstLength * 100.0 - catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runPreprocessor') - else: - pass - - #overwrite original frame parameter file - if self._insar.modeCombination == 31: - frameDir = 'f{}_{}'.format(i+1, frameNumber) - self._insar.saveProduct(self.secondary.track.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter)) - - #getting average - if 
self._insar.modeCombination == 21: - unsynTime /= numberOfFrames*numberOfSwaths - synPercentage /= numberOfFrames*numberOfSwaths - elif self._insar.modeCombination == 31: - unsynTime = 0. - synPercentage = 100. - else: - pass - - #record results - if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31): - self._insar.burstUnsynchronizedTime = unsynTime - self._insar.burstSynchronization = synPercentage - catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runPreprocessor') - - - ################################################## - #3. compute baseline - ################################################## - #only compute baseline at four corners and center of the reference track - bboxRdr = getBboxRdr(self.reference.track) - - rangeMin = bboxRdr[0] - rangeMax = bboxRdr[1] - azimuthTimeMin = bboxRdr[2] - azimuthTimeMax = bboxRdr[3] - - azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0) - rangeMid = (rangeMin + rangeMax) / 2.0 - - points = [[azimuthTimeMin, rangeMin], - [azimuthTimeMin, rangeMax], - [azimuthTimeMax, rangeMin], - [azimuthTimeMax, rangeMax], - [azimuthTimeMid, rangeMid]] - - Bpar = [] - Bperp = [] - #modify Piyush's code for computing baslines - refElp = Planet(pname='Earth').ellipsoid - for x in points: - referenceSV = self.reference.track.orbit.interpolate(x[0], method='hermite') - target = self.reference.track.orbit.rdr2geo(x[0], x[1]) - - slvTime, slvrng = self.secondary.track.orbit.geo2rdr(target) - secondarySV = self.secondary.track.orbit.interpolateOrbit(slvTime, method='hermite') - - targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) - mxyz = np.array(referenceSV.getPosition()) - mvel = np.array(referenceSV.getVelocity()) - sxyz = np.array(secondarySV.getPosition()) - - #to fix abrupt change near zero in baseline grid. 
JUN-05-2020 - mvelunit = mvel / np.linalg.norm(mvel) - sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit - - aa = np.linalg.norm(sxyz-mxyz) - costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa) - - Bpar.append(aa*costheta) - - perp = aa * np.sqrt(1 - costheta*costheta) - direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) - Bperp.append(direction*perp) - - catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runPreprocessor') - catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runPreprocessor') - catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runPreprocessor') - catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runPreprocessor') - catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runPreprocessor') - - catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runPreprocessor') - catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runPreprocessor') - catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runPreprocessor') - catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runPreprocessor') - catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runPreprocessor') - - - ################################################## - #4. 
compute bounding box - ################################################## - referenceBbox = getBboxGeo(self.reference.track) - secondaryBbox = getBboxGeo(self.secondary.track) - - catalog.addItem('reference bounding box', referenceBbox, 'runPreprocessor') - catalog.addItem('secondary bounding box', secondaryBbox, 'runPreprocessor') - - catalog.printToLog(logger, "runPreprocessor") self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/Alos2Proc/runRdr2Geo.py b/components/isceobj/Alos2Proc/runRdr2Geo.py index 6e2a02f..283b2f0 100644 --- a/components/isceobj/Alos2Proc/runRdr2Geo.py +++ b/components/isceobj/Alos2Proc/runRdr2Geo.py @@ -14,6 +14,10 @@ logger = logging.getLogger('isce.alos2insar.runRdr2Geo') def runRdr2Geo(self): '''compute lat/lon/hgt ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runRdrDemOffset.py b/components/isceobj/Alos2Proc/runRdrDemOffset.py index 16695f7..4b5c1cf 100644 --- a/components/isceobj/Alos2Proc/runRdrDemOffset.py +++ b/components/isceobj/Alos2Proc/runRdrDemOffset.py @@ -20,10 +20,20 @@ logger = logging.getLogger('isce.alos2insar.runRdrDemOffset') def runRdrDemOffset(self): '''estimate between radar image and dem ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() referenceTrack = self._insar.loadTrack(reference=True) + + rdrDemOffset(self, referenceTrack, catalog=catalog) + + +def rdrDemOffset(self, referenceTrack, catalog=None): + demFile = os.path.abspath(self._insar.dem) insarDir = 'insar' @@ -96,13 +106,15 @@ def runRdrDemOffset(self): if (landRatio <= 0.00125): print('\n\nWARNING: land area too small for estimating offsets between radar and dem') print('do not estimate offsets between radar and dem\n\n') - 
self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0] - catalog.addItem('warning message', 'land area too small for estimating offsets between radar and dem', 'runRdrDemOffset') + if catalog is not None: + self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0] + catalog.addItem('warning message', 'land area too small for estimating offsets between radar and dem', 'runRdrDemOffset') os.chdir('../../') - catalog.printToLog(logger, "runRdrDemOffset") - self._insar.procDoc.addAllFromCatalog(catalog) + if catalog is not None: + catalog.printToLog(logger, "runRdrDemOffset") + self._insar.procDoc.addAllFromCatalog(catalog) return @@ -130,8 +142,9 @@ def runRdrDemOffset(self): if numberOfOffsetsAzimuth < 10: numberOfOffsetsAzimuth = 10 - catalog.addItem('number of range offsets', '{}'.format(numberOfOffsetsRange), 'runRdrDemOffset') - catalog.addItem('number of azimuth offsets', '{}'.format(numberOfOffsetsAzimuth), 'runRdrDemOffset') + if catalog is not None: + catalog.addItem('number of range offsets', '{}'.format(numberOfOffsetsRange), 'runRdrDemOffset') + catalog.addItem('number of azimuth offsets', '{}'.format(numberOfOffsetsAzimuth), 'runRdrDemOffset') #matching ampcor = Ampcor(name='insarapp_slcs_ampcor') @@ -247,12 +260,14 @@ def runRdrDemOffset(self): print('\n\nWARNING: too few points left after culling, {} left'.format(numCullOffsets)) print('do not estimate offsets between radar and dem\n\n') self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0] - catalog.addItem('warning message', 'too few points left after culling, {} left'.format(numCullOffsets), 'runRdrDemOffset') + if catalog is not None: + catalog.addItem('warning message', 'too few points left after culling, {} left'.format(numCullOffsets), 'runRdrDemOffset') os.chdir('../../') - catalog.printToLog(logger, "runRdrDemOffset") - self._insar.procDoc.addAllFromCatalog(catalog) + if catalog is not None: + catalog.printToLog(logger, "runRdrDemOffset") + 
self._insar.procDoc.addAllFromCatalog(catalog) return @@ -277,8 +292,9 @@ def runRdrDemOffset(self): os.chdir('../../') - catalog.printToLog(logger, "runRdrDemOffset") - self._insar.procDoc.addAllFromCatalog(catalog) + if catalog is not None: + catalog.printToLog(logger, "runRdrDemOffset") + self._insar.procDoc.addAllFromCatalog(catalog) def simulateRadar(hgtfile, simfile, scale=3.0, offset=100.0): diff --git a/components/isceobj/Alos2Proc/runRectRangeOffset.py b/components/isceobj/Alos2Proc/runRectRangeOffset.py index 8e33737..519cdf6 100644 --- a/components/isceobj/Alos2Proc/runRectRangeOffset.py +++ b/components/isceobj/Alos2Proc/runRectRangeOffset.py @@ -15,6 +15,10 @@ logger = logging.getLogger('isce.alos2insar.runRectRangeOffset') def runRectRangeOffset(self): '''rectify range offset ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runSlcMosaic.py b/components/isceobj/Alos2Proc/runSlcMosaic.py index a43cd3a..5fabb2d 100644 --- a/components/isceobj/Alos2Proc/runSlcMosaic.py +++ b/components/isceobj/Alos2Proc/runSlcMosaic.py @@ -40,13 +40,30 @@ def runSlcMosaic(self): if len(referenceTrack.frames) > 1: matchingMode=1 + #determine whether reference offset from matching is already done in previous InSAR processing. 
+ if hasattr(self, 'doInSAR'): + if not self.doInSAR: + referenceEstimated = False + else: + if self.frameOffsetMatching == False: + referenceEstimated = False + else: + referenceEstimated = True + else: + if self.frameOffsetMatching == False: + referenceEstimated = False + else: + referenceEstimated = True + #if reference offsets from matching are not already computed - if self.frameOffsetMatching == False: + #if self.frameOffsetMatching == False: + if referenceEstimated == False: offsetReference = frameOffset(referenceTrack, self._insar.referenceSlc, self._insar.referenceFrameOffset, crossCorrelation=True, matchingMode=matchingMode) offsetSecondary = frameOffset(secondaryTrack, self._insar.secondarySlc, self._insar.secondaryFrameOffset, crossCorrelation=True, matchingMode=matchingMode) - if self.frameOffsetMatching == False: + #if self.frameOffsetMatching == False: + if referenceEstimated == False: self._insar.frameRangeOffsetMatchingReference = offsetReference[2] self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3] self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2] @@ -110,6 +127,43 @@ def runSlcMosaic(self): secondaryTrack.dopplerVsPixel = secondaryTrack.frames[0].swaths[0].dopplerVsPixel else: + #in case InSAR, and therefore runSwathMosaic, was not done previously + for i, frameNumber in enumerate(self._insar.referenceFrames): + #update frame parameters + ######################################################### + frame = referenceTrack.frames[i] + #mosaic size + frame.numberOfSamples = frame.swaths[0].numberOfSamples + frame.numberOfLines = frame.swaths[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + 
frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + #update frame parameters, secondary + ######################################################### + frame = secondaryTrack.frames[i] + #mosaic size + frame.numberOfSamples = frame.swaths[0].numberOfSamples + frame.numberOfLines = frame.swaths[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + #mosaic reference slc ######################################################### #choose offsets diff --git a/components/isceobj/Alos2Proc/runSlcOffset.py b/components/isceobj/Alos2Proc/runSlcOffset.py index c7bfed3..3838b40 100644 --- a/components/isceobj/Alos2Proc/runSlcOffset.py +++ b/components/isceobj/Alos2Proc/runSlcOffset.py @@ -25,6 +25,11 @@ logger = logging.getLogger('isce.alos2insar.runSlcOffset') def runSlcOffset(self): '''estimate SLC offsets ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + print('\nInSAR processing not requested, skip this and the remaining InSAR steps...') + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runSwathMosaic.py b/components/isceobj/Alos2Proc/runSwathMosaic.py index 3af99ff..27f490c 100644 --- a/components/isceobj/Alos2Proc/runSwathMosaic.py +++ b/components/isceobj/Alos2Proc/runSwathMosaic.py @@ -17,6 +17,10 @@ logger = logging.getLogger('isce.alos2insar.runSwathMosaic') def runSwathMosaic(self): '''mosaic subswaths ''' + if hasattr(self, 'doInSAR'): + 
if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() @@ -211,8 +215,10 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num rectWidth.append( int(swaths[i].numberOfSamples / numberOfRangeLooks) ) rectLength.append( int(swaths[i].numberOfLines / numberOfAzimuthLooks) ) else: - rectWidth.append( int(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) ) - rectLength.append( int(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) ) + rectWidth.append( round(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) ) + rectLength.append( round(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) ) + #rectWidth.append( int(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) ) + #rectLength.append( int(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) ) #convert original offset to offset for images with looks #use list instead of np.array to make it consistent with the rest of the code @@ -239,71 +245,80 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num os.remove(rinfs[i]) os.symlink(inf, rinfs[i]) else: - infImg = isceobj.createImage() - infImg.load(inf+'.xml') - rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i]) - azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i]) + #no need to resample + if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and (abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001): + if os.path.isfile(rinfs[i]): + os.remove(rinfs[i]) + os.symlink(inf, rinfs[i]) + #all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like + #4.99999999999... 
+ rangeOffsets2[i] = round(rangeOffsets2[i]) + azimuthOffsets2[i] = round(azimuthOffsets2[i]) + else: + infImg = isceobj.createImage() + infImg.load(inf+'.xml') + rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i]) + azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i]) + if resamplingMethod == 0: + rect_with_looks(inf, + rinfs[i], + infImg.width, infImg.length, + rectWidth[i], rectLength[i], + rangeScale[i], 0.0, + 0.0,azimuthScale[i], + rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], + 1,1, + 1,1, + 'COMPLEX', + 'Bilinear') + elif resamplingMethod == 1: + #decompose amplitude and phase + phaseFile = 'phase' + amplitudeFile = 'amplitude' + data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width) + phase = np.exp(np.complex64(1j) * np.angle(data)) + phase[np.nonzero(data==0)] = 0 + phase.astype(np.complex64).tofile(phaseFile) + amplitude = np.absolute(data) + amplitude.astype(np.float32).tofile(amplitudeFile) - if resamplingMethod == 0: - rect_with_looks(inf, - rinfs[i], - infImg.width, infImg.length, - rectWidth[i], rectLength[i], - rangeScale[i], 0.0, - 0.0,azimuthScale[i], - rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], - 1,1, - 1,1, - 'COMPLEX', - 'Bilinear') - elif resamplingMethod == 1: - #decompose amplitude and phase - phaseFile = 'phase' - amplitudeFile = 'amplitude' - data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width) - phase = np.exp(np.complex64(1j) * np.angle(data)) - phase[np.nonzero(data==0)] = 0 - phase.astype(np.complex64).tofile(phaseFile) - amplitude = np.absolute(data) - amplitude.astype(np.float32).tofile(amplitudeFile) + #resampling + phaseRectFile = 'phaseRect' + amplitudeRectFile = 'amplitudeRect' + rect_with_looks(phaseFile, + phaseRectFile, + infImg.width, infImg.length, + rectWidth[i], rectLength[i], + rangeScale[i], 0.0, + 0.0,azimuthScale[i], + rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * 
azimuthScale[i], + 1,1, + 1,1, + 'COMPLEX', + 'Sinc') + rect_with_looks(amplitudeFile, + amplitudeRectFile, + infImg.width, infImg.length, + rectWidth[i], rectLength[i], + rangeScale[i], 0.0, + 0.0,azimuthScale[i], + rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], + 1,1, + 1,1, + 'REAL', + 'Bilinear') - #resampling - phaseRectFile = 'phaseRect' - amplitudeRectFile = 'amplitudeRect' - rect_with_looks(phaseFile, - phaseRectFile, - infImg.width, infImg.length, - rectWidth[i], rectLength[i], - rangeScale[i], 0.0, - 0.0,azimuthScale[i], - rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], - 1,1, - 1,1, - 'COMPLEX', - 'Sinc') - rect_with_looks(amplitudeFile, - amplitudeRectFile, - infImg.width, infImg.length, - rectWidth[i], rectLength[i], - rangeScale[i], 0.0, - 0.0,azimuthScale[i], - rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], - 1,1, - 1,1, - 'REAL', - 'Bilinear') + #recombine amplitude and phase + phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(rectLength[i], rectWidth[i]) + amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(rectLength[i], rectWidth[i]) + (phase*amplitude).astype(np.complex64).tofile(rinfs[i]) - #recombine amplitude and phase - phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(rectLength[i], rectWidth[i]) - amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(rectLength[i], rectWidth[i]) - (phase*amplitude).astype(np.complex64).tofile(rinfs[i]) - - #tidy up - os.remove(phaseFile) - os.remove(amplitudeFile) - os.remove(phaseRectFile) - os.remove(amplitudeRectFile) + #tidy up + os.remove(phaseFile) + os.remove(amplitudeFile) + os.remove(phaseRectFile) + os.remove(amplitudeRectFile) #determine output width and length diff --git a/components/isceobj/Alos2Proc/runSwathOffset.py b/components/isceobj/Alos2Proc/runSwathOffset.py index 9a4e2b6..7f3f43e 100644 --- a/components/isceobj/Alos2Proc/runSwathOffset.py +++ 
b/components/isceobj/Alos2Proc/runSwathOffset.py @@ -18,6 +18,10 @@ logger = logging.getLogger('isce.alos2insar.runSwathOffset') def runSwathOffset(self): '''estimate swath offsets. ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() diff --git a/components/isceobj/Alos2Proc/runUnwrapSnaphu.py b/components/isceobj/Alos2Proc/runUnwrapSnaphu.py index 985d9a8..8bca848 100644 --- a/components/isceobj/Alos2Proc/runUnwrapSnaphu.py +++ b/components/isceobj/Alos2Proc/runUnwrapSnaphu.py @@ -19,12 +19,23 @@ logger = logging.getLogger('isce.alos2insar.runUnwrapSnaphu') def runUnwrapSnaphu(self): '''unwrap filtered interferogram ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) self.updateParamemetersFromUser() referenceTrack = self._insar.loadTrack(reference=True) #secondaryTrack = self._insar.loadTrack(reference=False) + unwrapSnaphu(self, referenceTrack) + + catalog.printToLog(logger, "runUnwrapSnaphu") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def unwrapSnaphu(self, referenceTrack): insarDir = 'insar' os.makedirs(insarDir, exist_ok=True) os.chdir(insarDir) @@ -70,21 +81,20 @@ def runUnwrapSnaphu(self): wbdImage.load(self._insar.multilookWbdOut+'.xml') width = wbdImage.width length = wbdImage.length - if not os.path.exists(self._insar.multilookWbdOut): - catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runUnwrapSnaphu') - else: - wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width) - unw=np.memmap(self._insar.unwrappedInterferogram, dtype='float32', mode='r+', shape=(length*2, width)) - (unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0 - (unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0 - del unw - unw=np.memmap(self._insar.unwrappedMaskedInterferogram, dtype='float32', 
mode='r+', shape=(length*2, width)) - (unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0 - (unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0 - del unw, wbd + #if not os.path.exists(self._insar.multilookWbdOut): + # catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runUnwrapSnaphu') + #else: + wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width) + unw=np.memmap(self._insar.unwrappedInterferogram, dtype='float32', mode='r+', shape=(length*2, width)) + (unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0 + (unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0 + del unw + unw=np.memmap(self._insar.unwrappedMaskedInterferogram, dtype='float32', mode='r+', shape=(length*2, width)) + (unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0 + (unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0 + del unw, wbd os.chdir('../') - catalog.printToLog(logger, "runUnwrapSnaphu") - self._insar.procDoc.addAllFromCatalog(catalog) + diff --git a/components/isceobj/Alos2burstProc/Factories.py b/components/isceobj/Alos2burstProc/Factories.py index 0e9aeb7..f040292 100644 --- a/components/isceobj/Alos2burstProc/Factories.py +++ b/components/isceobj/Alos2burstProc/Factories.py @@ -74,6 +74,7 @@ def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None): createPreprocessor = _factory("runPreprocessor") +createBaseline = _factory("runBaseline", path = "isceobj.Alos2Proc.") createExtractBurst = _factory("runExtractBurst") createDownloadDem = _factory("runDownloadDem", path = "isceobj.Alos2Proc.") createCoregGeom = _factory("runCoregGeom") @@ -93,6 +94,7 @@ createCoherence = _factory("runCoherence", path = "isceobj.Alos2Proc.") createIonSubband = _factory("runIonSubband") createIonUwrap = _factory("runIonUwrap", path = "isceobj.Alos2Proc.") createIonFilt = _factory("runIonFilt", path = "isceobj.Alos2Proc.") +createIonCorrect = _factory("runIonCorrect", path = "isceobj.Alos2Proc.") createFilt = _factory("runFilt", 
path = "isceobj.Alos2Proc.") createUnwrapSnaphu = _factory("runUnwrapSnaphu", path = "isceobj.Alos2Proc.") createGeocode = _factory("runGeocode", path = "isceobj.Alos2Proc.") diff --git a/components/isceobj/Alos2burstProc/runIonSubband.py b/components/isceobj/Alos2burstProc/runIonSubband.py index b4ecb9a..050c42a 100644 --- a/components/isceobj/Alos2burstProc/runIonSubband.py +++ b/components/isceobj/Alos2burstProc/runIonSubband.py @@ -252,30 +252,38 @@ def runIonSubband(self): #list of input files inputInterferograms = [] inputAmplitudes = [] - phaseDiff = [None] + #phaseDiff = [None] + swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon] + phaseDiff = swathPhaseDiffIon[k] + if swathPhaseDiffIon[k] is None: + phaseDiff = None + else: + phaseDiff = swathPhaseDiffIon[k][i] + phaseDiff.insert(0, None) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): swathDir = 's{}'.format(swathNumber) inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) - #compute phase needed to be compensated using startingRange - if j >= 1: - #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] - #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] - phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ - -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) - phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ - -4.0 * np.pi * 
secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) - if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ - referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange: - #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) - #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m), - #it should be OK to do the above. - #see results in neom where it meets the above requirement, but there is still phase diff - #to be less risky, we do not input values here - phaseDiff.append(None) - else: - phaseDiff.append(None) + # #compute phase needed to be compensated using startingRange + # if j >= 1: + # #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] + # #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] + # phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ + # referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange: + # #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) + # #if reference and secondary versions are 
all before or after version 2.025 (starting range error < 0.5 m), + # #it should be OK to do the above. + # #see results in neom where it meets the above requirement, but there is still phase diff + # #to be less risky, we do not input values here + # phaseDiff.append(None) + # else: + # phaseDiff.append(None) #note that frame parameters are updated after mosaicking #mosaic amplitudes diff --git a/components/isceobj/Alos2burstProc/runPreprocessor.py b/components/isceobj/Alos2burstProc/runPreprocessor.py index 5db38e4..8ce560a 100644 --- a/components/isceobj/Alos2burstProc/runPreprocessor.py +++ b/components/isceobj/Alos2burstProc/runPreprocessor.py @@ -258,201 +258,6 @@ def runPreprocessor(self): self._insar.saveProduct(self.secondary.track, self._insar.secondaryTrackParameter) - ################################################## - #2. compute burst synchronization - ################################################## - #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights - #in one frame, real unsynchronized time is the same for all swaths - unsynTime = 0 - #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime)) - #synTime = 0 - synPercentage = 0 - - numberOfFrames = len(self._insar.referenceFrames) - numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1 - - for i, frameNumber in enumerate(self._insar.referenceFrames): - for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): - referenceSwath = self.reference.track.frames[i].swaths[j] - secondarySwath = self.secondary.track.frames[i].swaths[j] - #using Piyush's code for computing range and azimuth offsets - midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5 - midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / 
referenceSwath.prf) - llh = self.reference.track.orbit.rdr2geo(midSensingStart, midRange) - slvaz, slvrng = self.secondary.track.orbit.geo2rdr(llh) - ###Translate to offsets - #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same - #range pixel size and prf - rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5 - azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5 - - #compute burst synchronization - #burst parameters for ScanSAR wide mode not estimed yet - if self._insar.modeCombination == 21: - scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff - #secondary burst start times corresponding to reference burst start times (100% synchronization) - scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \ - scburstStartLine + 100000*referenceSwath.burstCycleLength, \ - referenceSwath.burstCycleLength) - dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines) - #find the difference with minimum absolute value - unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))] - if np.absolute(unsynLines) >= secondarySwath.burstLength: - synLines = 0 - if unsynLines > 0: - unsynLines = secondarySwath.burstLength - else: - unsynLines = -secondarySwath.burstLength - else: - synLines = secondarySwath.burstLength - np.absolute(unsynLines) - - unsynTime += unsynLines / referenceSwath.prf - synPercentage += synLines / referenceSwath.burstLength * 100.0 - - catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runPreprocessor') - - 
############################################################################################ - #illustration of the sign of the number of unsynchronized lines (unsynLines) - #The convention is the same as ampcor offset, that is, - # secondaryLineNumber = referenceLineNumber + unsynLines - # - # |-----------------------| ------------ - # | | ^ - # | | | - # | | | unsynLines < 0 - # | | | - # | | \ / - # | | |-----------------------| - # | | | | - # | | | | - # |-----------------------| | | - # Reference Burst | | - # | | - # | | - # | | - # | | - # |-----------------------| - # Secondary Burst - # - # - ############################################################################################ - - ##burst parameters for ScanSAR wide mode not estimed yet - elif self._insar.modeCombination == 31: - #scansar is reference - scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff - #secondary burst start times corresponding to reference burst start times (100% synchronization) - for k in range(-100000, 100000): - saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k - st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf) - if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1: - secondarySwath.burstStartTime = st_burstx - secondarySwath.burstLength = referenceSwath.burstLength - secondarySwath.burstCycleLength = referenceSwath.burstCycleLength - secondarySwath.swathNumber = referenceSwath.swathNumber - break - #unsynLines = 0 - #synLines = referenceSwath.burstLength - #unsynTime += unsynLines / referenceSwath.prf - #synPercentage += synLines / referenceSwath.burstLength * 100.0 - catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runPreprocessor') - else: - pass - - #overwrite original frame parameter file - if self._insar.modeCombination == 31: - frameDir = 
'f{}_{}'.format(i+1, frameNumber) - self._insar.saveProduct(self.secondary.track.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter)) - - #getting average - if self._insar.modeCombination == 21: - unsynTime /= numberOfFrames*numberOfSwaths - synPercentage /= numberOfFrames*numberOfSwaths - elif self._insar.modeCombination == 31: - unsynTime = 0. - synPercentage = 100. - else: - pass - - #record results - if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31): - self._insar.burstUnsynchronizedTime = unsynTime - self._insar.burstSynchronization = synPercentage - catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runPreprocessor') - - - ################################################## - #3. compute baseline - ################################################## - #only compute baseline at four corners and center of the reference track - bboxRdr = getBboxRdr(self.reference.track) - - rangeMin = bboxRdr[0] - rangeMax = bboxRdr[1] - azimuthTimeMin = bboxRdr[2] - azimuthTimeMax = bboxRdr[3] - - azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0) - rangeMid = (rangeMin + rangeMax) / 2.0 - - points = [[azimuthTimeMin, rangeMin], - [azimuthTimeMin, rangeMax], - [azimuthTimeMax, rangeMin], - [azimuthTimeMax, rangeMax], - [azimuthTimeMid, rangeMid]] - - Bpar = [] - Bperp = [] - #modify Piyush's code for computing baslines - refElp = Planet(pname='Earth').ellipsoid - for x in points: - referenceSV = self.reference.track.orbit.interpolate(x[0], method='hermite') - target = self.reference.track.orbit.rdr2geo(x[0], x[1]) - - slvTime, slvrng = self.secondary.track.orbit.geo2rdr(target) - secondarySV = self.secondary.track.orbit.interpolateOrbit(slvTime, method='hermite') - - targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) - mxyz = np.array(referenceSV.getPosition()) - mvel = np.array(referenceSV.getVelocity()) - sxyz = 
np.array(secondarySV.getPosition()) - - #to fix abrupt change near zero in baseline grid. JUN-05-2020 - mvelunit = mvel / np.linalg.norm(mvel) - sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit - - aa = np.linalg.norm(sxyz-mxyz) - costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa) - - Bpar.append(aa*costheta) - - perp = aa * np.sqrt(1 - costheta*costheta) - direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) - Bperp.append(direction*perp) - - catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runPreprocessor') - catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runPreprocessor') - catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runPreprocessor') - catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runPreprocessor') - catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runPreprocessor') - - catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runPreprocessor') - catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runPreprocessor') - catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runPreprocessor') - catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runPreprocessor') - catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runPreprocessor') - - - ################################################## - #4. 
compute bounding box - ################################################## - referenceBbox = getBboxGeo(self.reference.track) - secondaryBbox = getBboxGeo(self.secondary.track) - - catalog.addItem('reference bounding box', referenceBbox, 'runPreprocessor') - catalog.addItem('secondary bounding box', secondaryBbox, 'runPreprocessor') - - catalog.printToLog(logger, "runPreprocessor") self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/Sensor/TerraSARX.py b/components/isceobj/Sensor/TerraSARX.py index 075b50b..867c860 100755 --- a/components/isceobj/Sensor/TerraSARX.py +++ b/components/isceobj/Sensor/TerraSARX.py @@ -136,6 +136,17 @@ class TerraSARX(Sensor): self.populateMetadata() fp.close() + def grab_from_xml(self, path): + try: + res = self._xml_root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + def populateMetadata(self): """ Populate our Metadata objects diff --git a/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 b/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 index 05d0c6a..2195ec8 100644 --- a/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 +++ b/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 @@ -213,7 +213,9 @@ cycle endif - r_dop = evalPoly2d_f(dopplerPoly, r_at, r_rt) + !r_dop = evalPoly2d_f(dopplerPoly, r_at, r_rt) + ! doppler should be computed using secondary's coordinate. 
Cunren Liang, 12-AUG-2020 + r_dop = evalPoly2d_f(dopplerPoly, r_at+r_ao, r_rt+r_ro) !!!!!!Data chip without the carriers do jj=1,sincone diff --git a/contrib/Snaphu/src/snaphu_cs2.c b/contrib/Snaphu/src/snaphu_cs2.c index 0c3f9b2..4277742 100644 --- a/contrib/Snaphu/src/snaphu_cs2.c +++ b/contrib/Snaphu/src/snaphu_cs2.c @@ -55,6 +55,7 @@ #include #include #include +#include #include #include #include @@ -1755,6 +1756,8 @@ void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol, double cost, c_max; short *cap; /* cap changed to short by CWC */ + long row_index, col_index; /* report out-of-bounds index by Cunren, 18-aug-2020 */ + short **rowcost, **colcost; short **rowflow, **colflow; @@ -1808,19 +1811,10 @@ void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol, exit(ABNORMAL_EXIT); } - if(from==(to+1)){ - num=from+(int )((from-1)/nNrow); - colflow[(num-1) % (nNrow+1)][(int )(num-1)/(nNrow+1)]-=flow; - }else if(from==(to-1)){ - num=from+(int )((from-1)/nNrow)+1; - colflow[(num-1) % (nNrow+1)][(int )(num-1)/(nNrow+1)]+=flow; - }else if(from==(to-nNrow)){ - num=from+nNrow; - rowflow[(num-1) % nNrow][(int )((num-1)/nNrow)]+=flow; - }else if(from==(to+nNrow)){ - num=from; - rowflow[(num-1) % nNrow][(int )((num-1)/nNrow)]-=flow; - }else if((from==ground) || (to==ground)){ + /* node indices are indexed from 1, not 0 */ + /* node indices are in column major order, not row major */ + /* handle flow to/from ground first */ + if((from==ground) || (to==ground)){ if(to==ground){ num=to; to=from; @@ -1828,17 +1822,69 @@ void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol, flow=-flow; } if(!((to-1) % nNrow)){ - colflow[0][(int )((to-1)/nNrow)]+=flow; + row_index = 0; + col_index = (int )((to-1)/nNrow); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); }else 
if(to<=nNrow){ - rowflow[to-1][0]+=flow; + row_index = to-1; + col_index = 0; + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + rowflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); }else if(to>=(ground-nNrow-1)){ - rowflow[(to-1) % nNrow][nNcol]-=flow; + row_index = (to-1) % nNrow; + col_index = nNcol; + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + rowflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); }else if(!(to % nNrow)){ - colflow[nNrow][(int )((to/nNrow)-1)]-=flow; + row_index = nNrow; + col_index = (int )((to/nNrow)-1); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); }else{ fprintf(sp0,"Unassigned ground arc parsing cs2 solution\nAbort\n"); exit(ABNORMAL_EXIT); - } + } + }else if(from==(to+1)){ + num=from+(int )((from-1)/nNrow); + row_index = (num-1) % (nNrow+1); + col_index = (int )(num-1)/(nNrow+1); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(from==(to-1)){ + num=from+(int )((from-1)/nNrow)+1; + row_index = (num-1) % (nNrow+1); + col_index = (int )(num-1)/(nNrow+1); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(from==(to-nNrow)){ + num=from+nNrow; + row_index = (num-1) % nNrow; + col_index = (int )((num-1)/nNrow); + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + rowflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in 
computing flow\n"); + }else if(from==(to+nNrow)){ + num=from; + row_index = (num-1) % nNrow; + col_index = (int )((num-1)/nNrow); + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + rowflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); }else{ fprintf(sp0,"Non-grid arc parsing cs2 solution\nAbort\n"); exit(ABNORMAL_EXIT); diff --git a/contrib/stack/README.md b/contrib/stack/README.md index 389ddf3..3c806e6 100644 --- a/contrib/stack/README.md +++ b/contrib/stack/README.md @@ -4,16 +4,18 @@ Read the document for each stack processor for details. + [stripmapStack](./stripmapStack/README.md) + [topsStack](./topsStack/README.md) ++ [alosStack](./alosStack/alosStack_tutorial.txt) ### Installation -To use the TOPS or Stripmap stack processors you need to: +To use a stack processor you need to: 1. Install ISCE as usual 2. Depending on which stack processor you need to try, add the path of the folder containing the python scripts to your `$PATH` environment variable as follows: - add the full path of your **contrib/stack/topsStack** to `$PATH` to use the topsStack for processing a stack of Sentinel-1 TOPS data - add the full path of your **contrib/stack/stripmapStack** to `$PATH` to use the stripmapStack for processing a stack of StripMap data + - set environment variable `$PATH_ALOSSTACK` by doing: export PATH_ALOSSTACK=CODE_DIR/contrib/stack/alosStack to use the alosStack for processing a stack of ALOS-2 data Note: The stack processors do not show up in the install directory of your isce software. They can be found in the isce source directory. @@ -32,3 +34,4 @@ For StripMap stack processor and ionospheric phase estimation: For TOPS stack processing: + H. Fattahi, P. Agram, and M. Simons, “A network-based enhanced spectral diversity approach for TOPS time-series analysis,” IEEE Trans. Geosci. Remote Sens., vol. 55, no. 2, pp. 777–786, Feb. 2017. 
(https://ieeexplore.ieee.org/abstract/document/7637021/) + diff --git a/contrib/stack/alosStack/Stack.py b/contrib/stack/alosStack/Stack.py new file mode 100644 index 0000000..be583cf --- /dev/null +++ b/contrib/stack/alosStack/Stack.py @@ -0,0 +1,426 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application + + +DATA_DIR = Application.Parameter('dataDir', + public_name='data directory', + default=None, + type=str, + mandatory=False, + doc="directory of data, where data of each date are in an individual directory") + +FRAMES = Application.Parameter('frames', + public_name = 'frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'frames to process') + +POLARIZATION = Application.Parameter('polarization', + public_name='polarization', + default='HH', + type=str, + mandatory=False, + doc="polarization to process") + +STARTING_SWATH = Application.Parameter('startingSwath', + public_name='starting swath', + default=None, + type=int, + mandatory=False, + doc="starting swath to process") + +ENDING_SWATH = Application.Parameter('endingSwath', + public_name='ending swath', + default=None, + type=int, + mandatory=False, + doc="ending swath to process") + +DEM = Application.Parameter('dem', + public_name='dem for coregistration', + default=None, + type=str, + mandatory=False, + doc='dem for coregistration file') + +DEM_GEO = Application.Parameter('demGeo', + public_name='dem for geocoding', + default=None, + type=str, + mandatory=False, + doc='dem for geocoding file') + +WBD = Application.Parameter('wbd', + public_name='water body', + default=None, + type=str, + mandatory=False, + doc='water body file') + +DATE_REFERENCE_STACK = Application.Parameter('dateReferenceStack', + public_name='reference date of the stack', + default=None, + type=str, + mandatory=False, + doc="reference date of the stack") + 
+GRID_FRAME = Application.Parameter('gridFrame', + public_name='grid frame', + default=None, + type=str, + mandatory=False, + doc="resample all frames/swaths to the grid size of this frame") + +GRID_SWATH = Application.Parameter('gridSwath', + public_name='grid swath', + default=None, + type=int, + mandatory=False, + doc="resample all frames/swaths to the grid size of this swath") + +NUMBER_OF_SUBSEQUENT_DATES = Application.Parameter('numberOfSubsequentDates', + public_name='number of subsequent dates', + default=4, + type=int, + mandatory=False, + doc="number of subsequent dates used to form pairs") + +PAIR_TIME_SPAN_MINIMUM = Application.Parameter('pairTimeSpanMinimum', + public_name = 'pair time span minimum in years', + default = None, + type=float, + mandatory=False, + doc = 'pair time span minimum in years') + +PAIR_TIME_SPAN_MAXIMUM = Application.Parameter('pairTimeSpanMaximum', + public_name = 'pair time span maximum in years', + default = None, + type=float, + mandatory=False, + doc = 'pair time span maximum in years') + +DATES_INCLUDED = Application.Parameter('datesIncluded', + public_name = 'dates to be included', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be included') + +#MUST BE FIRST DATE - SECOND DATE!!! +PAIRS_INCLUDED = Application.Parameter('pairsIncluded', + public_name = 'pairs to be included', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be included') + +DATES_EXCLUDED = Application.Parameter('datesExcluded', + public_name = 'dates to be excluded', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be excluded') + +#MUST BE FIRST DATE - SECOND DATE!!! 
+PAIRS_EXCLUDED = Application.Parameter('pairsExcluded', + public_name = 'pairs to be excluded', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be excluded') + +DATE_REFERENCE_STACK_ION = Application.Parameter('dateReferenceStackIon', + public_name='reference date of the stack for estimating ionosphere', + default=None, + type=str, + mandatory=False, + doc="reference date of the stack in estimating ionosphere") + +NUMBER_OF_SUBSEQUENT_DATES_ION = Application.Parameter('numberOfSubsequentDatesIon', + public_name='number of subsequent dates for estimating ionosphere', + default=4, + type=int, + mandatory=False, + doc="number of subsequent dates used to form pairs for estimating ionosphere") + +PAIR_TIME_SPAN_MINIMUM_ION = Application.Parameter('pairTimeSpanMinimumIon', + public_name = 'pair time span minimum in years for estimating ionosphere', + default = None, + type=float, + mandatory=False, + doc = 'pair time span minimum in years for estimating ionosphere') + +PAIR_TIME_SPAN_MAXIMUM_ION = Application.Parameter('pairTimeSpanMaximumIon', + public_name = 'pair time span maximum in years for estimating ionosphere', + default = None, + type=float, + mandatory=False, + doc = 'pair time span maximum in years for estimating ionosphere') + +DATES_INCLUDED_ION = Application.Parameter('datesIncludedIon', + public_name = 'dates to be included for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be included for estimating ionosphere') + +#MUST BE FIRST DATE - SECOND DATE!!! 
+PAIRS_INCLUDED_ION = Application.Parameter('pairsIncludedIon', + public_name = 'pairs to be included for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be included for estimating ionosphere') + +DATES_EXCLUDED_ION = Application.Parameter('datesExcludedIon', + public_name = 'dates to be excluded for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be excluded for estimating ionosphere') + +#MUST BE FIRST DATE - SECOND DATE!!! +PAIRS_EXCLUDED_ION = Application.Parameter('pairsExcludedIon', + public_name = 'pairs to be excluded for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be excluded for estimating ionosphere') + +DATES_REPROCESS = Application.Parameter('datesReprocess', + public_name = 'reprocess already processed dates', + default=False, + type=bool, + mandatory=False, + doc = 'reprocess already processed dates') + +PAIRS_REPROCESS = Application.Parameter('pairsReprocess', + public_name = 'reprocess already processed pairs', + default=False, + type=bool, + mandatory=False, + doc = 'reprocess already processed pairs') + +PAIRS_REPROCESS_ION = Application.Parameter('pairsReprocessIon', + public_name = 'reprocess already processed pairs for estimating ionosphere', + default=False, + type=bool, + mandatory=False, + doc = 'reprocess already processed pairs for estimating ionosphere') + +DATES_PROCESSING_DIR = Application.Parameter('datesProcessingDir', + public_name='dates processing directory', + default='dates', + type=str, + mandatory=False, + doc="directory for processing all dates") + +DATES_RESAMPLED_DIR = Application.Parameter('datesResampledDir', + public_name='dates resampled directory', + default='dates_resampled', + type=str, + mandatory=False, + doc="directory for all dates resampled") + +PAIRS_PROCESSING_DIR = Application.Parameter('pairsProcessingDir', + 
public_name='pairs processing directory', + default='pairs', + type=str, + mandatory=False, + doc="directory for processing all pairs") + +BASELINE_DIR = Application.Parameter('baselineDir', + public_name='baseline directory', + default='baseline', + type=str, + mandatory=False, + doc="directory for baselines") + +DATES_DIR_ION = Application.Parameter('datesDirIon', + public_name='dates directory for ionosphere', + default='dates_ion', + type=str, + mandatory=False, + doc="dates directory for ionosphere") + +PAIRS_PROCESSING_DIR_ION = Application.Parameter('pairsProcessingDirIon', + public_name='pairs processing directory for estimating ionosphere', + default='pairs_ion', + type=str, + mandatory=False, + doc="directory for processing all pairs for estimating ionosphere") + +#import insar processing parameters from alos2App.py +#from alos2App import REFERENCE_DIR +#from alos2App import SECONDARY_DIR +#from alos2App import REFERENCE_FRAMES +#from alos2App import SECONDARY_FRAMES +#from alos2App import REFERENCE_POLARIZATION +#from alos2App import SECONDARY_POLARIZATION +#from alos2App import STARTING_SWATH +#from alos2App import ENDING_SWATH +#from alos2App import DEM +#from alos2App import DEM_GEO +#from alos2App import WBD +from alos2App import USE_VIRTUAL_FILE +from alos2App import USE_GPU +#from alos2App import BURST_SYNCHRONIZATION_THRESHOLD +#from alos2App import CROP_SLC +from alos2App import USE_WBD_FOR_NUMBER_OFFSETS +from alos2App import NUMBER_RANGE_OFFSETS +from alos2App import NUMBER_AZIMUTH_OFFSETS +from alos2App import NUMBER_RANGE_LOOKS1 +from alos2App import NUMBER_AZIMUTH_LOOKS1 +from alos2App import NUMBER_RANGE_LOOKS2 +from alos2App import NUMBER_AZIMUTH_LOOKS2 +from alos2App import NUMBER_RANGE_LOOKS_SIM +from alos2App import NUMBER_AZIMUTH_LOOKS_SIM +from alos2App import SWATH_OFFSET_MATCHING +from alos2App import FRAME_OFFSET_MATCHING +from alos2App import FILTER_STRENGTH +from alos2App import FILTER_WINSIZE +from alos2App import 
FILTER_STEPSIZE +from alos2App import REMOVE_MAGNITUDE_BEFORE_FILTERING +from alos2App import WATERBODY_MASK_STARTING_STEP +#from alos2App import GEOCODE_LIST +from alos2App import GEOCODE_BOUNDING_BOX +from alos2App import GEOCODE_INTERP_METHOD + #ionospheric correction parameters +from alos2App import DO_ION +from alos2App import APPLY_ION +from alos2App import NUMBER_RANGE_LOOKS_ION +from alos2App import NUMBER_AZIMUTH_LOOKS_ION +from alos2App import MASKED_AREAS_ION +from alos2App import SWATH_PHASE_DIFF_SNAP_ION +from alos2App import SWATH_PHASE_DIFF_LOWER_ION +from alos2App import SWATH_PHASE_DIFF_UPPER_ION +from alos2App import FIT_ION +from alos2App import FILT_ION +from alos2App import FIT_ADAPTIVE_ION +from alos2App import FILT_SECONDARY_ION +from alos2App import FILTERING_WINSIZE_MAX_ION +from alos2App import FILTERING_WINSIZE_MIN_ION +from alos2App import FILTERING_WINSIZE_SECONDARY_ION +from alos2App import FILTER_STD_ION +from alos2App import FILTER_SUBBAND_INT +from alos2App import FILTER_STRENGTH_SUBBAND_INT +from alos2App import FILTER_WINSIZE_SUBBAND_INT +from alos2App import FILTER_STEPSIZE_SUBBAND_INT +from alos2App import REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT + + +## Common interface for all insar applications. 
+class Stack(Application): + family = 'stackinsar' + parameter_list = (DATA_DIR, + FRAMES, + POLARIZATION, + STARTING_SWATH, + ENDING_SWATH, + DEM, + DEM_GEO, + WBD, + DATE_REFERENCE_STACK, + GRID_FRAME, + GRID_SWATH, + NUMBER_OF_SUBSEQUENT_DATES, + PAIR_TIME_SPAN_MINIMUM, + PAIR_TIME_SPAN_MAXIMUM, + DATES_INCLUDED, + PAIRS_INCLUDED, + DATES_EXCLUDED, + PAIRS_EXCLUDED, + DATE_REFERENCE_STACK_ION, + NUMBER_OF_SUBSEQUENT_DATES_ION, + PAIR_TIME_SPAN_MINIMUM_ION, + PAIR_TIME_SPAN_MAXIMUM_ION, + DATES_INCLUDED_ION, + PAIRS_INCLUDED_ION, + DATES_EXCLUDED_ION, + PAIRS_EXCLUDED_ION, + DATES_REPROCESS, + PAIRS_REPROCESS, + PAIRS_REPROCESS_ION, + DATES_PROCESSING_DIR, + DATES_RESAMPLED_DIR, + PAIRS_PROCESSING_DIR, + BASELINE_DIR, + DATES_DIR_ION, + PAIRS_PROCESSING_DIR_ION, + #insar processing parameters, same as those in alos2App.py + USE_VIRTUAL_FILE, + USE_GPU, + USE_WBD_FOR_NUMBER_OFFSETS, + NUMBER_RANGE_OFFSETS, + NUMBER_AZIMUTH_OFFSETS, + NUMBER_RANGE_LOOKS1, + NUMBER_AZIMUTH_LOOKS1, + NUMBER_RANGE_LOOKS2, + NUMBER_AZIMUTH_LOOKS2, + NUMBER_RANGE_LOOKS_SIM, + NUMBER_AZIMUTH_LOOKS_SIM, + SWATH_OFFSET_MATCHING, + FRAME_OFFSET_MATCHING, + FILTER_STRENGTH, + FILTER_WINSIZE, + FILTER_STEPSIZE, + REMOVE_MAGNITUDE_BEFORE_FILTERING, + WATERBODY_MASK_STARTING_STEP, + GEOCODE_BOUNDING_BOX, + GEOCODE_INTERP_METHOD, + #ionospheric correction parameters + DO_ION, + APPLY_ION, + NUMBER_RANGE_LOOKS_ION, + NUMBER_AZIMUTH_LOOKS_ION, + MASKED_AREAS_ION, + SWATH_PHASE_DIFF_SNAP_ION, + SWATH_PHASE_DIFF_LOWER_ION, + SWATH_PHASE_DIFF_UPPER_ION, + FIT_ION, + FILT_ION, + FIT_ADAPTIVE_ION, + FILT_SECONDARY_ION, + FILTERING_WINSIZE_MAX_ION, + FILTERING_WINSIZE_MIN_ION, + FILTERING_WINSIZE_SECONDARY_ION, + FILTER_STD_ION, + FILTER_SUBBAND_INT, + FILTER_STRENGTH_SUBBAND_INT, + FILTER_WINSIZE_SUBBAND_INT, + FILTER_STEPSIZE_SUBBAND_INT, + REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT) + + facility_list = () + + def __init__(self, family='', name='',cmdline=None): + import isceobj + + 
super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + + return None + + diff --git a/contrib/stack/alosStack/StackPulic.py b/contrib/stack/alosStack/StackPulic.py new file mode 100644 index 0000000..ba91997 --- /dev/null +++ b/contrib/stack/alosStack/StackPulic.py @@ -0,0 +1,325 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + + +def loadInsarUserParameters(filename): + import os + from isce.applications.alos2App import Alos2InSAR + + #application object cannot recognize extension + if filename.endswith('.xml'): + filename = os.path.splitext(filename)[0] + + #here, Alos2InSAR object is only used for reading and storing parameters + #none of its other attibutes or functions are used. + insar = Alos2InSAR(name=filename) + insar.configure() + + return insar + + +def loadStackUserParameters(filename): + import os + from Stack import Stack + + #application object cannot recognize extension + if filename.endswith('.xml'): + filename = os.path.splitext(filename)[0] + + stack = Stack(name=filename) + stack.configure() + + return stack + + +def loadInsarProcessingParameters(name): + import os + import pickle + + from isceobj.Alos2Proc import Alos2Proc + + try: + toLoad = Alos2Proc() + toLoad.load(name + '.xml') + with open(name, 'rb') as f: + setattr(toLoad, 'procDoc', pickle.load(f)) + except IOError: + print("Cannot open %s" % (name)) + + return toLoad + + +def dumpInsarProcessingParameters(obj, name): + import os + import pickle + + ############################## + #do this to output important paramters to xml (alos2Proc.xml) after each step. 
def loadProduct(xmlname):
    '''
    Load the product using Product Manager.

    xmlname: path of the XML file to load
    '''
    from iscesys.Component.ProductManager import ProductManager as PM

    pm = PM()
    pm.configure()

    obj = pm.loadProduct(xmlname)

    return obj


def saveProduct(obj, xmlname):
    '''
    Save the product to an XML file using Product Manager.

    obj: product object to dump
    xmlname: path of the XML file to write
    '''
    from iscesys.Component.ProductManager import ProductManager as PM

    pm = PM()
    pm.configure()

    pm.dumpProduct(obj, xmlname)

    return None


def loadTrack(trackDir, date):
    '''
    Load the track using Product Manager.

    trackDir: where *.track.xml is located
    date: YYMMDD
    '''
    import os
    import glob

    frames = sorted(glob.glob(os.path.join(trackDir, 'f*_*/{}.frame.xml'.format(date))))
    track = loadProduct(os.path.join(trackDir, '{}.track.xml'.format(date)))

    track.frames = []
    for x in frames:
        track.frames.append(loadProduct(x))

    return track


def saveTrack(track, date):
    '''
    Save the track to XML files using Product Manager.

    track: track object
    date: YYMMDD

    Frame XML files are written into existing f*_* folders under the current
    directory; a missing frame folder is created first.
    '''
    import os
    import glob

    #dump track object
    saveProduct(track, date+'.track.xml')

    for i in range(len(track.frames)):
        #find frame folder
        frameDirs = sorted(glob.glob('f{}_*'.format(i+1)))
        if frameDirs == []:
            frameDir = 'f{}_{}'.format(i+1, track.frames[i].frameNumber)
            print('no existing frame folder found at frame {}, create a frame folder {}'.format(i+1, frameDir))
            #bug fix: the folder announced above was never actually created, so the
            #os.chdir below always failed in this branch
            os.makedirs(frameDir, exist_ok=True)
        else:
            frameDir = frameDirs[0]

        #dump frame object
        #NOTE(review): this compares frameNumber against a 4-character slice of the
        #folder name; it assumes frameNumber is a 4-character string -- confirm upstream type
        if track.frames[i].frameNumber != frameDir[-4:]:
            print('frame number in track object {} is different from that in frame folder name: {} at frame {}'.format(
                track.frames[i].frameNumber, frameDir[-4:], i+1))
            print('dumping it to {}'.format(frameDir))

        os.chdir(frameDir)
        saveProduct(track.frames[i], date+'.frame.xml')
        os.chdir('../')

    return None


def datesFromPairs(pairs):
    '''
    Return the sorted unique dates appearing in a list of pair names.

    pairs: list of strings formatted as 'dateReference-dateSecondary'
    '''
    dates = []
    for x in pairs:
        dateReference = x.split('-')[0]
        dateSecondary = x.split('-')[1]
        if dateReference not in dates:
            dates.append(dateReference)
        if dateSecondary not in dates:
            dates.append(dateSecondary)
    dates = sorted(dates)
    return dates


def stackDateStatistics(idir, dateReference):
    '''
    Scan the stack data directory and report its dates, frames and swaths.

    idir: input directory where data of each date is located. only folders are recognized
    dateReference: reference date, str type format: 'YYMMDD'

    returns: (dateDirs, dates, frames, swaths, dateIndexReference)
    '''
    import os
    import glob

    #get date folders
    dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*')))
    dateDirs = [x for x in dateDirs if os.path.isdir(x)]

    #find index of reference date
    dates = []
    dateIndexReference = None
    for i in range(len(dateDirs)):
        date = os.path.basename(dateDirs[i])
        dates.append(date)
        if date == dateReference:
            dateIndexReference = i
    if dateIndexReference is None:
        #bug fix: corrected 'pleasae' typo in the user-facing error message
        raise Exception('cannot get reference date {} from the data list, please check your input'.format(dateReference))
    else:
        print('reference date index {}'.format(dateIndexReference))

    #use one date to find frames and swaths. any date should work, here we use dateIndexReference
    frames = sorted([x[-4:] for x in glob.glob(os.path.join(dateDirs[dateIndexReference], 'f*_*'))])
    swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join(dateDirs[dateIndexReference], 'f1_*', 's*'))])

    #print result
    ndate = len(dates)
    print('\nlist of dates:')
    print(' index date frames')
    print('=======================================================')
    for i in range(ndate):
        if dates[i] == dateReference:
            print(' %03d %s'%(i, dates[i])+' {}'.format(frames)+' reference')
        else:
            print(' %03d %s'%(i, dates[i])+' {}'.format(frames))
    print('\n')

    # str list, str list, str list, int list, int
    return (dateDirs, dates, frames, swaths, dateIndexReference)


def acquisitionModesAlos2():
    '''
    Return ALOS-2 acquisition mode codes as they appear in image file names.

    returns: (spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes)
    '''

    spotlightModes = ['SBS']
    stripmapModes = ['UBS', 'UBD', 'HBS', 'HBD', 'HBQ', 'FBS', 'FBD', 'FBQ']
    scansarNominalModes = ['WBS', 'WBD', 'WWS', 'WWD']
    scansarWideModes = ['VBS', 'VBD']
    #derive the combined list so it cannot drift out of sync with the two sub-lists
    scansarModes = scansarNominalModes + scansarWideModes

    return (spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes)
class createObject(object):
    '''
    Empty container class; instances act as simple attribute bags.
    '''
    pass


def subbandParameters(track):
    '''
    Compute range subband parameters for split-spectrum (ionosphere) processing.

    track: track object providing radarWavelength and, via frames[0].swaths[0],
           rangeBandwidth and rangeSamplingRate

    returns: (subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix)
             bandwidths and frequency centers are normalized by the range sampling rate
    '''
    #speed of light from: components/isceobj/Planet/AstronomicalHandbook.py
    SPEED_OF_LIGHT = 299792458.0

    #using 1/3, 1/3, 1/3 band split
    radarWavelength = track.radarWavelength
    rangeBandwidth = track.frames[0].swaths[0].rangeBandwidth
    rangeSamplingRate = track.frames[0].swaths[0].rangeSamplingRate
    radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0)
    radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0)
    subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper]
    subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate]
    subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate]

    subbandPrefix = ['lower', 'upper']

    return (subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix)


def formInterferogram(slcReference, slcSecondary, interferogram, amplitude, numberRangeLooks, numberAzimuthLooks):
    '''
    Form a multilooked interferogram and amplitude image from two coregistered SLCs.

    slcReference:      reference SLC file (with an ISCE .xml descriptor next to it)
    slcSecondary:      secondary SLC file
    interferogram:     output interferogram file
    amplitude:         output amplitude file
    numberRangeLooks:  number of range looks
    numberAzimuthLooks: number of azimuth looks
    '''
    import numpy as np
    import isce, isceobj
    from isceobj.Alos2Proc.Alos2ProcPublic import multilook
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml

    img = isceobj.createImage()
    img.load(slcReference+'.xml')
    width = img.width
    length = img.length

    width2 = int(width / numberRangeLooks)
    length2 = int(length / numberAzimuthLooks)

    #bug fix: use context managers so all four file handles are closed even if an
    #exception occurs (the original left them open on error)
    with open(slcReference, 'rb') as fpRef, open(slcSecondary, 'rb') as fpSec, \
         open(interferogram, 'wb') as fpInf, open(amplitude, 'wb') as fpAmp:
        for k in range(length2):
            if (((k+1)%200) == 0):
                print("processing line %6d of %6d" % (k+1, length2), end='\r', flush=True)
            #read one multilook window of full-resolution lines from each SLC
            ref = np.fromfile(fpRef, dtype=np.complex64, count=numberAzimuthLooks * width).reshape(numberAzimuthLooks, width)
            sec = np.fromfile(fpSec, dtype=np.complex64, count=numberAzimuthLooks * width).reshape(numberAzimuthLooks, width)
            inf = multilook(ref*np.conjugate(sec), numberAzimuthLooks, numberRangeLooks, mean=False)
            amp = np.sqrt(multilook(ref.real*ref.real+ref.imag*ref.imag, numberAzimuthLooks, numberRangeLooks, mean=False)) + 1j * \
                  np.sqrt(multilook(sec.real*sec.real+sec.imag*sec.imag, numberAzimuthLooks, numberRangeLooks, mean=False))
            #zero the amplitude where either image has zero power
            index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) )
            amp[index]=0
            inf.tofile(fpInf)
            amp.tofile(fpAmp)
    print("processing line %6d of %6d" % (length2, length2))

    create_xml(interferogram, width2, length2, 'int')
    create_xml(amplitude, width2, length2, 'amp')
default: 1.0 year') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + dates = sorted(glob.glob(os.path.join(inps.dir, '*'))) + dates = sorted([os.path.basename(x) for x in dates]) + #for x in dates: + # print(x) + + #read standard configurations + tree = ET.parse(inps.xml) + root = tree.getroot() + + ndate = len(dates) + datefmt = "%y%m%d" + pairs_created = [] + pairs_not_created = [] + for i in range(ndate): + mdate = dates[i] + mtime = datetime.datetime.strptime(mdate, datefmt) + for j in range(inps.num): + if i+j+1 <= ndate - 1: + sdate = dates[i+j+1] + stime = datetime.datetime.strptime(sdate, datefmt) + pair = mdate + '-' + sdate + if np.absolute((stime - mtime).total_seconds()) < inps.yr * 365.0 * 24.0 * 3600: + pairs_created.append(pair) + print('creating pair: {}'.format(pair)) + #create pair dir + if not os.path.exists(pair): + os.makedirs(pair) + #create xml + safe = root.find("component/property[@name='master directory']") + safe.text = '{}'.format(os.path.join(inps.dir, mdate)) + safe = root.find("component/property[@name='slave directory']") + safe.text = '{}'.format(os.path.join(inps.dir, sdate)) + tree.write(os.path.join(pair, 'alos2App.xml')) + else: + pairs_not_created.append(pair) + + + print('total number of pairs created: {}'.format(len(pairs_created))) + + if pairs_not_created != []: + print('\nthe following pairs are not created because their time spans >= {} years'.format(inps.yr)) + for x in pairs_not_created: + print(x) + print('total number of pairs not created: {}'.format(len(pairs_not_created))) + else: + print('\nall possible pairs are created') diff --git a/contrib/stack/alosStack/alosStack.xml b/contrib/stack/alosStack/alosStack.xml new file mode 100644 index 0000000..0c3595b --- /dev/null +++ b/contrib/stack/alosStack/alosStack.xml @@ -0,0 +1,379 @@ + + + + + + + ../data/saf_d169 + + 
../data/saf_d169_dem/dem_1_arcsec/demLat_N35_N44_Lon_W126_W118.dem.wgs84 + ../data/saf_d169_dem/dem_3_arcsec/demLat_N35_N44_Lon_W126_W118.dem.wgs84 + ../data/saf_d169_dem/wbd_1_arcsec/swbdLat_N35_N44_Lon_W126_W118.wbd + + 150408 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/alosStack_tutorial.txt b/contrib/stack/alosStack/alosStack_tutorial.txt new file mode 100644 index 0000000..bbcfbb4 --- /dev/null +++ b/contrib/stack/alosStack/alosStack_tutorial.txt @@ -0,0 +1,250 @@ +###################################################################################### +# Tutorial for alosStack +# Cunren Liang, October 2020 +###################################################################################### + +This is the tutorial of alosStack processor. + + +########################################### +# 0. SET ENVIRONMENT VARIABLE +########################################### + +Set environment variable 'PATH_ALOSSTACK' +export PATH_ALOSSTACK=CODE_DIR/contrib/stack/alosStack + +where CODE_DIR is the directory of your isce code. Note that alosStack is not installed when you install +the software, so CODE_DIR is your code directory rather than installation directory. + + +########################################### +# 1. PREPARE DATA +########################################### + +1. ALOS-2 data +Currently the processor only supports the processing of a stack of data acquired in the same mode. + +To find the acquisition mode code, check the unpacked ALOS-2 product. For example, in the following +file name + +IMG-HH-ALOS2183010685-171012-FBDR1.1__A + ^^^ +FBD (indicated by ^) is the acquisition mode code. 
Here is the list of acquistion modes: + + Operation Mode | Mode (AUIG2) | Mode (in file name) +-------------------------------------------------------------- + spotlight | SPT | SBS +-------------------------------------------------------------- + stripmap | SM1 | UBS, UBD + | SM2 | HBS, HBD, HBQ + | SM3 | FBS, FBD, FBQ +-------------------------------------------------------------- + ScanSAR | WD1 | WBS, WBD, WWS, WWD + | WD2 | VBS, VBD + + +Create a folder such as 'saf_d169', and in this folder, unpack all frames of each date in an individual folder +named YYMMDD. YYMMDD is the acquistion date, and it must be in this format. Now the data directory should look +like + +saf_d169_data-------150225-------IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F1 + |__150408 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F2 + |__150520 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F3 + |__150701 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F4 + |__... |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F5 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F1 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F2 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F3 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F4 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F5 + |__LED-ALOS2041062800-150225-WBDR1.1__D + |__LED-ALOS2041062850-150225-WBDR1.1__D + +2. DEM and water body + +You MUST FIRST have an account to download DEM and water body. See +https://github.com/isce-framework/isce2#notes-on-digital-elevation-models +or +https://github.com/isce-framework/isce2 +for more details. + +See input xml file alosStack.xml in this folder on how to download DEM and water body. + + +########################################### +# 2. PROCESS DATA +########################################### + +1. Create and enter a folder for processing data, e.g. +mkdir saf_d169_proc +cd saf_d169_proc + +2. Input xml file alosStack.xml can be found in code directory. Copy it to current folder and simply set +the parameters. 
+cp ${PATH_ALOSSTACK}/alosStack.xml ./ + +3. Create command files for processing data. Run +${PATH_ALOSSTACK}/create_cmds.py -stack_par alosStack.xml + +4. Do most of the single date processing. Run +./cmd_1.sh + +In all command files including cmd_1.sh, note that same commands for processing different dates either +listed repeatedly or in a loop can run parallelly. The 'resample to a common grid' step in cmd_1.sh is +a very time consuming step. These commands can of course run parallelly, but note that each command may +use up to 7G memory. + +5. InSAR processing before ionosphere correction. Run +./cmd_2.sh + +6. Ionosphere correction (if do ionospheric phase estimation, by default True). If the following parameter of +the input xml file is True (default) + + + +Run +./cmd_3.sh + +After it finishes, check the images in folder 'fig_ion' to see if ionosphere estimation is OK for each +pair. The anomalies include dense fringes or slight phase difference between adjacent swaths in ScanSAR +interferograms after removing ionosphere. There might also be dense fringes elsewhere. These are all anomalies +and the associated ionosphere estimation results should not be used in the next steps. + +At the end of this command file, there is a step called 'estimate ionospheric phase for each date'. If you found +some pairs with ionosphere estimation anomalies, specify them by adding argument '-exc_pair' to the command ion_ls.py. +Make sure all dates are still connected after excluding these pairs, and then run ion_ls.py. + +You can plot baselines to see if the pairs are fully connected, e.g. +${PATH_ALOSSTACK}/plot_baseline.py -baseline baseline/baseline_center.txt -pairs_dir pairs_ion -pairs_exc 150520-150701 -output baselines.pdf + +If the following parameters of the input xml file are True (default) + + + + +there is a final step called 'correct ionosphere' in cmd_3.sh, uncomment the code marked by '#uncomment to run this command' +and then run the entire step. + +7. 
InSAR processing after ionosphere correction. Run +./cmd_4.sh + +If everything is OK, you may consider removing the huge slc files in folder dates_resampled. When you need them in +the future, you can re-run the commands in the '#resample to a common grid' step in cmd_1.sh. + +Furthermore, you may consider removing the huge original data files you unpacked previously. + + +########################################### +# 3. ADDING MORE DATES +########################################### + +Sometimes we want to add new acquisitions to the already processed stack. To do this, + +1. Unpack the new acquisitions in data directory following #1. PREPARE DATA. + +2. Repeat the processing in #2. PROCESS DATA. + +We recommend saving previous command files in a folder before new processing. Note that even the previously processed +pairs will be reprocessed again by cmd_4.sh if the following parameters of the input xml file are True (default) + + + + +because ionospheric phase will be estimated by ion_ls.py at the end of cmd_3.sh for each date with new pairs included, +and therefore all steps after ion_ls.py should be reprocessed. + + +########################################### +# 4. CHECK RESULTS +########################################### + +baseline baseline files +burst_synchronization.txt burst synchronization +dates original data of each date +dates_ion ionospheric phase of each date +dates_resampled resampled data of each date. Data of all other dates are coregistered to reference date. + The parameter xml files including *.track.xml and f*_*/*.frame.xml are in reference date + folder. These should be the files you should use in most cases, such as looking for data + parameters, preparing for time series analysis etc.
+fig_ion figures for checking ionosphere estimation results +pairs pairs of InSAR processing +pairs_ion pairs for ionosphere estimation + +If you want to know more details about the files in each folder, read +CODE_DIR/examples/input_files/alos2/alos2_tutorial.txt +File name conventions and directory structures are mostly the same. + + +########################################### +# 5. KNOWN ISSUES +########################################### + +1. Issues with Ionospheric Correction +According to our experience, ionospheric correction works for most of the interferograms. Because it +relies on coherence and phase unwrapping, it does not work in some cases. These include: + +(1) data have low coherence +(2) the majority of the imaged area is low coherence area like lake, ocean... +(3) the imaged area is completely divided into several isolated areas by low coherence areas, such as + islands. + +In addition to the above issues, there are also data-mode-related issues. +(1) ScanSAR-ScanSAR interferometry. While you can process one single subswath, it's better to process +more than one subswath if the addistional subswath has good coherence. This is good for ionospheric +correction. + +(2) Range distortions in JAXA product. This mostly happens in stripmap-stripmap interferometry using +data not covering Japan. If you see very dense fringes in the corrected inteferogram, probably it is +caused by this problem. This has been reported to JAXA and JAXA is working on debugging the focusing +program. + +UPDATE: On November 20, 2018 (JST), JAXA updated the software for PALSAR-2 standard products. Therefore, +if your product is ordered after this time, you don't have this problem. + + +2. How do I improve ionospheric correction? + +First of all, we recommend reading through cmd_3.sh before manually improving ionosphere estimation results. 
+ +Isolated areas lead to relative phase unwrapping errors, and therefore lead to significant errors in ionosphere +estimation result, usually shown as dense fringes in the corrected interferograms. If your scene covers an area +with two or more isolated areas and you are interested in one of the areas, you can mask out the other areas by +setting "areas masked out in ionospheric phase estimation". + +Or if you have processed the data, you can also specify the argument -masked_areas in ion_filt.py in cmd_3.sh. +Then check the updated results following step '#check ionosphere estimation results' in cmd_3.sh + +For ScanSAR, the software uses some accurate values for removing phase difference between adjacent swaths. +This, however, does not work well sometimes as a result of the inconsistencies between different JAXA products, +especially products processed by different versions of JAXA software. As a result of this, you may see dense +fringes in the ionospheric correction result. In this case, you can try not to use the aforementioned accurate +values by setting -snap in ion_subband.py in cmd_3.sh, and run this command and the remaining commands to see +if ionosphere estimation results have improvement. + +Note that each time you update ionosphere estimation results, you need to re-run the steps after +'#estimate ionospheric phase for each date' (including this step) in cmd_3.sh, as well as cmd_4.sh + +4. ScanSAR burst synchronization +For ScanSAR data acquired before February 8, 2015, chances of having enough burst synchronization for +interferometry are very low. Don't include data acquired before this date in your stack processing. + + +########################################### +# 6. REFERENCES +########################################### +The methods and algorithms implemented can be found in the following papers. + +1. ScanSAR or multi-mode InSAR processing +C. Liang and E. J.
Fielding, "Interferometry with ALOS-2 full-aperture ScanSAR data," +IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2739-2750, May 2017. + +2. Ionospheric correction, burst-by-burst ScanSAR processing, and burst-mode spectral diversity (SD) or +multi-aperture InSAR (MAI) processing +C. Liang and E. J. Fielding, "Measuring azimuth deformation with L-band ALOS-2 ScanSAR interferometry," +IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2725-2738, May 2017. + +3. Ionospheric correction +C. Liang, Z. Liu, E. J. Fielding, and R. Bürgmann, "InSAR time series analysis of L-band wide-swath SAR +data acquired by ALOS-2," +IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-4506, Aug. 2018. + diff --git a/contrib/stack/alosStack/compute_baseline.py b/contrib/stack/alosStack/compute_baseline.py new file mode 100644 index 0000000..463ba44 --- /dev/null +++ b/contrib/stack/alosStack/compute_baseline.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr + +from StackPulic import loadTrack +from StackPulic import stackDateStatistics + + +def computeBaseline(trackReference, trackSecondary, azimuthTime, rangeDistance): + import numpy as np + + from isceobj.Planet.Planet import Planet + + #modify Piyush's code for computing baslines + refElp = Planet(pname='Earth').ellipsoid + #for x in points: + referenceSV = trackReference.orbit.interpolate(azimuthTime, method='hermite') + target = trackReference.orbit.rdr2geo(azimuthTime, rangeDistance) + + slvTime, slvrng = trackSecondary.orbit.geo2rdr(target) + secondarySV = trackSecondary.orbit.interpolateOrbit(slvTime, method='hermite') + + targxyz = np.array(refElp.LLH(target[0], target[1], 
target[2]).ecef().tolist()) + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + sxyz = np.array(secondarySV.getPosition()) + + #to fix abrupt change near zero in baseline grid. JUN-05-2020 + mvelunit = mvel / np.linalg.norm(mvel) + sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (rangeDistance*rangeDistance + aa*aa - slvrng*slvrng)/(2.*rangeDistance*aa) + + Bpar = aa*costheta + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp = direction*perp + + return (Bpar, Bperp) + + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute baselines for a number of dates') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory where baseline of each date is output') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks. format: YYMMDD YYMMDD YYMMDD. If provided, only compute baseline grids of these dates') + parser.add_argument('-baseline_center', dest='baseline_center', type=str, default=None, + help = 'output baseline file at image center for all dates. 
If not provided, it will not be computed') + parser.add_argument('-baseline_grid', dest='baseline_grid', action='store_true', default=False, + help='compute baseline grid for each date') + parser.add_argument('-baseline_grid_width', dest='baseline_grid_width', type=int, default=10, + help = 'baseline grid width if compute baseline grid, default: 10') + parser.add_argument('-baseline_grid_length', dest='baseline_grid_length', type=int, default=10, + help = 'baseline grid length if compute baseline grid, default: 10') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + baselineCenterFile = inps.baseline_center + baselineGrid = inps.baseline_grid + + widthBaseline = inps.baseline_grid_width + lengthBaseline = inps.baseline_grid_length + + ####################################################### + + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + + #create output directory if it does not already exist + if not os.path.isdir(odir): + print('output directory {} does not exist, create'.format(odir)) + os.makedirs(odir, exist_ok=True) + os.chdir(odir) + + + #compute baseline + trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + bboxRdr = getBboxRdr(trackReference) + #at four corners + rangeMin = bboxRdr[0] + rangeMax = bboxRdr[1] + azimuthTimeMin = bboxRdr[2] + azimuthTimeMax = bboxRdr[3] + #at image center + azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0) + rangeMid = (rangeMin + rangeMax) / 2.0 + #grid size + rangeDelta = (rangeMax - rangeMin) / (widthBaseline - 1.0) + 
azimuthDelta = (azimuthTimeMax-azimuthTimeMin).total_seconds() / (lengthBaseline - 1.0) + + #baseline at image center + if baselineCenterFile is not None: + baselineCenter = ' reference date secondary date parallel baseline [m] perpendicular baseline [m]\n' + baselineCenter += '===========================================================================================\n' + + #baseline grid: two-band BIL image, first band: parallel baseline, perpendicular baseline + baseline = np.zeros((lengthBaseline*2, widthBaseline), dtype=np.float32) + + #compute baseline + for i in range(ndate): + if i == dateIndexReference: + continue + + + trackSecondary = loadTrack(dateDirs[i], dates[i]) + + #compute baseline at image center + if baselineCenterFile is not None: + (Bpar, Bperp) = computeBaseline(trackReference, trackSecondary, azimuthTimeMid, rangeMid) + baselineCenter += ' %s %s %9.3f %9.3f\n'%(dates[dateIndexReference], dates[i], Bpar, Bperp) + + if dateSecondary != []: + if dates[i] not in dateSecondary: + continue + + + #compute baseline grid + if baselineGrid: + baselineFile = '{}-{}.rmg'.format(dates[dateIndexReference], dates[i]) + if os.path.isfile(baselineFile): + print('baseline grid file {} already exists, do not create'.format(baselineFile)) + else: + for j in range(lengthBaseline): + for k in range(widthBaseline): + (baseline[j*2, k], baseline[j*2+1, k]) = computeBaseline(trackReference, trackSecondary, + azimuthTimeMin+datetime.timedelta(seconds=azimuthDelta*j), + rangeMin+rangeDelta*k) + baseline.astype(np.float32).tofile(baselineFile) + create_xml(baselineFile, widthBaseline, lengthBaseline, 'rmg') + + #dump baseline at image center + if baselineCenterFile is not None: + print('\nbaselines at image centers') + print(baselineCenter) + with open(baselineCenterFile, 'w') as f: + f.write(baselineCenter) + + + diff --git a/contrib/stack/alosStack/compute_burst_sync.py b/contrib/stack/alosStack/compute_burst_sync.py new file mode 100644 index 0000000..2021e3b --- 
/dev/null +++ b/contrib/stack/alosStack/compute_burst_sync.py @@ -0,0 +1,207 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj + +from StackPulic import loadTrack +from StackPulic import stackDateStatistics + + +def computeBurstSynchronization(trackReference, trackSecondary): + '''compute burst synchronization + ''' + + import datetime + import numpy as np + + frames = [frame.frameNumber for frame in trackReference.frames] + swaths = [swath.swathNumber for swath in trackReference.frames[0].swaths] + startingSwath = swaths[0] + endingSwath = swaths[-1] + + #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights + #in one frame, real unsynchronized time is the same for all swaths + unsynTime = 0 + #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime)) + #synTime = 0 + synPercentage = 0 + + numberOfFrames = len(frames) + numberOfSwaths = endingSwath - startingSwath + 1 + + unsynTimeAll = [] + synPercentageAll = [] + for i, frameNumber in enumerate(frames): + unsynTimeAll0 = [] + synPercentageAll0 = [] + for j, swathNumber in enumerate(range(startingSwath, endingSwath + 1)): + referenceSwath = trackReference.frames[i].swaths[j] + secondarySwath = trackSecondary.frames[i].swaths[j] + #using Piyush's code for computing range and azimuth offsets + midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5 + midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf) + llh = trackReference.orbit.rdr2geo(midSensingStart, midRange) + slvaz, slvrng = trackSecondary.orbit.geo2rdr(llh) + ###Translate to offsets + #note that secondary range pixel size and prf might be different from reference, here we 
assume there is a virtual secondary with same + #range pixel size and prf + rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5 + azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5 + + #compute burst synchronization + #burst parameters for ScanSAR wide mode not estimed yet + #if self._insar.modeCombination == 21: + scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff + #secondary burst start times corresponding to reference burst start times (100% synchronization) + scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \ + scburstStartLine + 100000*referenceSwath.burstCycleLength, \ + referenceSwath.burstCycleLength) + dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines) + #find the difference with minimum absolute value + unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))] + if np.absolute(unsynLines) >= secondarySwath.burstLength: + synLines = 0 + if unsynLines > 0: + unsynLines = secondarySwath.burstLength + else: + unsynLines = -secondarySwath.burstLength + else: + synLines = secondarySwath.burstLength - np.absolute(unsynLines) + + unsynTime += unsynLines / referenceSwath.prf + synPercentage += synLines / referenceSwath.burstLength * 100.0 + + unsynTimeAll0.append(unsynLines / referenceSwath.prf) + synPercentageAll0.append(synLines / referenceSwath.burstLength * 100.0) + + unsynTimeAll.append(unsynTimeAll0) + synPercentageAll.append(synPercentageAll0) + + ############################################################################################ + #illustration of the sign of the number of unsynchronized lines (unsynLines) + #The convention is the same as ampcor offset, that is, + # secondaryLineNumber = 
referenceLineNumber + unsynLines + # + # |-----------------------| ------------ + # | | ^ + # | | | + # | | | unsynLines < 0 + # | | | + # | | \ / + # | | |-----------------------| + # | | | | + # | | | | + # |-----------------------| | | + # Reference Burst | | + # | | + # | | + # | | + # | | + # |-----------------------| + # Secondary Burst + # + # + ############################################################################################ + + #getting average + #if self._insar.modeCombination == 21: + unsynTime /= numberOfFrames*numberOfSwaths + synPercentage /= numberOfFrames*numberOfSwaths + + return (unsynTimeAll, synPercentageAll) + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute burst synchronization for a number of dates') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-burst_sync_file', dest='burst_sync_file', type=str, required=True, + help = 'output burst synchronization file') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks. format: YYMMDD YYMMDD YYMMDD. 
If provided, only compute burst synchronization of these dates') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + burstSyncFile = inps.burst_sync_file + dateReference = inps.ref_date + dateSecondary = inps.sec_date + ####################################################### + + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + + #compute burst synchronization + trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + + frames = [frame.frameNumber for frame in trackReference.frames] + swaths = [swath.swathNumber for swath in trackReference.frames[0].swaths] + startingSwath = swaths[0] + endingSwath = swaths[-1] + + burstSync = ' reference date secondary date frame swath burst UNsync time [ms] burst sync [%]\n' + burstSync += '==================================================================================================\n' + + #compute burst synchronization + for i in range(ndate): + if i == dateIndexReference: + continue + if dateSecondary != []: + if dates[i] not in dateSecondary: + continue + + trackSecondary = loadTrack(dateDirs[i], dates[i]) + unsynTimeAll, synPercentageAll = computeBurstSynchronization(trackReference, trackSecondary) + + for j in range(nframe): + for k in range(nswath): + if (j == 0) and (k == 0): + burstSync += ' %s %s %s %d %8.2f %6.2f\n'%\ + (dates[dateIndexReference], dates[i], frames[j], swaths[k], unsynTimeAll[j][k]*1000.0, synPercentageAll[j][k]) + else: + burstSync += ' %s %d %8.2f %6.2f\n'%\ + (frames[j], swaths[k], unsynTimeAll[j][k]*1000.0, synPercentageAll[j][k]) + + burstSync += ' %8.2f (mean) %6.2f (mean)\n\n'%(np.mean(np.array(unsynTimeAll), dtype=np.float64)*1000.0, 
np.mean(np.array(synPercentageAll), dtype=np.float64)) + + + #dump burstSync + print('\nburst synchronization') + print(burstSync) + with open(burstSyncFile, 'w') as f: + f.write(burstSync) + diff --git a/contrib/stack/alosStack/create_cmds.py b/contrib/stack/alosStack/create_cmds.py new file mode 100644 index 0000000..e39754f --- /dev/null +++ b/contrib/stack/alosStack/create_cmds.py @@ -0,0 +1,1248 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +from StackPulic import loadStackUserParameters +from StackPulic import loadInsarUserParameters +from StackPulic import acquisitionModesAlos2 +from StackPulic import datesFromPairs + + +def checkDem(fileName): + if fileName is None: + raise Exception('dem for coregistration, dem for geocoding, water body must be set') + else: + if not os.path.isfile(fileName): + raise Exception('file not found: {}'.format(fileName)) + else: + img = isceobj.createDemImage() + img.load(fileName+'.xml') + if os.path.abspath(fileName) != img.filename: + raise Exception('please use absolute path for in {} xml file'.format(fileName)) + + +def getFolders(directory): + ''' + return sorted folders in a directory + ''' + import os + import glob + + folders = glob.glob(os.path.join(os.path.abspath(directory), '*')) + folders = sorted([os.path.basename(x) for x in folders if os.path.isdir(x)]) + + return folders + + +def unionLists(list1, list2): + import copy + + list3 = copy.deepcopy(list1) + + for x in list2: + if x not in list1: + list3.append(x) + + return sorted(list3) + + +def removeCommonItemsLists(list1, list2): + ''' + remove common items of list1 and list2 from list1 + ''' + + import copy + + list3 = copy.deepcopy(list1) + + list4 = [] + for x in list1: + if x in list2: + list3.remove(x) + 
list4.append(x) + + return (sorted(list3), sorted(list4)) + + +def formPairs(idir, numberOfSubsequentDates, pairTimeSpanMinimum=None, pairTimeSpanMaximum=None, + datesIncluded=None, pairsIncluded=None, + datesExcluded=None, pairsExcluded=None): + ''' + datesIncluded: list + pairsIncluded: list + datesExcluded: list + pairsExcluded: list + ''' + datefmt = "%y%m%d" + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) + dateDirs = [x for x in dateDirs if os.path.isdir(x)] + dates = [os.path.basename(x) for x in dateDirs] + ndate = len(dates) + + #check input parameters + if datesIncluded is not None: + if type(datesIncluded) != list: + raise Exception('datesIncluded must be a list') + for date in datesIncluded: + if date not in dates: + raise Exception('in datesIncluded, date {} is not found in data directory {}'.format(date, idir)) + + if pairsIncluded is not None: + if type(pairsIncluded) != list: + raise Exception('pairsIncluded must be a list') + #check reference must < secondary + for pair in pairsIncluded: + rdate = pair.split('-')[0] + sdate = pair.split('-')[1] + rtime = datetime.datetime.strptime(rdate, datefmt) + stime = datetime.datetime.strptime(sdate, datefmt) + if rtime >= stime: + raise Exception('in pairsIncluded, first date must be reference') + if (sdate not in dates) or (mdate not in dates): + raise Exception('in pairsIncluded, reference or secondary date of pair {} not in data directory {}'.format(pair, idir)) + + if datesExcluded is not None: + if type(datesExcluded) != list: + raise Exception('datesExcluded must be a list') + if pairsExcluded is not None: + if type(pairsExcluded) != list: + raise Exception('pairsExcluded must be a list') + + #get initial pairs to process + pairsProcess = [] + for i in range(ndate): + rdate = dates[i] + rtime = datetime.datetime.strptime(rdate, datefmt) + for j in range(numberOfSubsequentDates): + if i+j+1 <= ndate - 1: + sdate = dates[i+j+1] + stime = 
datetime.datetime.strptime(sdate, datefmt) + pair = rdate + '-' + sdate + ts = np.absolute((stime - rtime).total_seconds()) / (365.0 * 24.0 * 3600) + if pairTimeSpanMinimum is not None: + if ts < pairTimeSpanMinimum: + continue + if pairTimeSpanMaximum is not None: + if ts > pairTimeSpanMaximum: + continue + pairsProcess.append(pair) + + #included dates + if datesIncluded is not None: + pairsProcess2 = [] + for pair in pairsProcess: + rdate = pair.split('-')[0] + sdate = pair.split('-')[1] + if (rdate in datesIncluded) or (sdate in datesIncluded): + pairsProcess2.append(pair) + pairsProcess = pairsProcess2 + + #included pairs + if pairsIncluded is not None: + pairsProcess = pairsIncluded + + #excluded dates + if datesExcluded is not None: + pairsProcess2 = [] + for pair in pairsProcess: + rdate = pair.split('-')[0] + sdate = pair.split('-')[1] + if (rdate not in datesIncluded) and (sdate not in datesIncluded): + pairsProcess2.append(pair) + pairsProcess = pairsProcess2 + + #excluded pairs + if pairsExcluded is not None: + pairsProcess2 = [] + for pair in pairsProcess: + if pair not in pairsExcluded: + pairsProcess2.append(pair) + pairsProcess = pairsProcess2 + + # #datesProcess + # datesProcess = [] + # for pair in pairsProcess: + # rdate = pair.split('-')[0] + # sdate = pair.split('-')[1] + # if rdate not in datesProcess: + # datesProcess.append(rdate) + # if sdate not in datesProcess: + # datesProcess.append(sdate) + + # datesProcess = sorted(datesProcess) + pairsProcess = sorted(pairsProcess) + + #return (datesProcess, pairsProcess) + return pairsProcess + + +def stackRank(dates, pairs): + from numpy.linalg import matrix_rank + + dates = sorted(dates) + pairs = sorted(pairs) + ndate = len(dates) + npair = len(pairs) + + #observation matrix + H0 = np.zeros((npair, ndate)) + for k in range(npair): + dateReference = pairs[k].split('-')[0] + dateSecondary = pairs[k].split('-')[1] + dateReference_i = dates.index(dateReference) + H0[k, dateReference_i] = 1 + 
dateSecondary_i = dates.index(dateSecondary) + H0[k, dateSecondary_i] = -1 + + rank = matrix_rank(H0) + + return rank + + + + +def checkStackDataDir(idir): + ''' + idir: input directory where data of each date is located. only folders are recognized + ''' + stack.dataDir + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) + dateDirs = [x for x in dateDirs if os.path.isdir(x)] + + #check dates and acquisition mode + mode = os.path.basename(sorted(glob.glob(os.path.join(dateDirs[0], 'IMG-HH-ALOS2*')))[0]).split('-')[4][0:3] + for x in dateDirs: + dateFolder = os.path.basename(x) + images = sorted(glob.glob(os.path.join(x, 'IMG-HH-ALOS2*'))) + leaders = sorted(glob.glob(os.path.join(x, 'LED-ALOS2*'))) + for y in images: + dateFile = os.path.basename(y).split('-')[3] + if dateFolder != dateFile: + raise Exception('date: {} in data folder name is different from date: {} in file name: {}'.format(dateFolder, dateFile, y)) + ymode = os.path.basename(y).split('-')[4][0:3] + if mode != ymode: + #currently only allows S or D polarization, Q should also be OK? + if (mode[0:2] == ymode[0:2]) and (mode[2] in ['S', 'D']) and (ymode[2] in ['S', 'D']): + pass + else: + raise Exception('all acquisition modes should be the same') + + for y in leaders: + dateFile = os.path.basename(y).split('-')[2] + if dateFolder != dateFile: + raise Exception('date: {} in data folder name is different from date: {} in file name: {}'.format(dateFolder, dateFile, y)) + ymode = os.path.basename(y).split('-')[3][0:3] + if mode != ymode: + #currently only allows S or D polarization, Q should also be OK? 
+ if (mode[0:2] == ymode[0:2]) and (mode[2] in ['S', 'D']) and (ymode[2] in ['S', 'D']): + pass + else: + raise Exception('all acquisition modes should be the same') + + +def createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode): + ''' + create scripts to process an InSAR stack + ''' + import os + import copy + + stack.dem = os.path.abspath(stack.dem) + stack.demGeo = os.path.abspath(stack.demGeo) + stack.wbd = os.path.abspath(stack.wbd) + + insar = stack + + def header(txt): + hdr = '##################################################\n' + hdr += '# {}\n'.format(txt) + hdr += '##################################################\n' + return hdr + + + stackScriptPath = os.environ['PATH_ALOSSTACK'] + + + print(' * * *') + if stack.dateReferenceStack in datesProcess: + print('reference date of stack in date list to be processed.') + if os.path.isfile(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar', 'affine_transform.txt')): + print('reference date of stack already processed previously.') + print('do not implement reference-date-related processing this time.') + processDateReferenceStack = False + else: + print('reference date of stack not processed previously.') + print('implement reference-date-related processing this time.') + processDateReferenceStack = True + else: + print('reference date of stack NOT in date list to be processed.') + if not os.path.isfile(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar', 'affine_transform.txt')): + raise Exception('but it does not seem to have been processed previously.') + else: + print('assume it has already been processed previously.') + print('do not implement reference-date-related processing this time.') + processDateReferenceStack = False + print(' * * *') + print() + + #WHEN PROVIDING '-sec_date' BECAREFUL WITH 'datesProcess' AND 'datesProcessSecondary' + datesProcessSecondary = copy.deepcopy(datesProcess) + if stack.dateReferenceStack in datesProcessSecondary: + 
datesProcessSecondary.remove(stack.dateReferenceStack) + + #pairs also processed in regular InSAR processing + pairsProcessIon1 = [ipair for ipair in pairsProcessIon if ipair in pairsProcess] + #pairs not processed in regular InSAR processing + pairsProcessIon2 = [ipair for ipair in pairsProcessIon if ipair not in pairsProcess] + + + #start new commands: processing each date + ################################################################################# + cmd = '#!/bin/bash\n\n' + + #read data + if datesProcess != []: + cmd += header('read data') + cmd += os.path.join(stackScriptPath, 'read_data.py') + ' -idir {} -odir {} -ref_date {} -sec_date {} -pol {}'.format(stack.dataDir, stack.datesProcessingDir, stack.dateReferenceStack, ' '.join(datesProcess), stack.polarization) + if stack.frames is not None: + cmd += ' -frames {}'.format(' '.join(stack.frames)) + if stack.startingSwath is not None: + cmd += ' -starting_swath {}'.format(stack.startingSwath) + if stack.endingSwath is not None: + cmd += ' -ending_swath {}'.format(stack.endingSwath) + if insar.useVirtualFile: + cmd += ' -virtual' + cmd += '\n' + cmd += '\n' + #frame and swath names use those from frame and swath dirs from now on + + + #compute baseline + if datesProcessSecondary != []: + cmd += header('compute baseline') + cmd += os.path.join(stackScriptPath, 'compute_baseline.py') + ' -idir {} -odir {} -ref_date {} -sec_date {} -baseline_center baseline_center.txt -baseline_grid -baseline_grid_width 10 -baseline_grid_length 10'.format(stack.datesProcessingDir, stack.baselineDir, stack.dateReferenceStack, ' '.join(datesProcessSecondary)) + cmd += '\n' + cmd += '\n' + + + #compute burst synchronization + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + if mode in scansarNominalModes: + cmd += header('compute burst synchronization') + cmd += os.path.join(stackScriptPath, 'compute_burst_sync.py') + ' -idir {} -burst_sync_file 
burst_synchronization.txt -ref_date {}'.format(stack.datesProcessingDir, stack.dateReferenceStack) + cmd += '\n' + cmd += '\n' + + + #estimate SLC offsets + if datesProcessSecondary != []: + cmd += header('estimate SLC offsets') + for i in range(len(datesProcessSecondary)): + cmd += os.path.join(stackScriptPath, 'estimate_slc_offset.py') + ' -idir {} -ref_date {} -sec_date {} -wbd {} -dem {}'.format(stack.datesProcessingDir, stack.dateReferenceStack, datesProcessSecondary[i], insar.wbd, stack.dem) + if insar.useWbdForNumberOffsets is not None: + cmd += ' -use_wbd_offset' + if insar.numberRangeOffsets is not None: + for x in insar.numberRangeOffsets: + cmd += ' -num_rg_offset {}'.format(' '.join(x)) + if insar.numberAzimuthOffsets is not None: + for x in insar.numberAzimuthOffsets: + cmd += ' -num_az_offset {}'.format(' '.join(x)) + cmd += '\n' + cmd += '\n' + + + #estimate swath offsets + if processDateReferenceStack: + cmd += header('estimate swath offsets') + cmd += os.path.join(stackScriptPath, 'estimate_swath_offset.py') + ' -idir {} -date {} -output swath_offset.txt'.format(os.path.join(stack.datesProcessingDir, stack.dateReferenceStack), stack.dateReferenceStack) + if insar.swathOffsetMatching: + cmd += ' -match' + cmd += '\n' + cmd += '\n' + + + #estimate frame offsets + if processDateReferenceStack: + cmd += header('estimate frame offsets') + cmd += os.path.join(stackScriptPath, 'estimate_frame_offset.py') + ' -idir {} -date {} -output frame_offset.txt'.format(os.path.join(stack.datesProcessingDir, stack.dateReferenceStack), stack.dateReferenceStack) + if insar.frameOffsetMatching: + cmd += ' -match' + cmd += '\n' + cmd += '\n' + + + #resample to a common grid + if datesProcess != []: + cmd += header('resample to a common grid') + for x in datesProcess: + cmd += os.path.join(stackScriptPath, 'resample_common_grid.py') + ' -idir {} -odir {} -ref_date {} -sec_date {} -nrlks1 {} -nalks1 {}'.format(stack.datesProcessingDir, stack.datesResampledDir, 
stack.dateReferenceStack, x, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if stack.gridFrame is not None: + cmd += ' -ref_frame {}'.format(stack.gridFrame) + if stack.gridSwath is not None: + cmd += ' -ref_swath {}'.format(stack.gridSwath) + if insar.doIon: + cmd += ' -subband' + cmd += '\n' + cmd += '\n' + + + #mosaic parameter + if datesProcess != []: + cmd += header('mosaic parameter') + cmd += os.path.join(stackScriptPath, 'mosaic_parameter.py') + ' -idir {} -ref_date {} -sec_date {} -nrlks1 {} -nalks1 {}'.format(stack.datesProcessingDir, stack.dateReferenceStack, ' '.join(datesProcess), insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if stack.gridFrame is not None: + cmd += ' -ref_frame {}'.format(stack.gridFrame) + if stack.gridSwath is not None: + cmd += ' -ref_swath {}'.format(stack.gridSwath) + cmd += '\n' + + if processDateReferenceStack: + cmd += os.path.join(stackScriptPath, 'mosaic_parameter.py') + ' -idir {} -ref_date {} -sec_date {} -nrlks1 {} -nalks1 {}'.format(stack.datesResampledDir, stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if stack.gridFrame is not None: + cmd += ' -ref_frame {}'.format(stack.gridFrame) + if stack.gridSwath is not None: + cmd += ' -ref_swath {}'.format(stack.gridSwath) + cmd += '\n' + cmd += '\n' + else: + cmd += '\n' + + + #compute lat/lon/hgt + if processDateReferenceStack: + cmd += header('compute latitude, longtitude and height') + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) + cmd += os.path.join(stackScriptPath, 'rdr2geo.py') + ' -date {} -dem {} -wbd {} -nrlks1 {} -nalks1 {}'.format(stack.dateReferenceStack, stack.dem, insar.wbd, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if insar.useGPU: + cmd += ' -gpu' + cmd += '\n' + + # #should move it to look section???!!! 
+ # cmd += os.path.join(stackScriptPath, 'look_geom.py') + ' -date {} -wbd {} -nrlks1 {} -nalks1 {} -nrlks2 {} -nalks2 {}'.format(stack.dateReferenceStack, insar.wbd, insar.numberRangeLooks1, insar.numberAzimuthLooks1, insar.numberRangeLooks2, insar.numberAzimuthLooks2) + # cmd += '\n' + + cmd += 'cd ../../' + cmd += '\n' + cmd += '\n' + + + #compute geometrical offsets + if datesProcessSecondary != []: + cmd += header('compute geometrical offsets') + for x in datesProcessSecondary: + date_par_dir = os.path.join('../../', stack.datesProcessingDir, x) + lat = '../{}/insar/{}_{}rlks_{}alks.lat'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + lon = '../{}/insar/{}_{}rlks_{}alks.lon'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + hgt = '../{}/insar/{}_{}rlks_{}alks.hgt'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, x)) + cmd += os.path.join(stackScriptPath, 'geo2rdr.py') + ' -date {} -date_par_dir {} -lat {} -lon {} -hgt {} -nrlks1 {} -nalks1 {}'.format(x, date_par_dir, lat, lon, hgt, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if insar.useGPU: + cmd += ' -gpu' + cmd += '\n' + cmd += 'cd ../../\n' + cmd += '\n' + + + #save commands + cmd1 = cmd + + + + if pairsProcess != []: + #start new commands: processing each pair before ionosphere correction + ################################################################################# + cmd = '#!/bin/bash\n\n' + cmd += '#########################################################################\n' + cmd += '#set the environment variable before running the following steps\n' + cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd += '#########################################################################\n' + cmd += '\n\n' + else: + cmd = 
'#!/bin/bash\n\n' + cmd += '#no pairs for InSAR processing.' + + + #pair up + if pairsProcess != []: + cmd += header('pair up') + cmd += os.path.join(stackScriptPath, 'pair_up.py') + ' -idir1 {} -idir2 {} -odir {} -ref_date {} -pairs {}'.format(stack.datesProcessingDir, stack.datesResampledDir, stack.pairsProcessingDir, stack.dateReferenceStack, ' '.join(pairsProcess)) + cmd += '\n' + cmd += '\n' + + + #form interferograms + if pairsProcess != []: + cmd += header('form interferograms') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'form_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDir, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + + #mosaic interferograms + if pairsProcess != []: + cmd += header('mosaic interferograms') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'mosaic_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDir, + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + + #estimate residual offsets between radar and DEM + if processDateReferenceStack: + #if not os.path.isfile(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar', 'affine_transform.txt')): + #amplitde image of any pair should work, 
since they are all coregistered now + if pairsProcess == []: + pairsProcessTmp = [os.path.basename(x) for x in sorted(glob.glob(os.path.join(stack.pairsProcessingDir, '*'))) if os.path.isdir(x)] + else: + pairsProcessTmp = pairsProcess + if pairsProcessTmp == []: + raise Exception('no InSAR pairs available for estimating residual offsets between radar and DEM') + for x in pairsProcessTmp: + if stack.dateReferenceStack in x.split('-'): + pairToUse = x + break + track = '{}.track.xml'.format(stack.dateReferenceStack) + wbd = os.path.join('insar', '{}_{}rlks_{}alks.wbd'.format(stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) + hgt = os.path.join('insar', '{}_{}rlks_{}alks.hgt'.format(stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) + amp = os.path.join('../../', stack.pairsProcessingDir, pairToUse, 'insar', '{}_{}rlks_{}alks.amp'.format(pairToUse, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) + + cmd += header('estimate residual offsets between radar and DEM') + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) + cmd += os.path.join(stackScriptPath, 'radar_dem_offset.py') + ' -track {} -dem {} -wbd {} -hgt {} -amp {} -output affine_transform.txt -nrlks1 {} -nalks1 {}'.format(track, stack.dem, wbd, hgt, amp, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if insar.numberRangeLooksSim is not None: + cmd += ' -nrlks_sim {}'.format(insar.numberRangeLooksSim) + if insar.numberAzimuthLooksSim is not None: + cmd += ' -nalks_sim {}'.format(insar.numberAzimuthLooksSim) + cmd += '\n' + cmd += 'cd ../../\n' + cmd += '\n' + + + #rectify range offsets + if datesProcessSecondary != []: + cmd += header('rectify range offsets') + aff = os.path.join('../../', stack.dateReferenceStack, 'insar', 'affine_transform.txt') + for x in datesProcessSecondary: + rgoff = '{}_{}rlks_{}alks_rg.off'.format(x, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + rgoffRect = 
'{}_{}rlks_{}alks_rg_rect.off'.format(x, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, x, 'insar')) + cmd += os.path.join(stackScriptPath, 'rect_range_offset.py') + ' -aff {} -input {} -output {} -nrlks1 {} -nalks1 {}'.format(aff, rgoff, rgoffRect, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + cmd += '\n' + cmd += 'cd ../../../\n' + cmd += '\n' + + + #diff interferograms + if pairsProcess != []: + cmd += header('diff interferograms') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'diff_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDir, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + + #look and coherence + if (pairsProcess != []) or processDateReferenceStack: + cmd += header('look and coherence') + if pairsProcess != []: + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'look_coherence.py'), + pairsProcessingDir = stack.pairsProcessingDir, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2) + cmd += '\n' + cmd += '\n' + + 
if processDateReferenceStack: + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) + cmd += os.path.join(stackScriptPath, 'look_geom.py') + ' -date {} -wbd {} -nrlks1 {} -nalks1 {} -nrlks2 {} -nalks2 {}'.format(stack.dateReferenceStack, insar.wbd, insar.numberRangeLooks1, insar.numberAzimuthLooks1, insar.numberRangeLooks2, insar.numberAzimuthLooks2) + cmd += '\n' + cmd += 'cd ../../\n' + cmd += '\n' + + + #save commands + cmd2 = cmd + + + + + #for ionospheric correction + if insar.doIon and (pairsProcessIon != []): + #start new commands: ionospheric phase estimation + ################################################################################# + cmd = '#!/bin/bash\n\n' + cmd += '#########################################################################\n' + cmd += '#set the environment variables before running the following steps\n' + cmd += 'ionpair=({})\n'.format(' '.join(pairsProcessIon)) + cmd += 'ionpair1=({})\n'.format(' '.join(pairsProcessIon1)) + cmd += 'ionpair2=({})\n'.format(' '.join(pairsProcessIon2)) + cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd += '#########################################################################\n' + cmd += '\n\n' + + + #pair up + cmd += header('pair up for ionospheric phase estimation') + cmd += os.path.join(stackScriptPath, 'pair_up.py') + ' -idir1 {} -idir2 {} -odir {} -ref_date {} -pairs {}'.format(stack.datesProcessingDir, stack.datesResampledDir, stack.pairsProcessingDirIon, stack.dateReferenceStack, ' '.join(pairsProcessIon)) + cmd += '\n' + cmd += '\n' + + + #subband interferograms + if insar.swathPhaseDiffSnapIon is not None: + snap = [[1 if y else 0 for y in x] for x in insar.swathPhaseDiffSnapIon] + snapArgument = ' ' + ' '.join(['-snap {}'.format(' '.join([str(y) for y in x])) for x in snap]) + else: + snapArgument = '' + + cmd += header('subband interferograms') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + IFS='-' read -ra dates <<< 
"${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1}{snapArgument} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'ion_subband.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + snapArgument = snapArgument) + cmd += '\n' + cmd += '\n' + + + #unwrap subband interferograms + if insar.filterSubbandInt: + filtArgument = ' -filt -alpha {} -win {} -step {}'.format(insar.filterStrengthSubbandInt, insar.filterWinsizeSubbandInt, insar.filterStepsizeSubbandInt) + if not insar.removeMagnitudeBeforeFilteringSubbandInt: + filtArgument += ' -keep_mag' + else: + filtArgument = '' + + cmd += header('unwrap subband interferograms') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -wbd {wbd} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks_ion {nrlks_ion} -nalks_ion {nalks_ion}{filtArgument} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'ion_unwrap.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + wbd = insar.wbd, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks_ion = insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + filtArgument = filtArgument) + cmd += '\n' + cmd += '\n' + + + #filter ionosphere + filtArgument = '' + if insar.fitIon: + filtArgument += ' 
-fit' + if insar.filtIon: + filtArgument += ' -filt' + if insar.fitAdaptiveIon: + filtArgument += ' -fit_adaptive' + if insar.filtSecondaryIon: + filtArgument += ' -filt_secondary -win_secondary {}'.format(insar.filteringWinsizeSecondaryIon) + if insar.filterStdIon is not None: + filtArgument += ' -filter_std_ion {}'.format(insar.filterStdIon) + + if insar.maskedAreasIon is not None: + filtArgument += ''.join([' -masked_areas '+' '.join([str(y) for y in x]) for x in insar.maskedAreasIon]) + + cmd += header('filter ionosphere') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -idir {idir1} -idir2 {idir2} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} -nrlks_ion {nrlks_ion} -nalks_ion {nalks_ion} -win_min {win_min} -win_max {win_max}{filtArgument} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'ion_filt.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir1 = os.path.join('../../', stack.datesResampledDir), + idir2 = os.path.join('../../', stack.datesProcessingDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2, + nrlks_ion = insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + win_min = insar.filteringWinsizeMinIon, + win_max = insar.filteringWinsizeMaxIon, + filtArgument = filtArgument) + cmd += '\n' + cmd += '\n' + + + #prepare interferograms for checking ionospheric correction + cmd += header('prepare interferograms for checking ionosphere estimation results') + if pairsProcessIon1 != []: + if (insar.numberRangeLooksIon != 1) or (insar.numberAzimuthLooksIon != 1): + cmd += '''for ((i=0;i<${{#ionpair1[@]}};i++)); do 
+ IFS='-' read -ra dates <<< "${{ionpair1[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + {script} -i {pairsProcessingDir}/${{ionpair1[i]}}/insar/diff_${{ionpair1[i]}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDirIon}/${{ionpair1[i]}}/ion/ion_cal/diff_${{ionpair1[i]}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} +done'''.format(script = os.path.join('', 'looks.py'), + pairsProcessingDir = stack.pairsProcessingDir.strip('/'), + pairsProcessingDirIon = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks_ion = insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + nrlks = insar.numberRangeLooks1 * insar.numberRangeLooksIon, + nalks = insar.numberAzimuthLooks1 * insar.numberAzimuthLooksIon) + cmd += '\n' + cmd += '\n' + else: + cmd += '''for ((i=0;i<${{#ionpair1[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair1[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cp {pairsProcessingDir}/${{ionpair1[i]}}/insar/diff_${{ionpair1[i]}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDirIon}/${{ionpair1[i]}}/ion/ion_cal +done'''.format(pairsProcessingDir = stack.pairsProcessingDir.strip('/'), + pairsProcessingDirIon = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + + if pairsProcessIon2 != []: + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair2[i]}} + {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'form_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + cmd += '''for 
((i=0;i<${{#ionpair2[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair2[i]}} + {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'mosaic_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair2[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'diff_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + if (insar.numberRangeLooksIon != 1) or (insar.numberAzimuthLooksIon != 1): + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + {script} -i {pairsProcessingDir}/${{ionpair2[i]}}/insar/diff_${{ionpair2[i]}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDir}/${{ionpair2[i]}}/ion/ion_cal/diff_${{ionpair2[i]}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} +done'''.format(script = os.path.join('', 'looks.py'), + pairsProcessingDir = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks_ion = 
insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + nrlks = insar.numberRangeLooks1 * insar.numberRangeLooksIon, + nalks = insar.numberAzimuthLooks1 * insar.numberAzimuthLooksIon) + cmd += '\n' + cmd += '\n' + else: + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cp {pairsProcessingDir}/${{ionpair2[i]}}/insar/diff_${{ionpair2[i]}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDir}/${{ionpair2[i]}}/ion/ion_cal +done'''.format(pairsProcessingDir = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + + #check ionosphere estimation results + cmd += header('check ionosphere estimation results') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -e='a*exp(-1.0*J*b)' --a=ion/ion_cal/diff_${{ionpair[i]}}_{nrlks}rlks_{nalks}alks_ori.int --b=ion/ion_cal/filt_ion_{nrlks}rlks_{nalks}alks.ion -s BIP -t cfloat -o ion/ion_cal/diff_${{ionpair[i]}}_{nrlks}rlks_{nalks}alks.int + cd ../../ +done'''.format(script = os.path.join('', 'imageMath.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + nrlks = insar.numberRangeLooks1*insar.numberRangeLooksIon, + nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooksIon) + cmd += '\n' + cmd += '\n' + + cmd += os.path.join(stackScriptPath, 'ion_check.py') + ' -idir {} -odir fig_ion -pairs {}'.format(stack.pairsProcessingDirIon, ' '.join(pairsProcessIon)) + cmd += '\n' + cmd += '\n' + + + #estimate ionospheric phase for each date + cmd += header('estimate ionospheric phase for each date') + cmd += '#MUST re-run all the following commands, each time after running this command!!!\n' + cmd += '#uncomment to run this command\n' + cmd += '#' + cmd += 
os.path.join(stackScriptPath, 'ion_ls.py') + ' -idir {} -odir {} -ref_date_stack {} -nrlks1 {} -nalks1 {} -nrlks2 {} -nalks2 {} -nrlks_ion {} -nalks_ion {} -interp'.format(stack.pairsProcessingDirIon, stack.datesDirIon, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1, insar.numberRangeLooks2, insar.numberAzimuthLooks2, insar.numberRangeLooksIon, insar.numberAzimuthLooksIon) + if stack.dateReferenceStackIon is not None: + cmd += ' -zro_date {}'.format(stack.dateReferenceStackIon) + cmd += '\n' + cmd += '\n' + + + #correct ionosphere + if insar.applyIon: + cmd += header('correct ionosphere') + cmd += '''#redefine insarpair to include all processed InSAR pairs +insarpair=($(ls -l {pairsProcessingDir} | grep ^d | awk '{{print $9}}')) +for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + #uncomment to run this command + #{script} -ion_dir {ion_dir} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'ion_correct.py'), + pairsProcessingDir = stack.pairsProcessingDir, + ion_dir = os.path.join('../../', stack.datesDirIon), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2) + cmd += '\n' + cmd += '\n' + else: + cmd = '#!/bin/bash\n\n' + cmd += '#no pairs for estimating ionosphere.' 
+ + + #save commands + cmd3 = cmd + + + + + #if pairsProcess != []: + if True: + #start new commands: processing each pair after ionosphere correction + ################################################################################# + cmd = '#!/bin/bash\n\n' + cmd += '#########################################################################\n' + cmd += '#set the environment variable before running the following steps\n' + if insar.doIon and insar.applyIon: + #reprocess all pairs + cmd += '''insarpair=($(ls -l {pairsProcessingDir} | grep ^d | awk '{{print $9}}'))'''.format(pairsProcessingDir = stack.pairsProcessingDir) + cmd += '\n' + else: + cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd += '#########################################################################\n' + cmd += '\n\n' + + + #filter interferograms + extraArguments = '' + if not insar.removeMagnitudeBeforeFiltering: + extraArguments += ' -keep_mag' + if insar.waterBodyMaskStartingStep == 'filt': + extraArguments += ' -wbd_msk' + + cmd += header('filter interferograms') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} -alpha {alpha} -win {win} -step {step}{extraArguments} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'filt.py'), + pairsProcessingDir = stack.pairsProcessingDir, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2, + alpha = insar.filterStrength, + win = insar.filterWinsize, + step = insar.filterStepsize, + extraArguments = 
extraArguments) + cmd += '\n' + cmd += '\n' + + + #unwrap interferograms + extraArguments = '' + if insar.waterBodyMaskStartingStep == 'unwrap': + extraArguments += ' -wbd_msk' + + cmd += header('unwrap interferograms') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2}{extraArguments} + cd ../../ +done'''.format(script = os.path.join(stackScriptPath, 'unwrap_snaphu.py'), + pairsProcessingDir = stack.pairsProcessingDir, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + + + #geocode + extraArguments = '' + if insar.geocodeInterpMethod is not None: + extraArguments += ' -interp_method {}'.format(insar.geocodeInterpMethod) + if insar.bbox is not None: + extraArguments += ' -bbox {}'.format('/'.join(map(str, insar.bbox))) + + cmd += header('geocode') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + cd insar + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input ${{insarpair[i]}}_{nrlks}rlks_{nalks}alks.cor -nrlks {nrlks} -nalks {nalks}{extraArguments} + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input filt_${{insarpair[i]}}_{nrlks}rlks_{nalks}alks.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input 
filt_${{insarpair[i]}}_{nrlks}rlks_{nalks}alks_msk.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} + cd ../../../ +done'''.format(script = os.path.join(stackScriptPath, 'geocode.py'), + pairsProcessingDir = stack.pairsProcessingDir, + ref_date_stack = stack.dateReferenceStack, + dem_geo = stack.demGeo, + nrlks = insar.numberRangeLooks1*insar.numberRangeLooks2, + nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooks2, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar')) + cmd += os.path.join(stackScriptPath, 'geocode.py') + ' -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input {ref_date_stack}_{nrlks}rlks_{nalks}alks.los -nrlks {nrlks} -nalks {nalks}{extraArguments}'.format( + ref_date_stack = stack.dateReferenceStack, + dem_geo = stack.demGeo, + nrlks = insar.numberRangeLooks1*insar.numberRangeLooks2, + nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooks2, + extraArguments = extraArguments) + cmd += '\n' + cmd += 'cd ../../../\n' + cmd += '\n' + else: + cmd = '#!/bin/bash\n\n' + cmd += '#no pairs for InSAR processing.' + + + #save commands + cmd4 = cmd + + + return (cmd1, cmd2, cmd3, cmd4) + + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='create commands to process a stack of acquisitions') + parser.add_argument('-stack_par', dest='stack_par', type=str, required=True, + help = 'stack processing input parameter file.') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + stackParameter = inps.stack_par + + + #need to remove -stack_par from arguments, otherwise application class would complain + import sys + #sys.argv.remove(sys.argv[1]) + #sys.argv = [sys.argv[2]] + sys.argv = [sys.argv[0], sys.argv[2]] + + stack = loadStackUserParameters(stackParameter) + insar = stack + print() + + + #0. parameters that must be set. + if stack.dataDir is None: + raise Exception('data directory not set.') + checkDem(stack.dem) + checkDem(stack.demGeo) + checkDem(stack.wbd) + if stack.dateReferenceStack is None: + raise Exception('reference date of the stack not set.') + + + #1. check if date dirctories are OK + checkStackDataDir(stack.dataDir) + + + #2. regular InSAR processing + print('get dates and pairs from user input') + pairsProcess = formPairs(stack.dataDir, stack.numberOfSubsequentDates, + stack.pairTimeSpanMinimum, stack.pairTimeSpanMaximum, + stack.datesIncluded, stack.pairsIncluded, + stack.datesExcluded, stack.pairsExcluded) + datesProcess = datesFromPairs(pairsProcess) + print('InSAR processing:') + print('dates: {}'.format(' '.join(datesProcess))) + print('pairs: {}'.format(' '.join(pairsProcess))) + + rank = stackRank(datesProcess, pairsProcess) + if rank != len(datesProcess) - 1: + print('\nWARNING: dates in stack not fully connected by pairs to be processed in regular InSAR processing\n') + print() + + + #3. 
ionospheric correction + if insar.doIon: + pairsProcessIon = formPairs(stack.dataDir, stack.numberOfSubsequentDatesIon, + stack.pairTimeSpanMinimumIon, stack.pairTimeSpanMaximumIon, + stack.datesIncludedIon, stack.pairsIncludedIon, + stack.datesExcludedIon, stack.pairsExcludedIon) + datesProcessIon = datesFromPairs(pairsProcessIon) + print('ionospheric phase estimation:') + print('dates: {}'.format(' '.join(datesProcessIon))) + print('pairs: {}'.format(' '.join(pairsProcessIon))) + + rankIon = stackRank(datesProcessIon, pairsProcessIon) + if rankIon != len(datesProcessIon) - 1: + print('\nWARNING: dates in stack not fully connected by pairs to be processed in ionospheric correction\n') + print('\n') + else: + pairsProcessIon = [] + + + #4. union + if insar.doIon: + datesProcess = unionLists(datesProcess, datesProcessIon) + else: + datesProcess = datesProcess + + + #5. find acquisition mode + mode = os.path.basename(sorted(glob.glob(os.path.join(stack.dataDir, datesProcess[0], 'LED-ALOS2*-*-*')))[0]).split('-')[-1][0:3] + print('acquisition mode of stack: {}'.format(mode)) + print('\n') + + + #6. 
check if already processed previously + datesProcessedAlready = getFolders(stack.datesResampledDir) + if not stack.datesReprocess: + datesProcess, datesProcessRemoved = removeCommonItemsLists(datesProcess, datesProcessedAlready) + if datesProcessRemoved != []: + print('the following dates have already been processed, will not reprocess them.') + print('dates: {}'.format(' '.join(datesProcessRemoved))) + print() + + pairsProcessedAlready = getFolders(stack.pairsProcessingDir) + if not stack.pairsReprocess: + pairsProcess, pairsProcessRemoved = removeCommonItemsLists(pairsProcess, pairsProcessedAlready) + if pairsProcessRemoved != []: + print('the following pairs for InSAR processing have already been processed, will not reprocess them.') + print('pairs: {}'.format(' '.join(pairsProcessRemoved))) + print() + + if insar.doIon: + pairsProcessedAlreadyIon = getFolders(stack.pairsProcessingDirIon) + if not stack.pairsReprocessIon: + pairsProcessIon, pairsProcessRemovedIon = removeCommonItemsLists(pairsProcessIon, pairsProcessedAlreadyIon) + if pairsProcessRemovedIon != []: + print('the following pairs for estimating ionospheric phase have already been processed, will not reprocess them.') + print('pairs: {}'.format(' '.join(pairsProcessRemovedIon))) + print() + + print() + + print('dates and pairs to be processed:') + print('dates: {}'.format(' '.join(datesProcess))) + print('pairs (for InSAR processing): {}'.format(' '.join(pairsProcess))) + if insar.doIon: + print('pairs (for estimating ionospheric phase): {}'.format(' '.join(pairsProcessIon))) + print('\n') + + + #7. 
use mode to define processing parameters + #number of looks + from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict + if insar.numberRangeLooks1 is None: + insar.numberRangeLooks1 = modeProcParDict['ALOS-2'][mode]['numberRangeLooks1'] + if insar.numberAzimuthLooks1 is None: + insar.numberAzimuthLooks1 = modeProcParDict['ALOS-2'][mode]['numberAzimuthLooks1'] + if insar.numberRangeLooks2 is None: + insar.numberRangeLooks2 = modeProcParDict['ALOS-2'][mode]['numberRangeLooks2'] + if insar.numberAzimuthLooks2 is None: + insar.numberAzimuthLooks2 = modeProcParDict['ALOS-2'][mode]['numberAzimuthLooks2'] + if insar.numberRangeLooksIon is None: + insar.numberRangeLooksIon = modeProcParDict['ALOS-2'][mode]['numberRangeLooksIon'] + if insar.numberAzimuthLooksIon is None: + insar.numberAzimuthLooksIon = modeProcParDict['ALOS-2'][mode]['numberAzimuthLooksIon'] + + + #7. create commands + if (datesProcess == []) and (pairsProcess == []) and (pairsProcessIon == []): + print('no dates and pairs need to be processed.') + print('no processing script is generated.') + else: + cmd1, cmd2, cmd3, cmd4 = createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode) + with open('cmd_1.sh', 'w') as f: + f.write(cmd1) + with open('cmd_2.sh', 'w') as f: + f.write(cmd2) + with open('cmd_3.sh', 'w') as f: + f.write(cmd3) + with open('cmd_4.sh', 'w') as f: + f.write(cmd4) + + runCmd('chmod +x cmd_1.sh cmd_2.sh cmd_3.sh cmd_4.sh', silent=1) diff --git a/contrib/stack/alosStack/diff_interferogram.py b/contrib/stack/alosStack/diff_interferogram.py new file mode 100644 index 0000000..c1e177e --- /dev/null +++ b/contrib/stack/alosStack/diff_interferogram.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +from StackPulic 
import loadProduct +from StackPulic import stackDateStatistics + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='form interferogram') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. 
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack) + + trackParameter = os.path.join(dateDirs[dateIndexReference], dates[dateIndexReference]+'.track.xml') + trackReferenceStack = loadProduct(trackParameter) + + rangePixelSize = numberRangeLooks1 * trackReferenceStack.rangePixelSize + radarWavelength = trackReferenceStack.radarWavelength + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + interferogram = pair + ml1 + '.int' + differentialInterferogram = 'diff_' + pair + ml1 + '.int' + + if dateReference == dateReferenceStack: + rectRangeOffset = os.path.join('../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset, differentialInterferogram) + elif dateSecondary == dateReferenceStack: + rectRangeOffset = os.path.join('../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset, differentialInterferogram) + else: + rectRangeOffset1 = os.path.join('../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') 
+ rectRangeOffset2 = os.path.join('../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*(b-c)*4.0*{}*{}/{})*(b!=0)*(c!=0)' --a={} --b={} --c={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset1, rectRangeOffset2, differentialInterferogram) + runCmd(cmd) + + + os.chdir('../') diff --git a/contrib/stack/alosStack/estimate_frame_offset.py b/contrib/stack/alosStack/estimate_frame_offset.py new file mode 100644 index 0000000..006b877 --- /dev/null +++ b/contrib/stack/alosStack/estimate_frame_offset.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os + +import isce, isceobj +from isceobj.Alos2Proc.runFrameOffset import frameOffset + +from StackPulic import loadTrack +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate frame offset') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'data directory') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'data acquisition date. format: YYMMDD') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file') + #parser.add_argument('-match', dest='match', type=int, default=1, + # help = 'do matching when computing adjacent frame offset. 0: no. 
1: yes (default)') + parser.add_argument('-match', dest='match', action='store_true', default=False, + help='do matching when computing adjacent swath offset') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + date = inps.date + outputFile = inps.output + match = inps.match + ####################################################### + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + + track = loadTrack(idir, date) + + #save current dir + dirOriginal = os.getcwd() + os.chdir(idir) + + + if len(track.frames) > 1: + if track.operationMode in scansarModes: + matchingMode=0 + else: + matchingMode=1 + + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + #compute swath offset + offsetReference = frameOffset(track, date+'.slc', 'frame_offset.txt', + crossCorrelation=match, matchingMode=matchingMode) + + os.chdir('../') + else: + print('there is only one frame, no need to estimate frame offset') + diff --git a/contrib/stack/alosStack/estimate_slc_offset.py b/contrib/stack/alosStack/estimate_slc_offset.py new file mode 100644 index 0000000..8344599 --- /dev/null +++ b/contrib/stack/alosStack/estimate_slc_offset.py @@ -0,0 +1,392 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +import mroipac +from mroipac.ampcor.Ampcor import Ampcor +from isceobj.Alos2Proc.Alos2ProcPublic import topo +from isceobj.Alos2Proc.Alos2ProcPublic import geo2rdr +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar +from isceobj.Alos2Proc.Alos2ProcPublic import reformatGeometricalOffset +from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset +from 
isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets +from isceobj.Alos2Proc.Alos2ProcPublic import computeOffsetFromOrbit + +from StackPulic import loadTrack +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate offset between a pair of SLCs for a number of dates') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks. format: YYMMDD YYMMDD YYMMDD. If provided, only estimate offsets of these dates') + parser.add_argument('-wbd', dest='wbd', type=str, default=None, + help = 'water body used to determine number of offsets in range and azimuth') + parser.add_argument('-dem', dest='dem', type=str, default=None, + help = 'if water body is provided, dem file must also be provided') + parser.add_argument('-use_wbd_offset', dest='use_wbd_offset', action='store_true', default=False, + help='use water body to dertermine number of matching offsets') + parser.add_argument('-num_rg_offset', dest='num_rg_offset', type=int, nargs='+', action='append', default=[], + help = 'number of offsets in range. format (e.g. 2 frames, 3 swaths): -num_rg_offset 11 12 13 -num_rg_offset 14 15 16') + parser.add_argument('-num_az_offset', dest='num_az_offset', type=int, nargs='+', action='append', default=[], + help = 'number of offsets in azimuth. format (e.g. 
2 frames, 3 swaths): -num_az_offset 11 12 13 -num_az_offset 14 15 16') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + wbd = inps.wbd + dem = inps.dem + useWbdForNumberOffsets = inps.use_wbd_offset + numberOfOffsetsRangeInput = inps.num_rg_offset + numberOfOffsetsAzimuthInput = inps.num_az_offset + + + if wbd is not None: + wbdFile = os.path.abspath(wbd) + else: + wbdFile = None + if dem is not None: + demFile = os.path.abspath(dem) + else: + demFile = None + ####################################################### + + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + + warningMessage = '' + + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + + #load reference track + referenceTrack = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + + + #set number of matching points + numberOfOffsetsRangeUsed = [[None for j in range(nswath)] for i in range(nframe)] + numberOfOffsetsAzimuthUsed = [[None for j in range(nswath)] for i in range(nframe)] + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + + print('determine number of range/azimuth offsets frame {}, swath {}'.format(frameNumber, swathNumber)) + referenceSwath = referenceTrack.frames[i].swaths[j] + + #1. 
set initinial numbers + #in case there are long time span pairs that have bad coherence + ratio = np.sqrt(1.5) + if referenceTrack.operationMode in scansarModes: + numberOfOffsetsRange = int(10*ratio+0.5) + numberOfOffsetsAzimuth = int(40*ratio+0.5) + else: + numberOfOffsetsRange = int(20*ratio+0.5) + numberOfOffsetsAzimuth = int(20*ratio+0.5) + + #2. change the initial numbers using water body + if useWbdForNumberOffsets and (wbdFile is not None) and (demFile is not None): + numberRangeLooks=100 + numberAzimuthLooks=100 + + #compute land ratio using topo module + latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + + topo(referenceSwath, referenceTrack, demFile, latFile, lonFile, hgtFile, losFile=losFile, + incFile=None, mskFile=None, + numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False) + waterBodyRadar(latFile, lonFile, wbdFile, wbdRadarFile) + + wbdImg = isceobj.createImage() + wbdImg.load(wbdRadarFile+'.xml') + width = wbdImg.width + length = wbdImg.length + + wbd = np.fromfile(wbdRadarFile, dtype=np.byte).reshape(length, width) + landRatio = np.sum(wbd==0) / (length*width) + + if (landRatio <= 0.00125): + print('\n\nWARNING: land too small for estimating slc offsets at frame {}, swath {}'.format(frameNumber, swathNumber)) + print('proceed to use geometric offsets for forming interferogram') + print('but please consider not using this swath\n\n') + warningMessage += 'land too small for estimating slc offsets at frame {}, swath {}, use geometric offsets\n'.format(frameNumber, swathNumber) + + numberOfOffsetsRange = 0 + numberOfOffsetsAzimuth = 0 + else: + #put the results on a grid with a specified interval + 
interval = 0.2 + axisRatio = int(np.sqrt(landRatio)/interval)*interval + interval + if axisRatio > 1: + axisRatio = 1 + + numberOfOffsetsRange = int(numberOfOffsetsRange/axisRatio) + numberOfOffsetsAzimuth = int(numberOfOffsetsAzimuth/axisRatio) + else: + warningMessage += 'no water mask used to determine number of matching points. frame {} swath {}\n'.format(frameNumber, swathNumber) + + #3. user's settings + if numberOfOffsetsRangeInput != []: + numberOfOffsetsRange = numberOfOffsetsRangeInput[i][j] + if numberOfOffsetsAzimuthInput != []: + numberOfOffsetsAzimuth = numberOfOffsetsAzimuthInput[i][j] + + #4. save final results + numberOfOffsetsRangeUsed[i][j] = numberOfOffsetsRange + numberOfOffsetsAzimuthUsed[i][j] = numberOfOffsetsAzimuth + + + #estimate offsets + for idate in range(ndate): + if idate == dateIndexReference: + continue + if dateSecondary != []: + if dates[idate] not in dateSecondary: + continue + + secondaryTrack = loadTrack(dateDirs[idate], dates[idate]) + + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + + print('estimating offset frame {}, swath {}'.format(frameNumber, swathNumber)) + referenceDir = os.path.join(dateDirs[dateIndexReference], frameDir, swathDir) + secondaryDir = os.path.join(dateDirs[idate], frameDir, swathDir) + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + #compute geometrical offsets + if (wbdFile is not None) and (demFile is not None) and (numberOfOffsetsRangeUsed[i][j] == 0) and (numberOfOffsetsAzimuthUsed[i][j] == 0): + #compute geomtricla offsets + latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, 
swathNumber) + rgOffsetFile = 'rg_offset_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + azOffsetFile = 'az_offset_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + geo2rdr(secondarySwath, secondaryTrack, latFile, lonFile, hgtFile, rgOffsetFile, azOffsetFile, numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False) + reformatGeometricalOffset(rgOffsetFile, azOffsetFile, os.path.join(secondaryDir, 'cull.off'), rangeStep=numberRangeLooks, azimuthStep=numberAzimuthLooks, maximumNumberOfOffsets=2000) + + os.remove(rgOffsetFile) + os.remove(rgOffsetFile+'.vrt') + os.remove(rgOffsetFile+'.xml') + os.remove(azOffsetFile) + os.remove(azOffsetFile+'.vrt') + os.remove(azOffsetFile+'.xml') + #estimate offsets using ampcor + else: + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + mSLC = isceobj.createSlcImage() + mSLC.load(os.path.join(referenceDir, dates[dateIndexReference]+'.slc.xml')) + mSLC.filename = os.path.join(referenceDir, dates[dateIndexReference]+'.slc') + mSLC.extraFilename = os.path.join(referenceDir, dates[dateIndexReference]+'.slc.vrt') + mSLC.setAccessMode('read') + mSLC.createImage() + + sSLC = isceobj.createSlcImage() + sSLC.load(os.path.join(secondaryDir, dates[idate]+'.slc.xml')) + sSLC.filename = os.path.join(secondaryDir, dates[idate]+'.slc') + sSLC.extraFilename = os.path.join(secondaryDir, dates[idate]+'.slc.vrt') + sSLC.setAccessMode('read') + sSLC.createImage() + + ampcor.setImageDataType1('complex') + ampcor.setImageDataType2('complex') + + ampcor.setReferenceSlcImage(mSLC) + ampcor.setSecondarySlcImage(sSLC) + + #MATCH REGION + #compute an offset at image center to use + rgoff, azoff = computeOffsetFromOrbit(referenceSwath, referenceTrack, secondarySwath, secondaryTrack, + referenceSwath.numberOfSamples * 0.5, + referenceSwath.numberOfLines * 0.5) + #it seems that we cannot use 0, haven't look 
into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(mSLC.width) + ampcor.setNumberLocationAcross(numberOfOffsetsRangeUsed[i][j]) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(mSLC.length) + ampcor.setNumberLocationDown(numberOfOffsetsAzimuthUsed[i][j]) + + #MATCH PARAMETERS + #full-aperture mode + if referenceTrack.operationMode in scansarModes: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(512) + #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1 + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + #triggering full-aperture mode matching + ampcor.setWinsizeFilt(8) + ampcor.setOversamplingFactorFilt(64) + #regular mode + else: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(64) + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py'value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? 
+ #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things not set which are indicated by 'The following not set' above. + + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + ampcorOffsetFile = os.path.join(secondaryDir, 'ampcor.off') + writeOffset(offsets, ampcorOffsetFile) + + #finalize image, and re-create it + #otherwise the file pointer is still at the end of the image + mSLC.finalizeImage() + sSLC.finalizeImage() + + ########################################## + #3. cull offsets + ########################################## + refinedOffsets = cullOffsets(offsets) + if refinedOffsets == None: + print('******************************************************************') + print('WARNING: There are not enough offsets left, so we are forced to') + print(' use offset without culling. frame {}, swath {}'.format(frameNumber, swathNumber)) + print('******************************************************************') + warningMessage += 'not enough offsets left, use offset without culling. 
frame {} swath {}'.format(frameNumber, swathNumber) + refinedOffsets = offsets + + cullOffsetFile = os.path.join(secondaryDir, 'cull.off') + writeOffset(refinedOffsets, cullOffsetFile) + + #os.chdir('../') + #os.chdir('../') + + + #delete geometry files + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + + if (wbdFile is not None) and (demFile is not None): + latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + + os.remove(latFile) + os.remove(latFile+'.vrt') + os.remove(latFile+'.xml') + + os.remove(lonFile) + os.remove(lonFile+'.vrt') + os.remove(lonFile+'.xml') + + os.remove(hgtFile) + os.remove(hgtFile+'.vrt') + os.remove(hgtFile+'.xml') + + os.remove(losFile) + os.remove(losFile+'.vrt') + os.remove(losFile+'.xml') + + os.remove(wbdRadarFile) + os.remove(wbdRadarFile+'.vrt') + os.remove(wbdRadarFile+'.xml') + + + numberOfOffsetsUsedTxt = '\nnumber of offsets in cross correlation:\n' + numberOfOffsetsUsedTxt += ' frame swath range azimuth\n' + numberOfOffsetsUsedTxt += '============================================\n' + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + numberOfOffsetsUsedTxt += ' {} {} {} {}\n'.format(frameNumber, swathNumber, numberOfOffsetsRangeUsed[i][j], numberOfOffsetsAzimuthUsed[i][j]) + print(numberOfOffsetsUsedTxt) + + if warningMessage != '': + print('\n'+warningMessage+'\n') diff --git a/contrib/stack/alosStack/estimate_swath_offset.py 
b/contrib/stack/alosStack/estimate_swath_offset.py new file mode 100644 index 0000000..bb4cfd5 --- /dev/null +++ b/contrib/stack/alosStack/estimate_swath_offset.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.runSwathOffset import swathOffset + +from StackPulic import loadTrack +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate swath offset') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'data directory') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'data acquisition date. format: YYMMDD') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file') + #parser.add_argument('-match', dest='match', type=int, default=1, + # help = 'do matching when computing adjacent swath offset. 0: no. 
1: yes (default)') + parser.add_argument('-match', dest='match', action='store_true', default=False, + help='do matching when computing adjacent swath offset') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + date = inps.date + outputFile = inps.output + match = inps.match + ####################################################### + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + + frames = sorted([x[-4:] for x in glob.glob(os.path.join(idir, 'f*_*'))]) + track = loadTrack(idir, date) + + #save current dir + dirOriginal = os.getcwd() + os.chdir(idir) + + + if (track.operationMode in scansarModes) and (len(track.frames[0].swaths) >= 2): + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + #compute swath offset + offsetReference = swathOffset(track.frames[i], date+'.slc', outputFile, + crossCorrelation=match, numberOfAzimuthLooks=10) + + os.chdir('../../') + else: + print('there is only one swath, no need to estimate swath offset') diff --git a/contrib/stack/alosStack/filt.py b/contrib/stack/alosStack/filt.py new file mode 100644 index 0000000..c424fe5 --- /dev/null +++ b/contrib/stack/alosStack/filt.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runFilt import filt + +from StackPulic import createObject + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks and compute coherence') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. default: 1') + parser.add_argument('-alpha', dest='alpha', type=float, default=0.3, + help='filtering strength. default: 0.3') + parser.add_argument('-win', dest='win', type=int, default=32, + help = 'filter window size. default: 32') + parser.add_argument('-step', dest='step', type=int, default=4, + help = 'filter step size. 
default: 4') + parser.add_argument('-keep_mag', dest='keep_mag', action='store_true', default=False, + help='keep magnitude before filtering interferogram') + parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true', default=False, + help='mask filtered interferogram with water body') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + filterStrength = inps.alpha + filterWinsize = inps.win + filterStepsize = inps.step + removeMagnitudeBeforeFiltering = not inps.keep_mag + waterBodyMaskStartingStep = inps.wbd_msk + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + self = createObject() + self._insar = createObject() + + self.filterStrength = filterStrength + self.filterWinsize = filterWinsize + self.filterStepsize = filterStepsize + self.removeMagnitudeBeforeFiltering = removeMagnitudeBeforeFiltering + self._insar.multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int' + self._insar.filteredInterferogram = 'filt_' + ms + ml2 + '.int' + self._insar.multilookAmplitude = ms + ml2 + '.amp' + self._insar.multilookPhsig = ms + ml2 + '.phsig' + self._insar.multilookWbdOut = os.path.join(idir, dateReferenceStack, 'insar', dateReferenceStack + ml2 + '.wbd') + if waterBodyMaskStartingStep: + self.waterBodyMaskStartingStep='filt' + else: + self.waterBodyMaskStartingStep=None + + filt(self) + + + diff --git a/contrib/stack/alosStack/form_interferogram.py 
b/contrib/stack/alosStack/form_interferogram.py new file mode 100644 index 0000000..573aa6f --- /dev/null +++ b/contrib/stack/alosStack/form_interferogram.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import multilook +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 +from StackPulic import formInterferogram + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='form interferogram') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + #use one date to find frames and swaths. 
any date should work, here we use dateIndexReference + frames = sorted([x[-4:] for x in glob.glob(os.path.join('./', 'f*_*'))]) + swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join('./', 'f1_*', 's*'))]) + + nframe = len(frames) + nswath = len(swaths) + + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('processing swath {}, frame {}'.format(swathNumber, frameNumber)) + + slcReference = dateReference+'.slc' + slcSecondary = dateSecondary+'.slc' + interferogram = pair + ml1 + '.int' + amplitude = pair + ml1 + '.amp' + formInterferogram(slcReference, slcSecondary, interferogram, amplitude, numberRangeLooks1, numberAzimuthLooks1) + + os.chdir('../') + os.chdir('../') + + + + diff --git a/contrib/stack/alosStack/geo2rdr.py b/contrib/stack/alosStack/geo2rdr.py new file mode 100644 index 0000000..998d1ae --- /dev/null +++ b/contrib/stack/alosStack/geo2rdr.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrCPU +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrGPU + +from StackPulic import loadTrack +from StackPulic import hasGPU + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute range and azimuth offsets') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'date. format: YYMMDD') + parser.add_argument('-date_par_dir', dest='date_par_dir', type=str, default='./', + help = 'date parameter directory. 
default: ./') + parser.add_argument('-lat', dest='lat', type=str, required=True, + help = 'latitude file') + parser.add_argument('-lon', dest='lon', type=str, required=True, + help = 'longtitude file') + parser.add_argument('-hgt', dest='hgt', type=str, required=True, + help = 'height file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + #parser.add_argument('-gpu', dest='gpu', type=int, default=1, + # help = 'use GPU when available. 0: no. 1: yes (default)') + parser.add_argument('-gpu', dest='gpu', action='store_true', default=False, + help='use GPU when available') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + date = inps.date + dateParDir = os.path.join('../', inps.date_par_dir) + latitude = os.path.join('../', inps.lat) + longitude = os.path.join('../', inps.lon) + height = os.path.join('../', inps.hgt) + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + useGPU = inps.gpu + ####################################################### + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + rangeOffset = date + ml1 + '_rg.off' + azimuthOffset = date + ml1 + '_az.off' + + + if not os.path.isfile(os.path.basename(latitude)): + latitudeLink = True + os.symlink(latitude, os.path.basename(latitude)) + os.symlink(latitude+'.vrt', os.path.basename(latitude)+'.vrt') + os.symlink(latitude+'.xml', os.path.basename(latitude)+'.xml') + else: + latitudeLink = False + + if not os.path.isfile(os.path.basename(longitude)): + longitudeLink = True + os.symlink(longitude, os.path.basename(longitude)) + 
os.symlink(longitude+'.vrt', os.path.basename(longitude)+'.vrt') + os.symlink(longitude+'.xml', os.path.basename(longitude)+'.xml') + else: + longitudeLink = False + + if not os.path.isfile(os.path.basename(height)): + heightLink = True + os.symlink(height, os.path.basename(height)) + os.symlink(height+'.vrt', os.path.basename(height)+'.vrt') + os.symlink(height+'.xml', os.path.basename(height)+'.xml') + else: + heightLink = False + + + + track = loadTrack(dateParDir, date) + if useGPU and hasGPU(): + geo2RdrGPU(track, numberRangeLooks1, numberAzimuthLooks1, + latitude, longitude, height, rangeOffset, azimuthOffset) + else: + geo2RdrCPU(track, numberRangeLooks1, numberAzimuthLooks1, + latitude, longitude, height, rangeOffset, azimuthOffset) + + + + if latitudeLink == True: + os.remove(os.path.basename(latitude)) + os.remove(os.path.basename(latitude)+'.vrt') + os.remove(os.path.basename(latitude)+'.xml') + + if longitudeLink == True: + os.remove(os.path.basename(longitude)) + os.remove(os.path.basename(longitude)+'.vrt') + os.remove(os.path.basename(longitude)+'.xml') + + if heightLink == True: + os.remove(os.path.basename(height)) + os.remove(os.path.basename(height)+'.vrt') + os.remove(os.path.basename(height)+'.xml') + diff --git a/contrib/stack/alosStack/geocode.py b/contrib/stack/alosStack/geocode.py new file mode 100644 index 0000000..559439c --- /dev/null +++ b/contrib/stack/alosStack/geocode.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runGeocode import geocode +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +from StackPulic import loadProduct + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='geocode') + parser.add_argument('-ref_date_stack_track', dest='ref_date_stack_track', type=str, required=True, + help = 'track parameter of reference date of stack. format: YYMMDD.track.xml') + parser.add_argument('-dem', dest='dem', type=str, required=True, + help = 'dem file used for geocoding') + parser.add_argument('-input', dest='input', type=str, required=True, + help='input file to be geocoded') + parser.add_argument('-bbox', dest='bbox', type=str, default=None, + help = 'user input bounding box, format: s/n/w/e. default: bbox of ref_date_stack_track') + parser.add_argument('-interp_method', dest='interp_method', type=str, default='nearest', + help = 'interpolation method: sinc, bilinear, bicubic, nearest. default: nearest') + parser.add_argument('-nrlks', dest='nrlks', type=int, default=1, + help = 'total number of range looks = number of range looks 1 * number of range looks 2. default: 1') + parser.add_argument('-nalks', dest='nalks', type=int, default=1, + help = 'total number of azimuth looks = number of azimuth looks 1 * number of azimuth looks 2. 
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + ref_date_stack_track = inps.ref_date_stack_track + demGeo = inps.dem + inputFile = inps.input + bbox = inps.bbox + geocodeInterpMethod = inps.interp_method + numberRangeLooks = inps.nrlks + numberAzimuthLooks = inps.nalks + ####################################################### + + demFile = os.path.abspath(demGeo) + trackReferenceStack = loadProduct(ref_date_stack_track) + + #compute bounding box for geocoding + if bbox is not None: + bbox = [float(x) for x in bbox.split('/')] + if len(bbox)!=4: + raise Exception('user input bbox must have four elements') + else: + img = isceobj.createImage() + img.load(inputFile+'.xml') + bbox = getBboxGeo(trackReferenceStack, useTrackOnly=True, numberOfSamples=img.width, numberOfLines=img.length, numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks) + print('=====================================================================================================') + print('geocode bounding box: {}'.format(bbox)) + print('=====================================================================================================') + + interpMethod = geocodeInterpMethod + geocode(trackReferenceStack, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, 0, 0) + + + diff --git a/contrib/stack/alosStack/ion_check.py b/contrib/stack/alosStack/ion_check.py new file mode 100644 index 0000000..5857e59 --- /dev/null +++ b/contrib/stack/alosStack/ion_check.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + +def 
cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='check ionospheric correction results') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where each pair (YYMMDD-YYMMDD) is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory for estimated ionospheric phase of each date') + parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD YYMMDD-YYMMDD... This argument has highest priority. When provided, only process these pairs') + # parser.add_argument('-nrlks', dest='nrlks', type=int, default=1, + # help = 'number of range looks 1 * number of range looks ion. default: 1') + # parser.add_argument('-nalks', dest='nalks', type=int, default=1, + # help = 'number of azimuth looks 1 * number of azimuth looks ion. 
default: 1') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + pairsUser = inps.pairs + ####################################################### + + if shutil.which('montage') is None: + raise Exception('this command requires montage in ImageMagick\n') + + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) + dateDirs = [os.path.basename(x) for x in dateDirs if os.path.isdir(x)] + if pairsUser is not None: + pairs = pairsUser + else: + pairs = dateDirs + + os.makedirs(odir, exist_ok=True) + + img = isceobj.createImage() + img.load(glob.glob(os.path.join(idir, pairs[0], 'ion', 'ion_cal', 'filt_ion_*rlks_*alks.ion'))[0] + '.xml') + width = img.width + length = img.length + + widthMax = 600 + if width >= widthMax: + ratio = widthMax / width + resize = ' -resize {}%'.format(ratio*100.0) + else: + ratio = 1.0 + resize = '' + + for ipair in pairs: + diffOriginal = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'diff_{}_*rlks_*alks_ori.int'.format(ipair)))[0] + ion = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'filt_ion_*rlks_*alks.ion'))[0] + diff = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'diff_{}_*rlks_*alks.int'.format(ipair)))[0] + + runCmd('mdx {} -s {} -c8pha -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(diffOriginal, width, odir)) + runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out1.ppm'))) + runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(ion, width, odir)) + runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out2.ppm'))) + runCmd('mdx {} -s {} -c8pha -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(diff, width, odir)) + 
runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out3.ppm'))) + runCmd("montage -pointsize {} -label 'original' {} -label 'ionosphere' {} -label 'corrected' {} -geometry +{} -compress LZW{} {}.tif".format( + int((ratio*width)/111*18+0.5), + os.path.join(odir, 'out1.ppm'), + os.path.join(odir, 'out2.ppm'), + os.path.join(odir, 'out3.ppm'), + int((ratio*width)/111*5+0.5), + resize, + os.path.join(odir, ipair))) + runCmd('rm {} {} {}'.format( + os.path.join(odir, 'out1.ppm'), + os.path.join(odir, 'out2.ppm'), + os.path.join(odir, 'out3.ppm'))) + + + #create colorbar + width_colorbar = 100 + length_colorbar = 20 + colorbar = np.ones((length_colorbar, width_colorbar), dtype=np.float32) * \ + (np.linspace(-np.pi, np.pi, num=width_colorbar,endpoint=True,dtype=np.float32))[None,:] + colorbar.astype(np.float32).tofile(os.path.join(odir, 'colorbar')) + runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(os.path.join(odir, 'colorbar'), width_colorbar, odir)) + runCmd('convert {} -compress LZW -resize 100% {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'colorbar_-pi_pi.tiff'))) + runCmd('rm {} {}'.format( + os.path.join(odir, 'colorbar'), + os.path.join(odir, 'out.ppm'))) + + + + diff --git a/contrib/stack/alosStack/ion_correct.py b/contrib/stack/alosStack/ion_correct.py new file mode 100644 index 0000000..b3f0ea2 --- /dev/null +++ b/contrib/stack/alosStack/ion_correct.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import renameFile +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='ionospheric correction') + parser.add_argument('-ion_dir', dest='ion_dir', type=str, required=True, + help = 'directory of ionospheric phase for each date') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + ion_dir = inps.ion_dir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int' + multilookDifferentialInterferogramOriginal = 'diff_' + ms + ml2 + '_ori.int' + + ionosphereReference = os.path.join('../', ion_dir, 'filt_ion_'+dateReference+ml2+'.ion') + ionosphereSecondary = os.path.join('../', ion_dir, 'filt_ion_'+dateSecondary+ml2+'.ion') + + + 
insarDir = 'insar' + #os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + if not os.path.isfile(ionosphereReference): + raise Exception('ionospheric phase file: {} of reference date does not exist in {}.\n'.format(os.path.basename(ionosphereReference), ion_dir)) + if not os.path.isfile(ionosphereSecondary): + raise Exception('ionospheric phase file: {} of secondary date does not exist in {}.\n'.format(os.path.basename(ionosphereSecondary), ion_dir)) + + #correct interferogram + if os.path.isfile(multilookDifferentialInterferogramOriginal): + print('original interferogram: {} is already here, do not rename: {}'.format(multilookDifferentialInterferogramOriginal, multilookDifferentialInterferogram)) + else: + print('renaming {} to {}'.format(multilookDifferentialInterferogram, multilookDifferentialInterferogramOriginal)) + renameFile(multilookDifferentialInterferogram, multilookDifferentialInterferogramOriginal) + + cmd = "imageMath.py -e='a*exp(-1.0*J*(b-c))' --a={} --b={} --c={} -s BIP -t cfloat -o {}".format( + multilookDifferentialInterferogramOriginal, + ionosphereReference, + ionosphereSecondary, + multilookDifferentialInterferogram) + runCmd(cmd) + + os.chdir('../') diff --git a/contrib/stack/alosStack/ion_filt.py b/contrib/stack/alosStack/ion_filt.py new file mode 100644 index 0000000..8ae6044 --- /dev/null +++ b/contrib/stack/alosStack/ion_filt.py @@ -0,0 +1,499 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runIonFilt import computeIonosphere +from isceobj.Alos2Proc.runIonFilt import gaussian +#from isceobj.Alos2Proc.runIonFilt import least_sqares +from isceobj.Alos2Proc.runIonFilt import polyfit_2d +from isceobj.Alos2Proc.runIonFilt import adaptive_gaussian +from isceobj.Alos2Proc.runIonFilt import reformatMaskedAreas + +from 
StackPulic import loadTrack +from StackPulic import createObject +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 +from StackPulic import subbandParameters + +from compute_burst_sync import computeBurstSynchronization + + +def ionFilt(self, referenceTrack, catalog=None): + + from isceobj.Alos2Proc.runIonSubband import defineIonDir + ionDir = defineIonDir() + subbandPrefix = ['lower', 'upper'] + + ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal']) + os.makedirs(ionCalDir, exist_ok=True) + os.chdir(ionCalDir) + + log = '' + + ############################################################ + # STEP 1. compute ionospheric phase + ############################################################ + from isceobj.Constants import SPEED_OF_LIGHT + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + ################################### + #SET PARAMETERS HERE + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + corThresholdAdj = 0.97 + corOrderAdj = 20 + ################################### + + print('\ncomputing ionosphere') + #get files + ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon) + + lowerUnwfile = subbandPrefix[0]+ml2+'.unw' + upperUnwfile = subbandPrefix[1]+ml2+'.unw' + corfile = 'diff'+ml2+'.cor' + + #use image size from lower unwrapped interferogram + img = isceobj.createImage() + img.load(lowerUnwfile + '.xml') + width = img.width + length = img.length + + lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + #amp = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + + #masked out user-specified areas + if self.maskedAreasIon 
!= None: + maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width) + for area in maskedAreas: + lowerUnw[area[0]:area[1], area[2]:area[3]] = 0 + upperUnw[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #remove possible wired values in coherence + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + + #remove water body + wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) + cor[np.nonzero(wbd==-1)] = 0.0 + + #remove small values + cor[np.nonzero(cor size_max: + print('\n\nWARNING: minimum window size for filtering ionosphere phase {} > maximum window size {}'.format(size_min, size_max)) + print(' re-setting maximum window size to {}\n\n'.format(size_min)) + size_max = size_min + if size_secondary % 2 != 1: + size_secondary += 1 + print('window size of secondary filtering of ionosphere phase should be odd, window size changed to {}'.format(size_secondary)) + + #coherence threshold for fitting a polynomial + corThresholdFit = 0.25 + + #ionospheric phase standard deviation after filtering + std_out0 = self.filterStdIon + #std_out0 = 0.1 + ################################################# + + print('\nfiltering ionosphere') + + #input files + ionfile = 'ion'+ml2+'.ion' + #corfile = 'diff'+ml2+'.cor' + corLowerfile = subbandPrefix[0]+ml2+'.cor' + corUpperfile = subbandPrefix[1]+ml2+'.cor' + #output files + ionfiltfile = 'filt_ion'+ml2+'.ion' + stdfiltfile = 'filt_ion'+ml2+'.std' + windowsizefiltfile = 'filt_ion'+ml2+'.win' + + #read data + img = isceobj.createImage() + img.load(ionfile + '.xml') + width = img.width + length = img.length + + ion = np.fromfile(ionfile, dtype=np.float32).reshape(length, width) + corLower = (np.fromfile(corLowerfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + corUpper = (np.fromfile(corUpperfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (corLower + corUpper) / 2.0 + index = np.nonzero(np.logical_or(corLower==0, 
corUpper==0)) + cor[index] = 0 + del corLower, corUpper + + #masked out user-specified areas + if self.maskedAreasIon != None: + maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width) + for area in maskedAreas: + ion[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #remove possible wired values in coherence + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + + #remove water body. Not helpful, just leave it here + wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) + cor[np.nonzero(wbd==-1)] = 0.0 + + # #applying water body mask here + # waterBodyFile = 'wbd'+ml2+'.wbd' + # if os.path.isfile(waterBodyFile): + # print('applying water body mask to coherence used to compute ionospheric phase') + # wbd = np.fromfile(waterBodyFile, dtype=np.int8).reshape(length, width) + # cor[np.nonzero(wbd!=0)] = 0.00001 + + #minimize the effect of low coherence pixels + #cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001 + #filt = adaptive_gaussian(ion, cor, size_max, size_min) + #cor**14 should be a good weight to use. 22-APR-2018 + #filt = adaptive_gaussian_v0(ion, cor**corOrderFilt, size_max, size_min) + + + #1. compute number of looks + azimuthBandwidth = 0 + for i, frameNumber in enumerate(self._insar.referenceFrames): + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + #azimuthBandwidth += 2270.575 * 0.85 + azimuthBandwidth += referenceTrack.frames[i].swaths[j].azimuthBandwidth + azimuthBandwidth = azimuthBandwidth / (len(self._insar.referenceFrames)*(self._insar.endingSwath-self._insar.startingSwath+1)) + + #azimuth number of looks should also apply to burst mode + #assume range bandwidth of subband image is 1/3 of orginal range bandwidth, as in runIonSubband.py!!! 
+ numberOfLooks = referenceTrack.azimuthLineInterval * self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon / (1.0/azimuthBandwidth) *\ + referenceTrack.frames[0].swaths[0].rangeBandwidth / 3.0 / referenceTrack.rangeSamplingRate * self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon + + #consider also burst characteristics. In ScanSAR-stripmap interferometry, azimuthBandwidth is from referenceTrack (ScanSAR) + if self._insar.modeCombination in [21, 31]: + numberOfLooks /= 5.0 + if self._insar.modeCombination in [22, 32]: + numberOfLooks /= 7.0 + if self._insar.modeCombination in [21]: + numberOfLooks *= (self._insar.burstSynchronization/100.0) + + #numberOfLooks checked + print('number of looks to be used for computing subband interferogram standard deviation: {}'.format(numberOfLooks)) + if catalog is not None: + catalog.addItem('number of looks of subband interferograms', numberOfLooks, 'runIonFilt') + log += 'number of looks of subband interferograms: {}\n'.format(numberOfLooks) + + + #2. compute standard deviation of the raw ionospheric phase + #f0 same as in runIonSubband.py!!! + def ion_std(fl, fu, numberOfLooks, cor): + ''' + compute standard deviation of ionospheric phase + fl: lower band center frequency + fu: upper band center frequency + cor: coherence, must be numpy array + ''' + f0 = (fl + fu) / 2.0 + interferogramVar = (1.0 - cor**2) / (2.0 * numberOfLooks * cor**2 + (cor==0)) + std = fl*fu/f0/(fu**2-fl**2)*np.sqrt(fu**2*interferogramVar+fl**2*interferogramVar) + std[np.nonzero(cor==0)] = 0 + return std + std = ion_std(fl, fu, numberOfLooks, cor) + + + #3. 
compute minimum filter window size for given coherence and standard deviation of filtered ionospheric phase + cor2 = np.linspace(0.1, 0.9, num=9, endpoint=True) + std2 = ion_std(fl, fu, numberOfLooks, cor2) + std_out2 = np.zeros(cor2.size) + win2 = np.zeros(cor2.size, dtype=np.int32) + for i in range(cor2.size): + for size in range(9, 10001, 2): + #this window must be the same as those used in adaptive_gaussian!!! + gw = gaussian(size, size/2.0, scale=1.0) + scale = 1.0 / np.sum(gw / std2[i]**2) + std_out2[i] = scale * np.sqrt(np.sum(gw**2 / std2[i]**2)) + win2[i] = size + if std_out2[i] <= std_out0: + break + print('if ionospheric phase standard deviation <= {} rad, minimum filtering window size required:'.format(std_out0)) + print('coherence window size') + print('************************') + for x, y in zip(cor2, win2): + print(' %5.2f %5d'%(x, y)) + print() + if catalog is not None: + catalog.addItem('coherence value', cor2, 'runIonFilt') + catalog.addItem('minimum filter window size', win2, 'runIonFilt') + log += 'coherence value: {}\n'.format(cor2) + log += 'minimum filter window size: {}\n'.format(win2) + + + #4. filter interferogram + #fit ionosphere + if fit: + #prepare weight + wgt = std**2 + wgt[np.nonzero(cor tsmax: + continue + pairs.append(x) + + dates = datesFromPairs(pairs) + if dateZero is not None: + if dateZero not in dates: + raise Exception('zro_date provided by user not in the dates involved in least squares estimation.') + else: + dateZero = dates[0] + + print('all pairs:\n{}'.format(' '.join(pairsAll))) + print('all dates:\n{}'.format(' '.join(datesAll))) + print('used pairs:\n{}'.format(' '.join(pairs))) + print('used dates:\n{}'.format(' '.join(dates))) + + +#################################################################################### + print('\nSTEP 1. 
read files') +#################################################################################### + + ndate = len(dates) + npair = len(pairs) + + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooksIon, numberAzimuthLooks1*numberAzimuthLooksIon) + ionfiltfile = 'filt_ion'+ml2+'.ion' + stdfiltfile = 'filt_ion'+ml2+'.std' + windowsizefiltfile = 'filt_ion'+ml2+'.win' + ionfiltfile1 = os.path.join(idir, pairs[0], 'ion/ion_cal', ionfiltfile) + + img = isceobj.createImage() + img.load(ionfiltfile1+'.xml') + width = img.width + length = img.length + + ionPairs = np.zeros((npair, length, width), dtype=np.float32) + stdPairs = np.zeros((npair, length, width), dtype=np.float32) + winPairs = np.zeros((npair, length, width), dtype=np.float32) + for i in range(npair): + ionfiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', ionfiltfile) + stdfiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', stdfiltfile) + windowsizefiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', windowsizefiltfile) + + ionPairs[i, :, :] = np.fromfile(ionfiltfile1, dtype=np.float32).reshape(length, width) + stdPairs[i, :, :] = np.fromfile(stdfiltfile1, dtype=np.float32).reshape(length, width) + winPairs[i, :, :] = np.fromfile(windowsizefiltfile1, dtype=np.float32).reshape(length, width) + + +#################################################################################### + print('\nSTEP 2. 
do least squares') +#################################################################################### + import copy + from numpy.linalg import matrix_rank + dates2 = copy.deepcopy(dates) + dates2.remove(dateZero) + + #observation matrix + H0 = np.zeros((npair, ndate-1)) + for k in range(npair): + dateReference = pairs[k].split('-')[0] + dateSecondary = pairs[k].split('-')[1] + if dateReference != dateZero: + dateReference_i = dates2.index(dateReference) + H0[k, dateReference_i] = 1 + if dateSecondary != dateZero: + dateSecondary_i = dates2.index(dateSecondary) + H0[k, dateSecondary_i] = -1 + rank = matrix_rank(H0) + if rank < ndate-1: + raise Exception('dates to be estimated are not fully connected by the pairs used in least squares') + else: + print('number of pairs to be used in least squares: {}'.format(npair)) + print('number of dates to be estimated: {}'.format(ndate-1)) + print('observation matrix rank: {}'.format(rank)) + + ts = np.zeros((ndate-1, length, width), dtype=np.float32) + for i in range(length): + if (i+1) % 50 == 0 or (i+1) == length: + print('processing line: %6d of %6d' % (i+1, length), end='\r') + if (i+1) == length: + print() + for j in range(width): + + #observed signal + S0 = ionPairs[:, i, j] + + if ww == False: + #observed signal + S = S0 + H = H0 + else: + #add weight + #https://stackoverflow.com/questions/19624997/understanding-scipys-least-square-function-with-irls + #https://stackoverflow.com/questions/27128688/how-to-use-least-squares-with-weight-matrix-in-python + wgt = winPairs[:, i, j] + W = np.sqrt(1.0/wgt) + H = H0 * W[:, None] + S = S0 * W + + #do least-squares estimation + #[theta, residuals, rank, singular] = np.linalg.lstsq(H, S) + #make W full matrix if use W here (which is a slower method) + #'using W before this' is faster + theta = least_sqares(H, S, W=None) + ts[:, i, j] = theta + + # #dump raw estimate + # cdir = os.getcwd() + # os.makedirs(odir, exist_ok=True) + # os.chdir(odir) + + # for i in range(ndate-1): + # 
file_name = 'filt_ion_'+dates2[i]+ml2+'.ion' + # ts[i, :, :].astype(np.float32).tofile(file_name) + # create_xml(file_name, width, length, 'float') + # file_name = 'filt_ion_'+dateZero+ml2+'.ion' + # (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(file_name) + # create_xml(file_name, width, length, 'float') + + # os.chdir(cdir) + + +#################################################################################### + print('\nSTEP 3. interpolate ionospheric phase') +#################################################################################### + from scipy.interpolate import interp1d + + ml3 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, + numberAzimuthLooks1*numberAzimuthLooks2) + + width2 = width + length2 = length + + #ionrectfile1 = os.path.join(idir, pairs[0], 'insar', pairs[0] + ml3 + '.ion') + #multilookDifferentialInterferogram = os.path.join(idir, pairs[0], 'insar', 'diff_' + pairs[0] + ml3 + '.int') + #img = isceobj.createImage() + #img.load(multilookDifferentialInterferogram + '.xml') + #width3 = img.width + #length3 = img.length + + trackParameter = os.path.join(idir, pairs[0], dateReferenceStack + '.track.xml') + trackTmp = loadProduct(trackParameter) + width3 = int(trackTmp.numberOfSamples / numberRangeLooks2) + length3 = int(trackTmp.numberOfLines / numberAzimuthLooks2) + + #number of range looks output + nrlo = numberRangeLooks1*numberRangeLooks2 + #number of range looks input + nrli = numberRangeLooks1*numberRangeLooksIon + #number of azimuth looks output + nalo = numberAzimuthLooks1*numberAzimuthLooks2 + #number of azimuth looks input + nali = numberAzimuthLooks1*numberAzimuthLooksIon + + cdir = os.getcwd() + os.makedirs(odir, exist_ok=True) + os.chdir(odir) + + for idate in range(ndate-1): + print('interplate {}'.format(dates2[idate])) + if interp and ((numberRangeLooks2 != numberRangeLooksIon) or (numberAzimuthLooks2 != numberAzimuthLooksIon)): + ionfilt = ts[idate, :, :] + index2 = np.linspace(0, 
width2-1, num=width2, endpoint=True) + index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli) + ionrect = np.zeros((length3, width3), dtype=np.float32) + for i in range(length2): + f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate") + ionrect[i, :] = f(index3) + + index2 = np.linspace(0, length2-1, num=length2, endpoint=True) + index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali) + for j in range(width3): + f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate") + ionrect[:, j] = f(index3) + + ionrectfile = 'filt_ion_'+dates2[idate]+ml3+'.ion' + ionrect.astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width3, length3, 'float') + else: + ionrectfile = 'filt_ion_'+dates2[idate]+ml2+'.ion' + ts[idate, :, :].astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width, length, 'float') + + if interp and ((numberRangeLooks2 != numberRangeLooksIon) or (numberAzimuthLooks2 != numberAzimuthLooksIon)): + ionrectfile = 'filt_ion_'+dateZero+ml3+'.ion' + (np.zeros((length3, width3), dtype=np.float32)).astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width3, length3, 'float') + else: + ionrectfile = 'filt_ion_'+dateZero+ml2+'.ion' + (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width, length, 'float') + + os.chdir(cdir) diff --git a/contrib/stack/alosStack/ion_subband.py b/contrib/stack/alosStack/ion_subband.py new file mode 100644 index 0000000..a99b58b --- /dev/null +++ b/contrib/stack/alosStack/ion_subband.py @@ -0,0 +1,619 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Constants import SPEED_OF_LIGHT +from 
isceobj.Alos2Proc.runSwathOffset import swathOffset +from isceobj.Alos2Proc.runFrameOffset import frameOffset +from isceobj.Alos2Proc.runIonSubband import defineIonDir + +from StackPulic import loadTrack +from StackPulic import createObject +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + +def runIonSubband(self, referenceTrack, idir, dateReferenceStack, dateReference, dateSecondary): + '''create subband interferograms + ''' + #catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + #self.updateParamemetersFromUser() + + #if not self.doIon: + # catalog.printToLog(logger, "runIonSubband") + # self._insar.procDoc.addAllFromCatalog(catalog) + # return + + #referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + #using 1/3, 1/3, 1/3 band split + radarWavelength = referenceTrack.radarWavelength + rangeBandwidth = referenceTrack.frames[0].swaths[0].rangeBandwidth + rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate + radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0) + radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0) + subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper] + subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + + subbandPrefix = ['lower', 'upper'] + + ''' + ionDir = { + ionDir['swathMosaic'] : 'mosaic', + ionDir['insar'] : 'insar', + ionDir['ion'] : 'ion', + ionDir['subband'] : ['lower', 'upper'], + ionDir['ionCal'] : 'ion_cal' + } + ''' + #define upper level directory names + ionDir = defineIonDir() + + + #self._insar.subbandRadarWavelength = subbandRadarWavelength + + + ############################################################ + # STEP 1. 
create directories + ############################################################ + #create and enter 'ion' directory + #after finishing each step, we are in this directory + os.makedirs(ionDir['ion'], exist_ok=True) + os.chdir(ionDir['ion']) + + #create insar processing directories + for k in range(2): + subbandDir = ionDir['subband'][k] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + fullDir = os.path.join(subbandDir, frameDir, swathDir) + os.makedirs(fullDir, exist_ok=True) + + #create ionospheric phase directory + os.makedirs(ionDir['ionCal'], exist_ok=True) + + + ############################################################ + # STEP 2. create subband interferograms + ############################################################ + #import numpy as np + #import stdproc + #from iscesys.StdOEL.StdOELPy import create_writer + #from isceobj.Alos2Proc.Alos2ProcPublic import readOffset + #from contrib.alos2proc.alos2proc import rg_filter + from StackPulic import formInterferogram + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + + #skip this time consuming process, if interferogram already exists + if os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram)) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude)) and \ + 
os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram)) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude)) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.xml')): + print('interferogram already exists at swath {}, frame {}'.format(swathNumber, frameNumber)) + continue + + # #filter reference and secondary images + # for slcx in [self._insar.referenceSlc, self._insar.secondarySlc]: + # slc = os.path.join('../', frameDir, swathDir, slcx) + # slcLower = os.path.join(ionDir['subband'][0], frameDir, swathDir, slcx) + # slcUpper = os.path.join(ionDir['subband'][1], frameDir, swathDir, slcx) + # rg_filter(slc, 2, + # [slcLower, slcUpper], + # subbandBandWidth, + # subbandFrequencyCenter, + # 257, 2048, 0.1, 0, 0.0) + #resample + for k in range(2): + os.chdir(os.path.join(ionDir['subband'][k], frameDir, swathDir)) + slcReference = os.path.join('../../../../', idir, dateReference, frameDir, swathDir, dateReference+'_{}.slc'.format(ionDir['subband'][k])) + slcSecondary = os.path.join('../../../../', idir, dateSecondary, frameDir, swathDir, dateSecondary+'_{}.slc'.format(ionDir['subband'][k])) + formInterferogram(slcReference, slcSecondary, self._insar.interferogram, self._insar.amplitude, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + os.chdir('../../../') + + + 
############################################################ + # STEP 3. mosaic swaths + ############################################################ + from isceobj.Alos2Proc.runSwathMosaic import swathMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + + #log output info + log = 'mosaic swaths in {} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now()) + log += '================================================================================================\n' + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = ionDir['swathMosaic'] + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if not (self._insar.endingSwath-self._insar.startingSwath >= 1): + import shutil + swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber) + + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), 
self._insar.interferogram+'.xml') + os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #no need to update frame parameters here + os.chdir('../') + #no need to save parameter file here + os.chdir('../') + + continue + + #choose offsets + numberOfFrames = len(referenceTrack.frames) + numberOfSwaths = len(referenceTrack.frames[i].swaths) + # if self.swathOffsetMatching: + # #no need to do this as the API support 2-d list + # #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + # #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + # rangeOffsets = self._insar.swathRangeOffsetMatchingReference + # azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference + + # else: + # #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + # #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + # rangeOffsets = self._insar.swathRangeOffsetGeometricalReference + # azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference + + # rangeOffsets = rangeOffsets[i] + # azimuthOffsets = azimuthOffsets[i] + + + #compute swath offset using reference stack + #geometrical offset is enough now + offsetReferenceStack = swathOffset(referenceTrack.frames[i], dateReference+'.slc', 'swath_offset_' + dateReference + '.txt', + crossCorrelation=False, numberOfAzimuthLooks=10) + #we can faithfully make it integer. 
+ #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + #phaseDiff = [None] + swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon] + phaseDiff = swathPhaseDiffIon[k] + if swathPhaseDiffIon[k] is None: + phaseDiff = None + else: + phaseDiff = swathPhaseDiffIon[k][i] + phaseDiff.insert(0, None) + + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) + + # #compute phase needed to be compensated using startingRange + # if j >= 1: + # #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] + # #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] + # phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ + # referenceTrack.frames[i].swaths[j].startingRange - 
secondaryTrack.frames[i].swaths[j].startingRange: + # #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) + # #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m), + # #it should be OK to do the above. + # #see results in neom where it meets the above requirement, but there is still phase diff + # #to be less risky, we do not input values here + # phaseDiff.append(None) + # else: + # phaseDiff.append(None) + + #note that frame parameters are updated after mosaicking, here no need to update parameters + #mosaic amplitudes + swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + #These are for ALOS-2, may need to change for ALOS-4! + phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857] + + #if (referenceTrack.frames[i].processingSoftwareVersion == '2.025' and secondaryTrack.frames[i].processingSoftwareVersion == '2.023') or \ + # (referenceTrack.frames[i].processingSoftwareVersion == '2.023' and secondaryTrack.frames[i].processingSoftwareVersion == '2.025'): + + # # changed value number of samples to estimate new value new values estimate area + # ########################################################################################################################### + # # 2.6766909968024932-->2.6581660335779866 1808694 d169-f2850, north CA + # # 2.179664007520499 -->2.204125866652153 131120 d169-f2850, north CA + + # phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.204125866652153, 2.6581660335779866, 3.130810857] + + snapThreshold = 0.2 + + #the above preparetions only applies to 'self._insar.modeCombination == 21' + #looks like it also works for 31 (scansarNominalModes-stripmapModes) + # if self._insar.modeCombination != 21: + # 
phaseDiff = None + # phaseDiffFixed = None + # snapThreshold = None + + #whether snap for each swath + if self.swathPhaseDiffSnapIon == None: + snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)] + else: + snapSwath = self.swathPhaseDiffSnapIon + if len(snapSwath) != numberOfFrames: + raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values') + for iii in range(numberOfFrames): + if len(snapSwath[iii]) != (numberOfSwaths-1): + raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values') + + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False, + phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=4, + filt=False, resamplingMethod=1) + + #the first item is meaningless for all the following list, so only record the following items + if phaseDiff == None: + phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)] + #catalog.addItem('frame {} {} band swath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), 
numberOfValidSamples[1:], 'runIonSubband') + + log += 'frame {} {} band swath phase diff input: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiff[1:]) + log += 'frame {} {} band swath phase diff estimated: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffEst[1:]) + log += 'frame {} {} band swath phase diff used: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffUsed[1:]) + log += 'frame {} {} band swath phase diff used source: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffSource[1:]) + log += 'frame {} {} band swath phase diff samples used: {}\n'.format(frameNumber, ionDir['subband'][k], numberOfValidSamples[1:]) + + #check if there is value around 3.130810857, which may not be stable + phaseDiffUnstableExist = False + for xxx in phaseDiffUsed: + if abs(abs(xxx) - 3.130810857) < 0.2: + phaseDiffUnstableExist = True + #catalog.addItem('frame {} {} band swath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband') + log += 'frame {} {} band swath phase diff unstable exists: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffUnstableExist) + log += '\n' + + create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int') + + #update secondary frame parameters here, here no need to update parameters + os.chdir('../') + #save parameter file, here no need to save parameter file + os.chdir('../') + os.chdir('../') + + + ############################################################ + # STEP 4. 
mosaic frames + ############################################################ + from isceobj.Alos2Proc.runFrameMosaic import frameMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + log += 'mosaic frames in {} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now()) + log += '================================================================================================\n' + + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + for k in range(2): + os.chdir(ionDir['subband'][k]) + + mosaicDir = ionDir['insar'] + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + # #shutil.copy2() can overwrite + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + os.rename(os.path.join('../', frameDir, 
self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #update track parameters, no need to update track parameters here + + else: + # #choose offsets + # if self.frameOffsetMatching: + # rangeOffsets = self._insar.frameRangeOffsetMatchingReference + # azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + # else: + # rangeOffsets = self._insar.frameRangeOffsetGeometricalReference + # azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference + + if referenceTrack.operationMode in scansarModes: + matchingMode=0 + else: + matchingMode=1 + + #geometrical offset is enough + offsetReferenceStack = frameOffset(referenceTrack, dateReference+'.slc', 'frame_offset_' + dateReference + '.txt', + crossCorrelation=False, matchingMode=matchingMode) + + #we can faithfully make it integer. 
+ #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude)) + + #note that track parameters are updated after mosaicking + #mosaic amplitudes + frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') + + #if multiple frames, remove frame amplitudes/inteferograms to save space + for x in inputAmplitudes: + os.remove(x) + os.remove(x+'.vrt') + os.remove(x+'.xml') + + for x in inputInterferograms: + os.remove(x) + os.remove(x+'.vrt') + os.remove(x+'.xml') + + #catalog.addItem('{} band frame phase diff estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + #catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + #catalog.addItem('{} band frame phase diff used 
source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + #catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband') + + log += '{} band frame phase diff estimated: {}\n'.format(ionDir['subband'][k], phaseDiffEst[1:]) + log += '{} band frame phase diff used: {}\n'.format(ionDir['subband'][k], phaseDiffUsed[1:]) + log += '{} band frame phase diff used source: {}\n'.format(ionDir['subband'][k], phaseDiffSource[1:]) + log += '{} band frame phase diff samples used: {}\n'.format(ionDir['subband'][k], numberOfValidSamples[1:]) + log += '\n' + + #update secondary parameters here, no need to update secondary parameters here + + os.chdir('../') + #save parameter file, no need to save parameter file here + os.chdir('../') + + + ############################################################ + # STEP 5. clear frame processing files + ############################################################ + import shutil + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + #keep subswath interferograms + #shutil.rmtree(frameDir) + #cmd = 'rm -rf {}'.format(frameDir) + #runCmd(cmd) + os.chdir('../') + + + ############################################################ + # STEP 6. 
create differential interferograms + ############################################################ + import numpy as np + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + + insarDir = ionDir['insar'] + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize + radarWavelength = subbandRadarWavelength[k] + + ml1 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + if dateReference == dateReferenceStack: + rectRangeOffset = os.path.join('../../../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram) + elif dateSecondary == dateReferenceStack: + rectRangeOffset = os.path.join('../../../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram) + else: + rectRangeOffset1 = os.path.join('../../../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') + rectRangeOffset2 = os.path.join('../../../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*(b-c)*4.0*{}*{}/{})*(b!=0)*(c!=0)' --a={} --b={} --c={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset1, rectRangeOffset2, self._insar.differentialInterferogram) + runCmd(cmd) + + os.chdir('../../') + + + os.chdir('../') + + + return log + + + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='create subband interferograms for ionospheric correction') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'secondary date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + # parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1, + # help = 'number of range looks ion. default: 1') + # parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1, + # help = 'number of azimuth looks ion. default: 1') + parser.add_argument('-snap', dest='snap', type=int, nargs='+', action='append', default=None, + help='swath phase difference snap to fixed values. e.g. you have 3 swaths and 2 frames. specify this parameter as: -snap 1 1 -snap 1 0, where 0 means no snap, 1 means snap') + parser.add_argument('-phase_diff_lower', dest='phase_diff_lower', type=str, nargs='+', action='append', default=None, + help='swath phase difference lower band. e.g. you have 3 swaths and 2 frames. specify this parameter as: -phase_diff_lower -1.3 2.37 -phase_diff_lower 0.1 None, where None means no user input phase difference value') + parser.add_argument('-phase_diff_upper', dest='phase_diff_upper', type=str, nargs='+', action='append', default=None, + help='swath phase difference upper band. e.g.
you have 3 swaths and 2 frames. specify this parameter as: -phase_diff_upper -1.3 2.37 -phase_diff_upper 0.1 None, where None means no user input phase difference value') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + #numberRangeLooksIon = inps.nrlks_ion + #numberAzimuthLooksIon = inps.nalks_ion + swathPhaseDiffSnapIon = inps.snap + swathPhaseDiffLowerIon = inps.phase_diff_lower + swathPhaseDiffUpperIon = inps.phase_diff_upper + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack) + nframe = len(frames) + nswath = len(swaths) + + trackReferenceStack = loadTrack('./', dates[dateIndexReference]) + #trackReference = loadTrack('./', dateReference) + #trackSecondary = loadTrack('./', dateSecondary) + + + self = createObject() + self._insar = createObject() + self._insar.referenceFrames = frames + self._insar.startingSwath = swaths[0] + self._insar.endingSwath = swaths[-1] + + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + + self._insar.interferogram = ms + ml1 + '.int' + self._insar.amplitude = ms + ml1 + '.amp' + self._insar.differentialInterferogram = 'diff_' + ms + ml1 + '.int' + + #set self.swathPhaseDiffSnapIon, self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon + if swathPhaseDiffSnapIon is not None: + swathPhaseDiffSnapIon = [[True if x==1 else False for x in y] for y in swathPhaseDiffSnapIon] + if
len(swathPhaseDiffSnapIon) != nframe: + raise Exception('please specify each frame for parameter: -snap') + for i in range(nframe): + if len(swathPhaseDiffSnapIon[i]) != (nswath-1): + raise Exception('please specify correct number of swaths for parameter: -snap') + + if swathPhaseDiffLowerIon is not None: + swathPhaseDiffLowerIon = [[float(x) if x.upper() != 'NONE' else None for x in y] for y in swathPhaseDiffLowerIon] + if len(swathPhaseDiffLowerIon) != nframe: + raise Exception('please specify each frame for parameter: -phase_diff_lower') + for i in range(nframe): + if len(swathPhaseDiffLowerIon[i]) != (nswath-1): + raise Exception('please specify correct number of swaths for parameter: -phase_diff_lower') + + if swathPhaseDiffUpperIon is not None: + swathPhaseDiffUpperIon = [[float(x) if x.upper() != 'NONE' else None for x in y] for y in swathPhaseDiffUpperIon] + if len(swathPhaseDiffUpperIon) != nframe: + raise Exception('please specify each frame for parameter: -phase_diff_upper') + for i in range(nframe): + if len(swathPhaseDiffUpperIon[i]) != (nswath-1): + raise Exception('please specify correct number of swaths for parameter: -phase_diff_upper') + + self.swathPhaseDiffSnapIon = swathPhaseDiffSnapIon + self.swathPhaseDiffLowerIon = swathPhaseDiffLowerIon + self.swathPhaseDiffUpperIon = swathPhaseDiffUpperIon + + log = runIonSubband(self, trackReferenceStack, idir, dateReferenceStack, dateReference, dateSecondary) + + logFile = 'process.log' + with open(logFile, 'a') as f: + f.write(log) + diff --git a/contrib/stack/alosStack/ion_unwrap.py b/contrib/stack/alosStack/ion_unwrap.py new file mode 100644 index 0000000..da647e0 --- /dev/null +++ b/contrib/stack/alosStack/ion_unwrap.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runIonUwrap
import ionUwrap + +from StackPulic import loadTrack +from StackPulic import createObject +from StackPulic import stackDateStatistics + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='unwrap subband interferograms for ionospheric correction') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1, + help = 'number of range looks ion. default: 1') + parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1, + help = 'number of azimuth looks ion. default: 1') + parser.add_argument('-filt', dest='filt', action='store_true', default=False, + help='filter subband interferograms') + parser.add_argument('-alpha', dest='alpha', type=float, default=0.3, + help='filtering strength. default: 0.3') + parser.add_argument('-win', dest='win', type=int, default=32, + help = 'filter window size. default: 32') + parser.add_argument('-step', dest='step', type=int, default=4, + help = 'filter step size. 
default: 4') + parser.add_argument('-keep_mag', dest='keep_mag', action='store_true', default=False, + help='keep magnitude before filtering subband interferogram') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + wbd = inps.wbd + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooksIon = inps.nrlks_ion + numberAzimuthLooksIon = inps.nalks_ion + filterSubbandInt = inps.filt + filterStrengthSubbandInt = inps.alpha + filterWinsizeSubbandInt = inps.win + filterStepsizeSubbandInt = inps.step + removeMagnitudeBeforeFilteringSubbandInt = not inps.keep_mag + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack) + trackReference = loadTrack('./', dateReference) + + self = createObject() + self._insar = createObject() + self._insar.wbd = wbd + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + self._insar.numberRangeLooksIon = numberRangeLooksIon + self._insar.numberAzimuthLooksIon = numberAzimuthLooksIon + + self._insar.amplitude = ms + ml1 + '.amp' + self._insar.differentialInterferogram = 'diff_' + ms + ml1 + '.int' + self._insar.latitude = dateReferenceStack + ml1 + '.lat' + self._insar.longitude = dateReferenceStack + ml1 + '.lon' + self.filterSubbandInt = filterSubbandInt + self.filterStrengthSubbandInt = filterStrengthSubbandInt + self.filterWinsizeSubbandInt = filterWinsizeSubbandInt + self.filterStepsizeSubbandInt = filterStepsizeSubbandInt + 
self.removeMagnitudeBeforeFilteringSubbandInt = removeMagnitudeBeforeFilteringSubbandInt + + ionUwrap(self, trackReference, latLonDir=os.path.join(idir, dates[dateIndexReference], 'insar')) diff --git a/contrib/stack/alosStack/look_coherence.py b/contrib/stack/alosStack/look_coherence.py new file mode 100644 index 0000000..5e4a710 --- /dev/null +++ b/contrib/stack/alosStack/look_coherence.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.runCoherence import coherence + +from StackPulic import loadProduct +from StackPulic import stackDateStatistics + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks and compute coherence') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. 
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + amplitude = pair + ml1 + '.amp' + differentialInterferogram = 'diff_' + pair + ml1 + '.int' + multilookAmplitude = pair + ml2 + '.amp' + multilookDifferentialInterferogram = 'diff_' + pair + ml2 + '.int' + multilookCoherence = pair + ml2 + '.cor' + + amp = isceobj.createImage() + amp.load(amplitude+'.xml') + width = amp.width + length = amp.length + width2 = int(width / numberRangeLooks2) + length2 = int(length / numberAzimuthLooks2) + + + if not ((numberRangeLooks2 == 1) and (numberAzimuthLooks2 == 1)): + #take looks + look(differentialInterferogram, multilookDifferentialInterferogram, width, numberRangeLooks2, numberAzimuthLooks2, 4, 0, 1) + look(amplitude, multilookAmplitude, width, numberRangeLooks2, numberAzimuthLooks2, 4, 1, 1) + #creat xml + create_xml(multilookDifferentialInterferogram, width2, length2, 'int') + create_xml(multilookAmplitude, width2, length2, 'amp') + + + + if (numberRangeLooks1*numberRangeLooks2*numberAzimuthLooks1*numberAzimuthLooks2 >= 9): + cmd = "imageMath.py -e='sqrt(b_0*b_1);abs(a)/(b_0+(b_0==0))/(b_1+(b_1==0))*(b_0!=0)*(b_1!=0)' --a={} --b={} -o {} -t float -s BIL".format( + multilookDifferentialInterferogram, + multilookAmplitude, + 
multilookCoherence) + runCmd(cmd) + else: + #estimate coherence using a moving window + coherence(multilookAmplitude, multilookDifferentialInterferogram, multilookCoherence, + method="cchz_wave", windowSize=5) + + + os.chdir('../') diff --git a/contrib/stack/alosStack/look_geom.py b/contrib/stack/alosStack/look_geom.py new file mode 100644 index 0000000..7a3a7db --- /dev/null +++ b/contrib/stack/alosStack/look_geom.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'date. format: YYMMDD') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. 
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + date = inps.date + wbdFile = inps.wbd + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + ####################################################### + + #pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + + latitude = date + ml1 + '.lat' + longitude = date + ml1 + '.lon' + height = date + ml1 + '.hgt' + los = date + ml1 + '.los' + + multilookLatitude = date + ml2 + '.lat' + multilookLongitude = date + ml2 + '.lon' + multilookHeight = date + ml2 + '.hgt' + multilookLos = date + ml2 + '.los' + multilookWbdOut = date + ml2 + '.wbd' + + wbdFile = os.path.abspath(wbdFile) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + img = isceobj.createImage() + img.load(latitude+'.xml') + width = img.width + length = img.length + width2 = int(width / numberRangeLooks2) + length2 = int(length / numberAzimuthLooks2) + + if not ((numberRangeLooks2 == 1) and (numberAzimuthLooks2 == 1)): + #take looks + look(latitude, multilookLatitude, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1) + look(longitude, multilookLongitude, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1) + look(height, multilookHeight, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1) + #creat xml + create_xml(multilookLatitude, width2, length2, 'double') + create_xml(multilookLongitude, width2, length2, 'double') + create_xml(multilookHeight, width2, length2, 'double') + #los has two bands, use look program in isce instead + #cmd = "looks.py -i {} -o {} -r {} -a 
{}".format(self._insar.los, self._insar.multilookLos, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2) + #runCmd(cmd) + + #replace the above system call with function call + from mroipac.looks.Looks import Looks + from isceobj.Image import createImage + inImage = createImage() + inImage.load(los+'.xml') + + lkObj = Looks() + lkObj.setDownLooks(numberAzimuthLooks2) + lkObj.setAcrossLooks(numberRangeLooks2) + lkObj.setInputImage(inImage) + lkObj.setOutputFilename(multilookLos) + lkObj.looks() + + #water body + #this looking operation has no problems where there is only water and land, but there is also possible no-data area + #look(self._insar.wbdOut, self._insar.multilookWbdOut, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 0, 0, 1) + #create_xml(self._insar.multilookWbdOut, width2, length2, 'byte') + #use waterBodyRadar instead to avoid the problems of no-data pixels in water body + waterBodyRadar(multilookLatitude, multilookLongitude, wbdFile, multilookWbdOut) + + + os.chdir('../') \ No newline at end of file diff --git a/contrib/stack/alosStack/mosaic_interferogram.py b/contrib/stack/alosStack/mosaic_interferogram.py new file mode 100644 index 0000000..3a86d4d --- /dev/null +++ b/contrib/stack/alosStack/mosaic_interferogram.py @@ -0,0 +1,226 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.runSwathOffset import swathOffset +from isceobj.Alos2Proc.runFrameOffset import frameOffset +from isceobj.Alos2Proc.runSwathMosaic import swathMosaic +from isceobj.Alos2Proc.runFrameMosaic import frameMosaic + +from StackPulic import acquisitionModesAlos2 +from StackPulic import loadTrack + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='form interferogram') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + logFile = 'process.log' + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + interferogram = pair + ml1 + '.int' + amplitude = pair + ml1 + '.amp' + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #use one date to find frames and swaths. 
any date should work, here we use dateIndexReference + frames = sorted([x[-4:] for x in glob.glob(os.path.join('./', 'f*_*'))]) + swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join('./', 'f1_*', 's*'))]) + + nframe = len(frames) + nswath = len(swaths) + + trackReferenceStack = loadTrack('./', dateReferenceStack) + + #mosaic swaths + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + + if not (swaths[-1] - swaths[0] >= 1): + + swathDir = 's{}'.format(swaths[0]) + if not os.path.isfile(interferogram): + os.symlink(os.path.join('../', swathDir, interferogram), interferogram) + shutil.copy2(os.path.join('../', swathDir, interferogram+'.vrt'), interferogram+'.vrt') + shutil.copy2(os.path.join('../', swathDir, interferogram+'.xml'), interferogram+'.xml') + if not os.path.isfile(amplitude): + os.symlink(os.path.join('../', swathDir, amplitude), amplitude) + shutil.copy2(os.path.join('../', swathDir, amplitude+'.vrt'), amplitude+'.vrt') + shutil.copy2(os.path.join('../', swathDir, amplitude+'.xml'), amplitude+'.xml') + + os.chdir('../../') + + else: + #compute swath offset using reference stack + #geometrical offset is enough now + offsetReferenceStack = swathOffset(trackReferenceStack.frames[i], dateReferenceStack+'.slc', 'swath_offset_' + dateReferenceStack + '.txt', + crossCorrelation=False, numberOfAzimuthLooks=10) + #we can faithfully make it integer. 
+ #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + inputInterferograms.append(os.path.join('../', swathDir, interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, amplitude)) + + #note that frame parameters do not need to be updated after mosaicking + #mosaic amplitudes + swathMosaic(trackReferenceStack.frames[i], inputAmplitudes, amplitude, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + swathMosaic(trackReferenceStack.frames[i], inputInterferograms, interferogram, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, resamplingMethod=1) + + create_xml(amplitude, trackReferenceStack.frames[i].numberOfSamples, trackReferenceStack.frames[i].numberOfLines, 'amp') + create_xml(interferogram, trackReferenceStack.frames[i].numberOfSamples, trackReferenceStack.frames[i].numberOfLines, 'int') + + os.chdir('../../') + + + #mosaic frame + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if nframe == 1: + frameDir = os.path.join('f1_{}/mosaic'.format(frames[0])) + if not os.path.isfile(interferogram): + os.symlink(os.path.join('../', frameDir, interferogram), interferogram) + #shutil.copy2() can overwrite + shutil.copy2(os.path.join('../', frameDir, interferogram+'.vrt'), interferogram+'.vrt') + shutil.copy2(os.path.join('../', frameDir, interferogram+'.xml'), interferogram+'.xml') + if not os.path.isfile(amplitude): + os.symlink(os.path.join('../', frameDir, amplitude), amplitude) + shutil.copy2(os.path.join('../', frameDir, amplitude+'.vrt'), amplitude+'.vrt') + shutil.copy2(os.path.join('../', 
frameDir, amplitude+'.xml'), amplitude+'.xml') + else: + if trackReferenceStack.operationMode in scansarModes: + matchingMode=0 + else: + matchingMode=1 + + #geometrical offset is enough + offsetReferenceStack = frameOffset(trackReferenceStack, dateReferenceStack+'.slc', 'frame_offset_' + dateReferenceStack + '.txt', + crossCorrelation=False, matchingMode=matchingMode) + + #we can faithfully make it integer. + #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', amplitude)) + + #note that track parameters do not need to be updated after mosaicking + #mosaic amplitudes + frameMosaic(trackReferenceStack, inputAmplitudes, amplitude, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = \ + frameMosaic(trackReferenceStack, inputInterferograms, interferogram, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, + updateTrack=False, phaseCompensation=True, resamplingMethod=1) + + create_xml(amplitude, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'amp') + create_xml(interferogram, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'int') + + #if multiple frames, remove frame amplitudes/inteferograms to save space + for x in inputAmplitudes: + os.remove(x) + os.remove(x+'.vrt') + os.remove(x+'.xml') + + for x in inputInterferograms: + os.remove(x) + os.remove(x+'.vrt') + 
os.remove(x+'.xml') + + #log output info + log = '{} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now()) + log += '================================================================================================\n' + log += 'frame phase diff estimated: {}\n'.format(phaseDiffEst[1:]) + log += 'frame phase diff used: {}\n'.format(phaseDiffUsed[1:]) + log += 'frame phase diff used source: {}\n'.format(phaseDiffSource[1:]) + log += 'frame phase diff samples used: {}\n'.format(numberOfValidSamples[1:]) + log += '\n' + with open(os.path.join('../', logFile), 'a') as f: + f.write(log) + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/mosaic_parameter.py b/contrib/stack/alosStack/mosaic_parameter.py new file mode 100644 index 0000000..1426591 --- /dev/null +++ b/contrib/stack/alosStack/mosaic_parameter.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj + +from StackPulic import loadTrack +from StackPulic import saveTrack +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='mosaic all swaths and frames to form an entire track') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. 
If provided, only process these dates') + parser.add_argument('-ref_frame', dest='ref_frame', type=str, default=None, + help = 'frame number of the swath whose grid is used as reference. e.g. 2800. default: first frame') + parser.add_argument('-ref_swath', dest='ref_swath', type=int, default=None, + help = 'swath number of the swath whose grid is used as reference. e.g. 1. default: first swath') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + frameReference = inps.ref_frame + swathReference = inps.ref_swath + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + DEBUG=False + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + #find frame and swath indexes of reference swath + if frameReference is None: + frameReference = frames[0] + if swathReference is None: + swathReference = swaths[0] + + + frameReferenceIndex = frames.index(frameReference) + swathReferenceIndex = swaths.index(swathReference) + + print('resampling all frames and swaths to frame: {} (index: {}) swath: {} (index {})'.format( + frameReference, frameReferenceIndex, swathReference, swathReferenceIndex)) + + + #mosaic parameters of each date + #strictly follow the actual image mosaicking processing of 
reference (after resampling adjustment in resample_common_grid.py) + #secondary sensingStart and startingRange are OK, no need to consider other things about secondary + os.chdir(idir) + for idate in range(ndate): + if dateSecondary != []: + if dates[idate] not in dateSecondary: + continue + + print('processing: {}'.format(dates[idate])) + os.chdir(dates[idate]) + + track = loadTrack('./', dates[idate]) + swathReference = track.frames[frameReferenceIndex].swaths[swathReferenceIndex] + #1. mosaic swaths + for i, frameNumber in enumerate(frames): + startingRange = [] + sensingStart = [] + endingRange = [] + sensingEnd = [] + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swath = track.frames[i].swaths[j] + startingRange.append(swath.startingRange) + endingRange.append(swath.startingRange+swath.rangePixelSize*swath.numberOfSamples) + sensingStart.append(swath.sensingStart) + sensingEnd.append(swath.sensingStart+datetime.timedelta(seconds=swath.azimuthLineInterval*swath.numberOfLines)) + + #update frame parameters + ######################################################### + frame = track.frames[i] + #mosaic size + frame.numberOfSamples = int(round((max(endingRange)-min(startingRange))/swathReference.rangePixelSize) / numberRangeLooks1) + frame.numberOfLines = int(round((max(sensingEnd)-min(sensingStart)).total_seconds()/swathReference.azimuthLineInterval) / numberAzimuthLooks1) + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = min(startingRange) + frame.rangeSamplingRate = swathReference.rangeSamplingRate + frame.rangePixelSize = swathReference.rangePixelSize + #azimuth parameters + frame.sensingStart = min(sensingStart) + frame.prf = swathReference.prf + frame.azimuthPixelSize = swathReference.azimuthPixelSize + frame.azimuthLineInterval = swathReference.azimuthLineInterval + + + #2. 
mosaic frames + startingRange = [] + sensingStart = [] + endingRange = [] + sensingEnd = [] + for i, frameNumber in enumerate(frames): + frame = track.frames[i] + startingRange.append(frame.startingRange) + endingRange.append(frame.startingRange+numberRangeLooks1*frame.rangePixelSize*frame.numberOfSamples) + sensingStart.append(frame.sensingStart) + sensingEnd.append(frame.sensingStart+datetime.timedelta(seconds=numberAzimuthLooks1*frame.azimuthLineInterval*frame.numberOfLines)) + + + #update track parameters + ######################################################### + #mosaic size + track.numberOfSamples = round((max(endingRange)-min(startingRange))/(numberRangeLooks1*swathReference.rangePixelSize)) + track.numberOfLines = round((max(sensingEnd)-min(sensingStart)).total_seconds()/(numberAzimuthLooks1*swathReference.azimuthLineInterval)) + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + track.startingRange = min(startingRange) + track.rangeSamplingRate = swathReference.rangeSamplingRate + track.rangePixelSize = swathReference.rangePixelSize + #azimuth parameters + track.sensingStart = min(sensingStart) + track.prf = swathReference.prf + track.azimuthPixelSize = swathReference.azimuthPixelSize + track.azimuthLineInterval = swathReference.azimuthLineInterval + + #save mosaicking result + saveTrack(track, dates[idate]) + os.chdir('../') diff --git a/contrib/stack/alosStack/pair_up.py b/contrib/stack/alosStack/pair_up.py new file mode 100644 index 0000000..c3f875a --- /dev/null +++ b/contrib/stack/alosStack/pair_up.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='create InSAR pairs') + parser.add_argument('-idir1', dest='idir1', type=str, required=True, + help = 'input directory where original data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-idir2', dest='idir2', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-xml', dest='xml', type=str, default=None, + help = 'alos2App.py input xml file, e.g. alos2App.xml. default: None') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD YYMMDD-YYMMDD... This argument has highest priority. When provided, only process these pairs') + parser.add_argument('-num', dest='num', type=int, default=None, + help = 'number of subsequent acquistions for each acquistion to pair up with. default: all pairs') + parser.add_argument('-exc_date', dest='exc_date', type=str, nargs='+', default=None, + help = 'a number of secondary dates seperated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, these dates will be excluded from pairing up') + parser.add_argument('-tsmin', dest='tsmin', type=float, default=None, + help = 'minimum time span in years for pairing up. default: None') + parser.add_argument('-tsmax', dest='tsmax', type=float, default=None, + help = 'maximum time span in years for pairing up. 
default: None') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir1 = inps.idir1 + idir2 = inps.idir2 + alos2AppXml = inps.xml + odir = inps.odir + dateReference = inps.ref_date + pairsUser = inps.pairs + subsequentNum = inps.num + dateExcluded = inps.exc_date + tsmin = inps.tsmin + tsmax = inps.tsmax + ####################################################### + + DEBUG=False + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #get date statistics, using resampled version + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir2, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + if subsequentNum is None: + subsequentNum = ndate - 1 + + #read standard configurations + if alos2AppXml is not None: + tree = ET.parse(alos2AppXml) + root = tree.getroot() + + datefmt = "%y%m%d" + pairsCreated = [] + for i in range(ndate): + mdate = dates[i] + mtime = datetime.datetime.strptime(mdate, datefmt) + for j in range(subsequentNum): + if i+j+1 <= ndate - 1: + sdate = dates[i+j+1] + stime = datetime.datetime.strptime(sdate, datefmt) + pair = mdate + '-' + sdate + ts = np.absolute((stime - mtime).total_seconds()) / (365.0 * 24.0 * 3600) + + #1. determine whether process this pair + if pairsUser is not None: + if pair not in pairsUser: + continue + else: + if dateExcluded is not None: + if (mdate in dateExcluded) or (sdate in dateExcluded): + continue + if tsmin is not None: + if ts < tsmin: + continue + if tsmax is not None: + if ts > tsmax: + continue + + #2. 
create pair dir + pairsCreated.append(pair) + print('creating pair: {}'.format(pair)) + pairDir = os.path.join(odir, pair) + os.makedirs(pairDir, exist_ok=True) + #create xml + if alos2AppXml is not None: + safe = root.find("component/property[@name='reference directory']") + #safe.text = '{}'.format(os.path.join(inps.dir, mdate)) + safe.text = 'None' + safe = root.find("component/property[@name='secondary directory']") + #safe.text = '{}'.format(os.path.join(inps.dir, sdate)) + safe.text = 'None' + tree.write(os.path.join(pairDir, 'alos2App.xml')) + + #3. make frame/swath directories, and copy *.track.xml and *.frame.xml + if mdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, mdate, mdate+'.track.xml'), pairDir) + if sdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, sdate, sdate+'.track.xml'), pairDir) + shutil.copy2(os.path.join(idir2, dates[dateIndexReference], dates[dateIndexReference]+'.track.xml'), pairDir) + + for iframe, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(iframe+1, frameNumber) + os.makedirs(os.path.join(pairDir, frameDir), exist_ok=True) + + if mdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, mdate, frameDir, mdate+'.frame.xml'), os.path.join(pairDir, frameDir)) + if sdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, sdate, frameDir, sdate+'.frame.xml'), os.path.join(pairDir, frameDir)) + shutil.copy2(os.path.join(idir2, dates[dateIndexReference], frameDir, dates[dateIndexReference]+'.frame.xml'), os.path.join(pairDir, frameDir)) + + for jswath, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + os.makedirs(os.path.join(pairDir, frameDir, swathDir), exist_ok=True) + + if os.path.isfile(os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')): + os.remove(os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')) + relpath = os.path.relpath(os.path.join(idir2, mdate, frameDir, swathDir), 
os.path.join(pairDir, frameDir, swathDir)) + os.symlink(os.path.join(relpath, mdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')) + #os.symlink(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')) + shutil.copy2(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc.vrt'), os.path.join(pairDir, frameDir, swathDir)) + shutil.copy2(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc.xml'), os.path.join(pairDir, frameDir, swathDir)) + + if os.path.isfile(os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')): + os.remove(os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')) + relpath = os.path.relpath(os.path.join(idir2, sdate, frameDir, swathDir), os.path.join(pairDir, frameDir, swathDir)) + os.symlink(os.path.join(relpath, sdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')) + #os.symlink(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')) + shutil.copy2(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc.vrt'), os.path.join(pairDir, frameDir, swathDir)) + shutil.copy2(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc.xml'), os.path.join(pairDir, frameDir, swathDir)) + + + print('total number of pairs created: {}'.format(len(pairsCreated))) + if pairsUser is not None: + if sorted(pairsUser) != sorted(pairsCreated): + print() + print('WARNING: user has specified pairs to process, but pairs created are different from user specified pairs') + print(' user specified pairs: {}'.format(', '.join(pairsUser))) + print(' pairs created: {}'.format(', '.join(pairsCreated))) + print() + + + + + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/plot_baseline.py b/contrib/stack/alosStack/plot_baseline.py new file mode 100644 index 0000000..280e38f --- /dev/null +++ b/contrib/stack/alosStack/plot_baseline.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +#Cunren Liang, 
JPL/Caltech, 28-NOV-2016 + +#https://matplotlib.org/3.1.1/gallery/text_labels_and_annotations/date.html + +import os +import sys +import glob +import datetime +import argparse +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates + + +def read_alosstack_baseline(baseline_file): + '''read baseline file generated by alosStack + ''' + baseline_dict = {} + with open(baseline_file, 'r') as f: + lines = [line for line in f if line.strip() != ''] + for x in lines[2:]: + blist = x.split() + #to fit into the format of other processors, all alos satellites are after 2000 + #blist[0] = '20' + blist[0] + #blist[1] = '20' + blist[1] + baseline_dict[blist[1]] = float(blist[3]) + baseline_dict[blist[0]] = 0 + + return baseline_dict + + +def cmdLineParse(): + ''' + Command line parser. + ''' + parser = argparse.ArgumentParser(description='plot baselines') + parser.add_argument('-baseline', dest='baseline', type=str, required=True, + help = 'baseline file') + parser.add_argument('-pairs_dir', dest='pairs_dir', type=str, required=True, + help = 'pairs directory containing YYMMDD-YYMMDD folders. Only folders are recognized.') + parser.add_argument('-pairs_exc', dest='pairs_exc', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD... 
If provided, these pairs will be excluded from plotting') + parser.add_argument('-output', dest='output', type=str, default='baseline.pdf', + help = 'output file name') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + baseline = inps.baseline + pairs_dir = inps.pairs_dir + pairs_exc = inps.pairs_exc + output = inps.output + + baseline_dict = read_alosstack_baseline(baseline) + pairs = [os.path.basename(x) for x in sorted(glob.glob(os.path.join(pairs_dir, '*-*'))) if os.path.isdir(x)] + if pairs_exc != None: + for x in pairs_exc: + if x in pairs: + pairs.remove(x) + + #start plot + plt.rcParams['font.family'] = 'Times New Roman' + plt.rcParams['font.size'] = 12 + fig, ax = plt.subplots() + + time = [datetime.datetime.strptime(x, "%y%m%d") for x in baseline_dict] + baseline = [baseline_dict[x] for x in baseline_dict] + ax.plot(time, baseline, 'o', alpha=0.7, c='g') + + year_min = datetime.datetime(min(time).year, 1, 1) + year_max = datetime.datetime(max(time).year+1, 1, 1) + + for x in pairs: + rdate, sdate = x.split('-') + rtime = datetime.datetime.strptime(rdate, "%y%m%d") + stime = datetime.datetime.strptime(sdate, "%y%m%d") + time = [rtime, stime] + baseline = [baseline_dict[rdate], baseline_dict[sdate]] + ax.plot(time, baseline, '-', lw=.5, c='b') + + ax.xaxis.set_major_locator(mdates.YearLocator()) + ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y')) + ax.xaxis.set_minor_locator(mdates.MonthLocator()) + + ax.minorticks_on() + ax.tick_params('both', length=7, which='major', width=1) + ax.tick_params('both', length=4, which='minor', width=0.5) + ax.set_xlim(year_min, year_max) + + ax.format_xdata = mdates.DateFormatter('%Y-%m-%d') + + # rotates and right aligns the x labels, and moves the bottom of the + # axes up to make room for them + #fig.autofmt_xdate() + + + ax.set_xlabel('Time [years]') + ax.set_ylabel('Perpendicular Baseline 
[meters]') + + + plt.savefig(os.path.splitext(output)[0]+'.pdf') + + + + + + + + + + diff --git a/contrib/stack/alosStack/radar_dem_offset.py b/contrib/stack/alosStack/radar_dem_offset.py new file mode 100644 index 0000000..a2d0617 --- /dev/null +++ b/contrib/stack/alosStack/radar_dem_offset.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.runRdrDemOffset import rdrDemOffset + +from StackPulic import loadProduct +from StackPulic import createObject + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate offset between radar and dem') + parser.add_argument('-track', dest='track', type=str, required=True, + help = 'track parameter file') + parser.add_argument('-dem', dest='dem', type=str, required=True, + help = 'dem used for geometrical coregistration') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body in radar coordinate') + parser.add_argument('-hgt', dest='hgt', type=str, required=True, + help = 'height in radar coordinate computed in geometrical coregistration') + parser.add_argument('-amp', dest='amp', type=str, required=True, + help = 'amplitude image') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file for saving the affine transformation paramters') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. 
default: 1') + parser.add_argument('-nrlks_sim', dest='nrlks_sim', type=int, default=None, + help = 'number of range looks when simulating radar image') + parser.add_argument('-nalks_sim', dest='nalks_sim', type=int, default=None, + help = 'number of azimuth looks when simulating radar image') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + trackParameter = inps.track + demFile = inps.dem + wbdOut = inps.wbd + height = inps.hgt + amplitude = inps.amp + output = inps.output + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooksSim = inps.nrlks_sim + numberAzimuthLooksSim = inps.nalks_sim + ####################################################### + + #prepare amplitude image + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + if not os.path.isfile(os.path.basename(amplitude)): + os.symlink(os.path.join('../', amplitude), os.path.basename(amplitude)) + if not os.path.isfile(os.path.basename(amplitude)+'.vrt'): + os.symlink(os.path.join('../', amplitude)+'.vrt', os.path.basename(amplitude)+'.vrt') + if not os.path.isfile(os.path.basename(amplitude)+'.xml'): + os.symlink(os.path.join('../', amplitude)+'.xml', os.path.basename(amplitude)+'.xml') + os.chdir('../') + + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + simFile = 'radar_{}.sim'.format(ml1) + + self = createObject() + self._insar = createObject() + + self._insar.dem = demFile + self._insar.numberRangeLooksSim = numberRangeLooksSim + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooksSim = numberAzimuthLooksSim + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + self._insar.height = os.path.basename(height) + self._insar.sim = simFile + self._insar.amplitude = os.path.basename(amplitude) + self._insar.wbdOut = 
os.path.basename(wbdOut) + self._insar.radarDemAffineTransform = None + + referenceTrack = loadProduct(trackParameter) + rdrDemOffset(self, referenceTrack, catalog=None) + + os.chdir(insarDir) + #save the result + with open(output, 'w') as f: + f.write('{} {}\n{}'.format(self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, self._insar.radarDemAffineTransform)) + + #remove amplitude image + os.remove(os.path.basename(amplitude)) + os.remove(os.path.basename(amplitude)+'.vrt') + os.remove(os.path.basename(amplitude)+'.xml') + os.chdir('../') \ No newline at end of file diff --git a/contrib/stack/alosStack/rdr2geo.py b/contrib/stack/alosStack/rdr2geo.py new file mode 100644 index 0000000..4ad9f40 --- /dev/null +++ b/contrib/stack/alosStack/rdr2geo.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar +from isceobj.Alos2Proc.runRdr2Geo import topoCPU +from isceobj.Alos2Proc.runRdr2Geo import topoGPU + +from StackPulic import loadTrack +from StackPulic import hasGPU + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute longitude, latitude, height and water body from radar parameters') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'date. format: YYMMDD') + parser.add_argument('-dem', dest='dem', type=str, required=True, + help = 'dem file') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. 
default: 1') + #parser.add_argument('-gpu', dest='gpu', type=int, default=1, + # help = 'use GPU when available. 0: no. 1: yes (default)') + parser.add_argument('-gpu', dest='gpu', action='store_true', default=False, + help='use GPU when available') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + date = inps.date + demFile = inps.dem + wbdFile = inps.wbd + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + useGPU = inps.gpu + ####################################################### + + demFile = os.path.abspath(demFile) + wbdFile = os.path.abspath(wbdFile) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + latitude = date + ml1 + '.lat' + longitude = date + ml1 + '.lon' + height = date + ml1 + '.hgt' + los = date + ml1 + '.los' + wbdOut = date + ml1 + '.wbd' + + + track = loadTrack('../', date) + if useGPU and hasGPU(): + topoGPU(track, numberRangeLooks1, numberAzimuthLooks1, demFile, + latitude, longitude, height, los) + else: + snwe = topoCPU(track, numberRangeLooks1, numberAzimuthLooks1, demFile, + latitude, longitude, height, los) + waterBodyRadar(latitude, longitude, wbdFile, wbdOut) + + diff --git a/contrib/stack/alosStack/read_data.py b/contrib/stack/alosStack/read_data.py new file mode 100644 index 0000000..44c2bc7 --- /dev/null +++ b/contrib/stack/alosStack/read_data.py @@ -0,0 +1,301 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +import isceobj.Sensor.MultiMode as MultiMode + +from StackPulic import saveProduct +from StackPulic import acquisitionModesAlos2 + + +def getAlos2StackDirs(dataDir): + ''' + 1. 
this function takes the data directory containing a list of folders, in each of + which data of a date is located, and then returns a list of date directory sorted + by acquisition date. + + 2. under dataDir, only folders are recognized + ''' + import os + import glob + + def sorter(item): + #return date + return item.split('-')[-2] + + #get only folders in dataDir + dateDirs = sorted(glob.glob(os.path.join(dataDir, '*'))) + dateDirs = [x for x in dateDirs if os.path.isdir(x)] + ndate = len(dateDirs) + + #get first LED files in dateDirs + dateFirstleaderFiles = [sorted(glob.glob(os.path.join(x, 'LED-ALOS2*-*-*')))[0] for x in dateDirs] + #sort first LED files using date in LED file name + dateFirstleaderFiles = sorted(dateFirstleaderFiles, key=sorter) + #keep only directory from the path + dateDirs = [os.path.dirname(x) for x in dateFirstleaderFiles] + + return dateDirs + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='read a number of dates of data') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory where data of each date is output') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks, can also include reference date. format: YYMMDD YYMMDD YYMMDD. 
If provided, only read data of these dates') + parser.add_argument('-pol', dest='pol', type=str, default='HH', + help = 'polarization to process, default: HH') + parser.add_argument('-frames', dest='frames', type=str, nargs='+', default=None, + help = 'frames to process, must specify frame numbers of reference if frames are different among dates. e.g. -frames 2800 2850') + parser.add_argument('-starting_swath', dest='starting_swath', type=int, default=None, + help = 'starting swath to process.') + parser.add_argument('-ending_swath', dest='ending_swath', type=int, default=None, + help = 'starting swath to process') + parser.add_argument('-virtual', dest='virtual', action='store_true', default=False, + help='use virtual file') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + pol = inps.pol + framesInput = inps.frames + startingSwath = inps.starting_swath + endingSwath = inps.ending_swath + useVirtualFile = inps.virtual + ####################################################### + + + #date directories sorted by acquistion date retrieved from filenames under each directory + dateDirs = getAlos2StackDirs(os.path.abspath(idir)) + ndate = len(dateDirs) + + if framesInput is not None: + framesInput = sorted(framesInput) + else: + framesInput = None + + + #1. find index of reference date: + dates = [] + dateIndexReference = None + for i in range(ndate): + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + date = os.path.basename(ledFiles[0]).split('-')[-2] + dates.append(date) + if date == dateReference: + dateIndexReference = i + if dateIndexReference is None: + raise Exception('cannot get reference date {} from the data list, pleasae check your input'.format(dateReference)) + + + #2. 
check if data are in the same mode + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #first frame of reference date + ledFilesReference = sorted(glob.glob(os.path.join(dateDirs[dateIndexReference], 'LED-ALOS2*-*-*'))) + modeReference = os.path.basename(ledFilesReference[0]).split('-')[-1][0:3] + + if modeReference in spotlightModes: + modeGroupReference = spotlightModes + if modeReference in stripmapModes: + modeGroupReference = stripmapModes + if modeReference in scansarNominalModes: + modeGroupReference = scansarNominalModes + if modeReference in scansarWideModes: + modeGroupReference = scansarWideModes + + #check aquistion mode of all frames of each date + for i in range(ndate): + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + nframe = len(ledFiles) + for j in range(nframe): + mode = os.path.basename(ledFiles[j]).split('-')[-1][0:3] + if mode not in modeGroupReference: + raise Exception('all data must be in the same acquistion mode: spotlight, stripmap, or ScanSAR mode') + + + #3. find frame numbers and save it in a 2-d list + frames = [] + #if not set, find frames automatically + if framesInput is None: + for i in range(ndate): + frames0 = [] + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + for led in ledFiles: + frames0.append( os.path.basename(led).split('-')[-3][-4:] ) + frames.append(sorted(frames0)) + else: + for i in range(ndate): + frames.append(framesInput) + + framesReference = frames[dateIndexReference] + + #check if there is equal number of frames + nframe = len(frames[dateIndexReference]) + for i in range(ndate): + if nframe != len(frames[i]): + raise Exception('there are not equal number of frames to process, please check your directory of each date') + + + #4. 
set starting and ending swaths + if modeReference in spotlightModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 1 + if modeReference in stripmapModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 1 + if modeReference in scansarNominalModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 5 + if modeReference in scansarWideModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 7 + + #print result + print('\nlist of dates:') + print(' index date frames') + print('=======================================================') + for i in range(ndate): + if dates[i] == dateReference: + print(' %03d %s'%(i, dates[i])+' {}'.format(frames[i])+' reference') + else: + print(' %03d %s'%(i, dates[i])+' {}'.format(frames[i])) + print('\n') + + + ################################################## + #1. create directories and read data + ################################################## + if not os.path.isdir(odir): + print('output directory {} does not exist, create'.format(odir)) + os.makedirs(odir, exist_ok=True) + + os.chdir(odir) + for i in range(ndate): + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + date = os.path.basename(ledFiles[0]).split('-')[-2] + dateDir = date + + if dateSecondary != []: + if date not in dateSecondary: + continue + + if os.path.isdir(dateDir): + print('{} already exists, do not create'.format(dateDir)) + continue + else: + os.makedirs(dateDir, exist_ok=True) + os.chdir(dateDir) + + sensor = MultiMode.createSensor(sensor='ALOS2', name=None) + sensor.configure() + sensor.track.configure() + + for j in range(nframe): + #frame number starts with 1 + frameDir = 'f{}_{}'.format(j+1, framesReference[j]) + os.makedirs(frameDir, exist_ok=True) + os.chdir(frameDir) + + #attach a frame to reference and secondary + frameObj = MultiMode.createFrame() + 
frameObj.configure() + sensor.track.frames.append(frameObj) + + #swath number starts with 1 + for k in range(startingSwath, endingSwath+1): + print('processing date {} frame {} swath {}'.format(date, framesReference[j], k)) + + swathDir = 's{}'.format(k) + os.makedirs(swathDir, exist_ok=True) + os.chdir(swathDir) + + #attach a swath to sensor + swathObj = MultiMode.createSwath() + swathObj.configure() + sensor.track.frames[-1].swaths.append(swathObj) + + #setup sensor + #sensor.leaderFile = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*{}-*-*'.format(framesReference[j]))))[0] + sensor.leaderFile = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*{}-*-*'.format(frames[i][j]))))[0] + if modeReference in scansarModes: + #sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*-F{}'.format(pol.upper(), framesReference[j], k))))[0] + sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*-F{}'.format(pol.upper(), frames[i][j], k))))[0] + else: + #sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*'.format(pol.upper(), framesReference[j]))))[0] + sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*'.format(pol.upper(), frames[i][j]))))[0] + sensor.outputFile = date + '.slc' + sensor.useVirtualFile = useVirtualFile + #read sensor + (imageFDR, imageData)=sensor.readImage() + (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=sensor.readLeader() + sensor.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + sensor.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + sensor.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + os.chdir('../') + #!!!frame numbers of all dates are reset to those of reference date + sensor.track.frames[j].frameNumber = framesReference[j] + 
saveProduct(sensor.track.frames[-1], date + '.frame.xml') + os.chdir('../') + saveProduct(sensor.track, date + '.track.xml') + os.chdir('../') + + + + + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/rect_range_offset.py b/contrib/stack/alosStack/rect_range_offset.py new file mode 100644 index 0000000..bc92bf3 --- /dev/null +++ b/contrib/stack/alosStack/rect_range_offset.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from contrib.alos2proc_f.alos2proc_f import rect_with_looks +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +from StackPulic import createObject + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='rectify range offset') + parser.add_argument('-aff', dest='aff', type=str, required=True, + help = 'affine transform paramter file') + parser.add_argument('-input', dest='input', type=str, default='./', + help = 'input file') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1 . default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. 
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + aff = inps.aff + rangeOffset = inps.input + rectRangeOffset = inps.output + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + DEBUG=False + + self = createObject() + self._insar = createObject() + + self._insar.rangeOffset = rangeOffset + self._insar.rectRangeOffset = rectRangeOffset + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + + #read affine transform parameters + with open(aff, 'r') as f: + lines = f.readlines() + self._insar.numberRangeLooksSim = int(lines[0].split()[0]) + self._insar.numberAzimuthLooksSim = int(lines[0].split()[1]) + self._insar.radarDemAffineTransform = [float(x) for x in lines[1].strip('[').strip(']').split(',')] + if DEBUG: + print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++') + print('{} {}\n{}'.format(self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, self._insar.radarDemAffineTransform)) + print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++') + + #rectify + rgoff = isceobj.createImage() + rgoff.load(self._insar.rangeOffset+'.xml') + + if self._insar.radarDemAffineTransform == [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]: + if not os.path.isfile(self._insar.rectRangeOffset): + os.symlink(self._insar.rangeOffset, self._insar.rectRangeOffset) + create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float') + else: + rect_with_looks(self._insar.rangeOffset, + self._insar.rectRangeOffset, + rgoff.width, rgoff.length, + rgoff.width, rgoff.length, + self._insar.radarDemAffineTransform[0], self._insar.radarDemAffineTransform[1], + self._insar.radarDemAffineTransform[2], self._insar.radarDemAffineTransform[3], + 
self._insar.radarDemAffineTransform[4], self._insar.radarDemAffineTransform[5], + self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1, + self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + 'REAL', + 'Bilinear') + create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float') + diff --git a/contrib/stack/alosStack/resample_common_grid.py b/contrib/stack/alosStack/resample_common_grid.py new file mode 100644 index 0000000..4eb7c4d --- /dev/null +++ b/contrib/stack/alosStack/resample_common_grid.py @@ -0,0 +1,500 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj, stdproc +from isceobj.Util.Poly2D import Poly2D +from isceobj.Location.Offset import OffsetField, Offset + +from isceobj.Alos2Proc.Alos2ProcPublic import readOffset +from isceobj.Alos2Proc.runSwathOffset import swathOffset + +from contrib.alos2proc.alos2proc import rg_filter + +from StackPulic import loadTrack +from StackPulic import saveTrack +from StackPulic import subbandParameters +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='resample data to a common grid') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory where resampled version of each date is output') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. 
format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, only resample these dates') + parser.add_argument('-ref_frame', dest='ref_frame', type=str, default=None, + help = 'frame number of the swath whose grid is used as reference. e.g. 2800. default: first frame') + parser.add_argument('-ref_swath', dest='ref_swath', type=int, default=None, + help = 'swath number of the swath whose grid is used as reference. e.g. 1. default: first swath') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'range offsets between swaths/frames should be integer multiples of -nrlks1. default: 1 ') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=14, + help = 'azimuth offsets between swaths/frames should be integer multiples of -nalks1. default: 14') + parser.add_argument('-subband', dest='subband', action='store_true', default=False, + help='create and resample subband SLCs') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + frameReference = inps.ref_frame + swathReference = inps.ref_swath + nRange = inps.nrlks1 + nAzimuth = inps.nalks1 + subbandFlag = inps.subband + ####################################################### + + DEBUG=False + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + if frameReference is None: + frameReference = frames[0] + else: + if 
frameReference not in frames: + raise Exception('specified -ref_frame {} not in frame list {}'.format(frameReference, frames)) + if swathReference is None: + swathReference = swaths[0] + else: + if swathReference not in swaths: + raise Exception('specified -ref_swath {} not in swath list {}'.format(swathReference, swaths)) + + #find frame and swath indexes of reference swath + frameReferenceIndex = frames.index(frameReference) + swathReferenceIndex = swaths.index(swathReference) + + print('resampling all frames and swaths to frame: {} (index: {}) swath: {} (index {})'.format( + frameReference, frameReferenceIndex, swathReference, swathReferenceIndex)) + + + #read swath offsets and save in 2-d lists + swathRangeOffsetGeometrical = [] + swathAzimuthOffsetGeometrical = [] + swathRangeOffsetMatching = [] + swathAzimuthOffsetMatching = [] + for i, frameNumber in enumerate(frames): + + swathRangeOffsetGeometrical0 = [] + swathAzimuthOffsetGeometrical0 = [] + swathRangeOffsetMatching0 = [] + swathAzimuthOffsetMatching0 = [] + + if nswath >= 2: + frameDir = 'f{}_{}'.format(i+1, frameNumber) + with open(os.path.join(idir, dateReference, frameDir, 'mosaic/swath_offset.txt'), 'r') as f: + lines = f.readlines() + + for linex in lines: + if 'range offset' in linex: + swathRangeOffsetGeometrical0.append(float(linex.split()[3])) + swathRangeOffsetMatching0.append(float(linex.split()[4])) + if 'azimuth offset' in linex: + swathAzimuthOffsetGeometrical0.append(float(linex.split()[3])) + swathAzimuthOffsetMatching0.append(float(linex.split()[4])) + else: + swathRangeOffsetGeometrical0.append(0.0) + swathRangeOffsetMatching0.append(0.0) + swathAzimuthOffsetGeometrical0.append(0.0) + swathAzimuthOffsetMatching0.append(0.0) + + swathRangeOffsetGeometrical.append(swathRangeOffsetGeometrical0) + swathAzimuthOffsetGeometrical.append(swathAzimuthOffsetGeometrical0) + swathRangeOffsetMatching.append(swathRangeOffsetMatching0) + swathAzimuthOffsetMatching.append(swathAzimuthOffsetMatching0) 
+ + + #read frame offsets and save in 1-d list + frameRangeOffsetGeometrical = [] + frameAzimuthOffsetGeometrical = [] + frameRangeOffsetMatching = [] + frameAzimuthOffsetMatching = [] + + if nframe >= 2: + with open(os.path.join(idir, dateReference, 'insar/frame_offset.txt'), 'r') as f: + lines = f.readlines() + for linex in lines: + if 'range offset' in linex: + frameRangeOffsetGeometrical.append(float(linex.split()[3])) + frameRangeOffsetMatching.append(float(linex.split()[4])) + if 'azimuth offset' in linex: + frameAzimuthOffsetGeometrical.append(float(linex.split()[3])) + frameAzimuthOffsetMatching.append(float(linex.split()[4])) + else: + frameRangeOffsetGeometrical.append(0.0) + frameRangeOffsetMatching.append(0.0) + frameAzimuthOffsetGeometrical.append(0.0) + frameAzimuthOffsetMatching.append(0.0) + + + #compute accurate starting range and sensing start using offset file for reference date + #swath offset is computed between adjacent swaths within a frame, offset unit: first swath sample size + #frame offset is computed between first swaths of adjacent frames, offset unit: first swath sample size + startingRangeAll = [[None for j in range(nswath)] for i in range(nframe)] + sensingStartAll = [[None for j in range(nswath)] for i in range(nframe)] + + trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + for i, frameNumber in enumerate(frames): + #startingRange and sensingStart of first swath of current frame + # for i1 in range(i+1): + # startingRangeFirst = trackReference.frames[0].swaths[0].startingRange - \ + # frameRangeOffsetMatching[i1] * trackReference.frames[0].swaths[0].rangePixelSize + # sensingStartFirst = trackReference.frames[0].swaths[0].sensingStart - \ + # datetime.timedelta(seconds = frameAzimuthOffsetMatching[i1] * trackReference.frames[0].swaths[0].azimuthLineInterval) + + startingRangeFirst = trackReference.frames[0].swaths[0].startingRange - \ + sum(frameRangeOffsetMatching[0:i+1]) * 
trackReference.frames[0].swaths[0].rangePixelSize + sensingStartFirst = trackReference.frames[0].swaths[0].sensingStart - \ + datetime.timedelta(seconds = sum(frameAzimuthOffsetMatching[0:i+1]) * trackReference.frames[0].swaths[0].azimuthLineInterval) + + #startingRange and sensingStart of each swath of current frame + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + # for j1 in range(j+1): + # startingRangeAll[i][j] = startingRangeFirst - \ + # swathRangeOffsetMatching[i][j1] * trackReference.frames[i].swaths[0].rangePixelSize + # sensingStartAll[i][j] = sensingStartFirst - \ + # datetime.timedelta(seconds = swathAzimuthOffsetMatching[i][j1] * trackReference.frames[i].swaths[0].azimuthLineInterval) + + startingRangeAll[i][j] = startingRangeFirst - \ + sum(swathRangeOffsetMatching[i][0:j+1]) * trackReference.frames[i].swaths[0].rangePixelSize + sensingStartAll[i][j] = sensingStartFirst - \ + datetime.timedelta(seconds = sum(swathAzimuthOffsetMatching[i][0:j+1]) * trackReference.frames[i].swaths[0].azimuthLineInterval) + + #check computation result + if DEBUG: + for i, frameNumber in enumerate(frames): + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + print(i, j, (trackReference.frames[i].swaths[j].startingRange-startingRangeAll[i][j])/trackReference.frames[0].swaths[0].rangePixelSize, + (trackReference.frames[i].swaths[j].sensingStart-sensingStartAll[i][j]).total_seconds()/trackReference.frames[0].swaths[0].azimuthLineInterval) + + #update startingRange and sensingStart of reference track + for i, frameNumber in enumerate(frames): + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + trackReference.frames[i].swaths[j].startingRange = startingRangeAll[i][j] + trackReference.frames[i].swaths[j].sensingStart = sensingStartAll[i][j] + + + ##find minimum startingRange and sensingStart + startingRangeMinimum = trackReference.frames[0].swaths[0].startingRange + sensingStartMinimum = 
trackReference.frames[0].swaths[0].sensingStart + for i, frameNumber in enumerate(frames): + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + if trackReference.frames[i].swaths[j].startingRange < startingRangeMinimum: + startingRangeMinimum = trackReference.frames[i].swaths[j].startingRange + if trackReference.frames[i].swaths[j].sensingStart < sensingStartMinimum: + sensingStartMinimum = trackReference.frames[i].swaths[j].sensingStart + print('startingRangeMinimum (m): {}'.format(startingRangeMinimum)) + print('sensingStartMinimum: {}'.format(sensingStartMinimum)) + + + #adjust each swath of each frame to minimum startingRange and sensingStart + #load reference track again for saving track parameters of resampled + trackReferenceResampled = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + for i, frameNumber in enumerate(frames): + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + #current swath + swathReference = trackReference.frames[i].swaths[j] + #swath of reference sample size + swathReferenceReference = trackReference.frames[frameReferenceIndex].swaths[swathReferenceIndex] + #current swath resampled + swathReferenceResampled = trackReferenceResampled.frames[i].swaths[j] + + #update startingRange and sensingStart + offsetRange = (swathReference.startingRange - startingRangeMinimum) / (swathReferenceReference.rangePixelSize*nRange) + offsetAzimuth = (swathReference.sensingStart - sensingStartMinimum).total_seconds() / (swathReferenceReference.azimuthLineInterval*nAzimuth) + + swathReferenceResampled.startingRange = startingRangeMinimum + round(offsetRange) * (swathReferenceReference.rangePixelSize*nRange) + swathReferenceResampled.sensingStart = sensingStartMinimum + datetime.timedelta(seconds = round(offsetAzimuth) * + (swathReferenceReference.azimuthLineInterval*nAzimuth)) + + #update other parameters + swathReferenceResampled.numberOfSamples = round(swathReference.numberOfSamples * 
swathReference.rangePixelSize / swathReferenceReference.rangePixelSize) + swathReferenceResampled.numberOfLines = round(swathReference.numberOfLines * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval) + swathReferenceResampled.rangeSamplingRate = swathReferenceReference.rangeSamplingRate + swathReferenceResampled.rangePixelSize = swathReferenceReference.rangePixelSize + swathReferenceResampled.prf = swathReferenceReference.prf + swathReferenceResampled.azimuthPixelSize = swathReferenceReference.azimuthPixelSize + swathReferenceResampled.azimuthLineInterval = swathReferenceReference.azimuthLineInterval + #should also update dopplerVsPixel, azimuthFmrateVsPixel? + #if hasattr(swathReference, 'burstLength'): + if swathReference.burstLength is not None: + swathReferenceResampled.burstLength *= (swathReference.burstLength * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval) + #if hasattr(swathReference, 'burstCycleLength'): + if swathReference.burstCycleLength is not None: + swathReferenceResampled.burstCycleLength *= (swathReference.burstCycleLength * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval) + #no need to update parameters for ScanSAR burst-by-burst processing, since we are not doing such burst-by-burst processing. 
+ + + #resample each date + os.makedirs(odir, exist_ok=True) + os.chdir(odir) + for idate in range(ndate): + if dateSecondary != []: + if dates[idate] not in dateSecondary: + continue + + os.makedirs(dates[idate], exist_ok=True) + os.chdir(dates[idate]) + + trackSecondary = loadTrack(dateDirs[idate], dates[idate]) + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.makedirs(frameDir, exist_ok=True) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + os.makedirs(swathDir, exist_ok=True) + os.chdir(swathDir) + + #current swath + swathReference = trackReference.frames[i].swaths[j] + #swath of reference sample size + swathReferenceReference = trackReference.frames[frameReferenceIndex].swaths[swathReferenceIndex] + #current swath resampled + swathReferenceResampled = trackReferenceResampled.frames[i].swaths[j] + + #current swath to be resampled + swathSecondary = trackSecondary.frames[i].swaths[j] + + + #current slc to be processed + slc = os.path.join(dateDirs[idate], frameDir, swathDir, dates[idate]+'.slc') + + + #0. create subband SLCs + if subbandFlag: + subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix = subbandParameters(trackReference) + + slcLower = dates[idate]+'_{}_tmp.slc'.format(subbandPrefix[0]) + slcUpper = dates[idate]+'_{}_tmp.slc'.format(subbandPrefix[1]) + rg_filter(slc, 2, + [slcLower, slcUpper], + subbandBandWidth, + subbandFrequencyCenter, + 257, 2048, 0.1, 0, 0.0) + slcList = [slc, slcLower, slcUpper] + slcListResampled = [dates[idate]+'.slc', dates[idate]+'_{}.slc'.format(subbandPrefix[0]), dates[idate]+'_{}.slc'.format(subbandPrefix[1])] + slcListRemoved = [slcLower, slcUpper] + else: + slcList = [slc] + slcListResampled = [dates[idate]+'.slc'] + slcListRemoved = [] + + + #1. 
compute offset polynomial + if idate == dateIndexReference: + rangePoly = Poly2D() + rangePoly.initPoly(rangeOrder=1,azimuthOrder=0,coeffs=[[ + (swathReferenceResampled.startingRange - swathReference.startingRange) / swathReference.rangePixelSize, + swathReferenceResampled.rangePixelSize / swathReference.rangePixelSize - 1.0]]) + + azimuthPoly = Poly2D() + azimuthPoly.initPoly(rangeOrder=0,azimuthOrder=1,coeffs=[ + [(swathReferenceResampled.sensingStart - swathReference.sensingStart).total_seconds() / swathReference.azimuthLineInterval], + [swathReferenceResampled.azimuthLineInterval / swathReference.azimuthLineInterval - 1.0]]) + + if DEBUG: + print() + print('rangePoly.getCoeffs(): {}'.format(rangePoly.getCoeffs())) + print('azimuthPoly.getCoeffs(): {}'.format(azimuthPoly.getCoeffs())) + print('rangePoly._meanRange: {}'.format(rangePoly._meanRange)) + print('rangePoly._normRange: {}'.format(rangePoly._normRange)) + print('rangePoly._meanAzimuth: {}'.format(rangePoly._meanAzimuth)) + print('rangePoly._normAzimuth: {}'.format(rangePoly._normAzimuth)) + print('azimuthPoly._meanRange: {}'.format(azimuthPoly._meanRange)) + print('azimuthPoly._normRange: {}'.format(azimuthPoly._normRange)) + print('azimuthPoly._meanAzimuth: {}'.format(azimuthPoly._meanAzimuth)) + print('azimuthPoly._normAzimuth: {}'.format(azimuthPoly._normAzimuth)) + print() + + else: + offsets = readOffset(os.path.join(dateDirs[idate], frameDir, swathDir, 'cull.off')) + # x1 x2 x3 + # y1 y2 y3 + #create new offset field to save offsets: swathReferenceResampled --> swathReference --> swathSecondary + offsetsUpdated = OffsetField() + + for offset in offsets: + offsetUpdate = Offset() + + x1 = offset.x * swathReference.rangePixelSize / swathReferenceResampled.rangePixelSize + \ + (swathReference.startingRange - swathReferenceResampled.startingRange) / swathReferenceResampled.rangePixelSize + y1 = offset.y * swathReference.azimuthLineInterval / swathReferenceResampled.azimuthLineInterval + \ + 
(swathReference.sensingStart - swathReferenceResampled.sensingStart).total_seconds() / swathReferenceResampled.azimuthLineInterval + + x3 = offset.x + offset.dx + y3 = offset.y + offset.dy + + dx = x3 - x1 + dy = y3 - y1 + + offsetUpdate.setCoordinate(x1, y1) + offsetUpdate.setOffset(dx, dy) + offsetUpdate.setSignalToNoise(offset.snr) + offsetUpdate.setCovariance(offset.sigmax, offset.sigmay, offset.sigmaxy) + offsetsUpdated.addOffset(offsetUpdate) + + azimuthPoly, rangePoly = offsetsUpdated.getFitPolynomials(rangeOrder=2,azimuthOrder=2,maxOrder=True, usenumpy=False) + + #check polynomial accuracy + if DEBUG: + print() + print(' x y dx dy dx(poly) dy(poly) dx - dx(poly) dy - dy(poly)') + print('==============================================================================================================') + for offset in offsetsUpdated: + print('%11.3f %11.3f %11.3f %11.3f %11.3f %11.3f %11.3f %11.3f'%(offset.x, offset.y, + offset.dx, offset.dy, + rangePoly(offset.y, offset.x), azimuthPoly(offset.y, offset.x), + offset.dx - rangePoly(offset.y, offset.x), offset.dy - azimuthPoly(offset.y, offset.x))) + print() + + if DEBUG: + print() + print('rangePoly.getCoeffs(): {}'.format(rangePoly.getCoeffs())) + print('azimuthPoly.getCoeffs(): {}'.format(azimuthPoly.getCoeffs())) + print('rangePoly._meanRange: {}'.format(rangePoly._meanRange)) + print('rangePoly._normRange: {}'.format(rangePoly._normRange)) + print('rangePoly._meanAzimuth: {}'.format(rangePoly._meanAzimuth)) + print('rangePoly._normAzimuth: {}'.format(rangePoly._normAzimuth)) + print('azimuthPoly._meanRange: {}'.format(azimuthPoly._meanRange)) + print('azimuthPoly._normRange: {}'.format(azimuthPoly._normRange)) + print('azimuthPoly._meanAzimuth: {}'.format(azimuthPoly._meanAzimuth)) + print('azimuthPoly._normAzimuth: {}'.format(azimuthPoly._normAzimuth)) + print() + + + #2. 
carrier phase + dpoly = Poly2D() + order = len(swathSecondary.dopplerVsPixel) - 1 + coeffs = [2*np.pi*val*swathSecondary.azimuthLineInterval for val in swathSecondary.dopplerVsPixel] + dpoly.initPoly(rangeOrder=order, azimuthOrder=0) + dpoly.setCoeffs([coeffs]) + + #azCarrPoly = Poly2D() + #azCarrPoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + + #3. resample images + #checked: offset computation results using azimuthPoly/rangePoly and in resamp_slc.f90 + #checked: no flattenning + #checked: no reading of range and azimuth images + #checked: range/azimuth carrier values: 0, 0 + #checked: doppler no problem + # but doppler is computed using reference's coordinate in: + # isce/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 + # I have fixed it. + + + for slcInput, slcOutput in zip(slcList, slcListResampled): + inimg = isceobj.createSlcImage() + inimg.load(slcInput + '.xml') + inimg.filename = slcInput + inimg.extraFilename = slcInput+'.vrt' + inimg.setAccessMode('READ') + + rObj = stdproc.createResamp_slc() + #the following two items are actually not used, since we are not flattenning? + #but need to set these otherwise the program complains + rObj.slantRangePixelSpacing = swathSecondary.rangePixelSize + rObj.radarWavelength = trackSecondary.radarWavelength + #rObj.azimuthCarrierPoly = azCarrPoly + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azimuthPoly + rObj.rangeOffsetsPoly = rangePoly + rObj.imageIn = inimg + + ####Setting reference values + #the following four items are actually not used, since we are not flattenning? 
+ #but need to set these otherwise the program complains + rObj.startingRange = swathSecondary.startingRange + rObj.referenceSlantRangePixelSpacing = swathReferenceResampled.rangePixelSize + rObj.referenceStartingRange = swathReferenceResampled.startingRange + rObj.referenceWavelength = trackReferenceResampled.radarWavelength + + + width = swathReferenceResampled.numberOfSamples + length = swathReferenceResampled.numberOfLines + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = slcOutput + imgOut.setAccessMode('write') + + rObj.outputWidth = width + rObj.outputLines = length + #rObj.residualRangeImage = rngImg + #rObj.residualAzimuthImage = aziImg + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + + for x in slcListRemoved: + os.remove(x) + os.remove(x + '.vrt') + os.remove(x + '.xml') + + os.chdir('../') + os.chdir('../') + os.chdir('../') + + + #dump resampled reference paramter files, only do this when reference is resampled + dumpFlag = True + if dateSecondary != []: + if dates[dateIndexReference] not in dateSecondary: + dumpFlag = False + if dumpFlag: + #we are still in directory 'odir' + os.chdir(dates[dateIndexReference]) + saveTrack(trackReferenceResampled, dates[dateIndexReference]) + + + + + + + + + + + diff --git a/contrib/stack/alosStack/unwrap_snaphu.py b/contrib/stack/alosStack/unwrap_snaphu.py new file mode 100644 index 0000000..e1465f5 --- /dev/null +++ b/contrib/stack/alosStack/unwrap_snaphu.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runUnwrapSnaphu import unwrapSnaphu + +from StackPulic import createObject +from StackPulic import loadProduct + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks and compute coherence') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. 
default: 1') + parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true', default=False, + help='mask unwrapped interferogram with water body') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + waterBodyMaskStartingStep = inps.wbd_msk + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + self = createObject() + self._insar = createObject() + + self._insar.filteredInterferogram = 'filt_' + ms + ml2 + '.int' + self._insar.multilookAmplitude = ms + ml2 + '.amp' + self._insar.multilookPhsig = ms + ml2 + '.phsig' + self._insar.unwrappedInterferogram = 'filt_' + ms + ml2 + '.unw' + self._insar.unwrappedMaskedInterferogram = 'filt_' + ms + ml2 + '_msk.unw' + self._insar.multilookWbdOut = os.path.join('../', idir, dateReferenceStack, 'insar', dateReferenceStack + ml2 + '.wbd') + + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + self._insar.numberRangeLooks2 = numberRangeLooks2 + self._insar.numberAzimuthLooks2 = numberAzimuthLooks2 + + if waterBodyMaskStartingStep: + self.waterBodyMaskStartingStep='unwrap' + else: + self.waterBodyMaskStartingStep=None + + trackReference = loadProduct('{}.track.xml'.format(dateReference)) + unwrapSnaphu(self, trackReference) + + + diff --git a/examples/input_files/alos2/alos2App.xml b/examples/input_files/alos2/alos2App.xml index b5398e1..064f210 100644 --- 
a/examples/input_files/alos2/alos2App.xml +++ b/examples/input_files/alos2/alos2App.xml @@ -30,7 +30,12 @@ /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd - + @@ -133,6 +138,10 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450 + + @@ -169,6 +178,45 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450 + + @@ -234,7 +282,15 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450 @@ -260,11 +316,28 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450 ==========================================================================================--> + + + + + + + + + + + @@ -253,11 +290,28 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450 ==========================================================================================--> + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar/2/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar/2/alos2App.xml index 5b3eb71..611cacd 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar/2/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar/2/alos2App.xml @@ -15,298 +15,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git 
a/examples/input_files/alos2/example_input_files/scansar-scansar/3/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar/3/alos2App.xml index fa07d0e..2705cfc 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar/3/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar/3/alos2App.xml @@ -15,298 +15,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar/4/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar/4/alos2App.xml index 0c8a4dc..8acbbbc 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar/4/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar/4/alos2App.xml @@ -15,298 +15,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_7s/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_7s/alos2App.xml index 0e9cc31..2ebcf7e 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar_7s/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_7s/alos2App.xml @@ -15,298 +15,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/1/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/1/alos2burstApp.xml index ab7913f..88af8f0 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/1/alos2burstApp.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/1/alos2burstApp.xml @@ -15,284 +15,5 @@ False - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/2/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/2/alos2burstApp.xml index 3aafbdf..ace03e8 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/2/alos2burstApp.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/2/alos2burstApp.xml @@ -15,284 +15,5 @@ False - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/3/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/3/alos2burstApp.xml index 40d1355..c1d65ef 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/3/alos2burstApp.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/3/alos2burstApp.xml @@ -15,284 +15,5 @@ False - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/4/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/4/alos2burstApp.xml index f461005..56c7b27 100644 --- a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/4/alos2burstApp.xml +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/4/alos2burstApp.xml @@ -15,284 +15,5 @@ False - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-stripmap/1/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-stripmap/1/alos2App.xml index eebc7ca..435a05d 100644 --- a/examples/input_files/alos2/example_input_files/scansar-stripmap/1/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/scansar-stripmap/1/alos2App.xml @@ -13,298 +13,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/scansar-stripmap/2/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-stripmap/2/alos2App.xml index 49b4c79..4b7cb28 100644 --- a/examples/input_files/alos2/example_input_files/scansar-stripmap/2/alos2App.xml +++ 
b/examples/input_files/alos2/example_input_files/scansar-stripmap/2/alos2App.xml @@ -13,298 +13,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/1/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/1/alos2App.xml index 76fcd05..628ef72 100644 --- a/examples/input_files/alos2/example_input_files/stripmap-stripmap/1/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/1/alos2App.xml @@ -13,298 +13,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/2/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/2/alos2App.xml index 0dd767f..c35b6f1 100644 --- a/examples/input_files/alos2/example_input_files/stripmap-stripmap/2/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/2/alos2App.xml @@ -15,298 +15,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/3/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/3/alos2App.xml index cc13796..474a552 100644 
--- a/examples/input_files/alos2/example_input_files/stripmap-stripmap/3/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/3/alos2App.xml @@ -13,298 +13,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/4/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/4/alos2App.xml index 935813c..1f6afed 100644 --- a/examples/input_files/alos2/example_input_files/stripmap-stripmap/4/alos2App.xml +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/4/alos2App.xml @@ -13,298 +13,5 @@ True - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/input_files/alos2/example_input_files/test1.sh b/examples/input_files/alos2/example_input_files/test1.sh index e4ad7a4..bd25a94 100644 --- a/examples/input_files/alos2/example_input_files/test1.sh +++ b/examples/input_files/alos2/example_input_files/test1.sh @@ -1,3 +1,6 @@ +export OMP_NUM_THREADS=4 +export CUDA_VISIBLE_DEVICES=7 + #scansar-scansar ########################## cd scansar-scansar/1 diff --git a/examples/input_files/alos2/example_input_files/test2.sh b/examples/input_files/alos2/example_input_files/test2.sh index 991a18e..c74bdb4 100644 --- a/examples/input_files/alos2/example_input_files/test2.sh +++ b/examples/input_files/alos2/example_input_files/test2.sh @@ -1,3 +1,6 @@ +export OMP_NUM_THREADS=4 +export CUDA_VISIBLE_DEVICES=6 + #scansar-scansar_burst cd 
scansar-scansar_burst/1 alos2burstApp.py --steps