Merge pull request #175 from CunrenLiang/main

new alosStack, PRF change of ALOS-1, update of TOPS ionospheric correction, new ionospheric filter
LT1AB
Ryan Burns 2021-03-04 10:52:45 -08:00 committed by GitHub
commit d0ec8c1794
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
119 changed files with 13096 additions and 5446 deletions

View File

@ -117,6 +117,13 @@ WBD = Application.Parameter('wbd',
mandatory=False,
doc='water body file')
DO_INSAR = Application.Parameter('doInSAR',
public_name='do InSAR',
default = True,
type = bool,
mandatory = False,
doc = 'do InSAR')
USE_VIRTUAL_FILE = Application.Parameter('useVirtualFile',
public_name = 'use virtual file',
default=True,
@ -323,6 +330,30 @@ MASKED_AREAS_ION = Application.Parameter('maskedAreasIon',
container = list,
doc = 'areas masked out in ionospheric phase estimation')
SWATH_PHASE_DIFF_SNAP_ION = Application.Parameter('swathPhaseDiffSnapIon',
public_name = 'swath phase difference snap to fixed values',
default = None,
type = bool,
mandatory = False,
container = list,
doc = 'swath phase difference snap to fixed values')
SWATH_PHASE_DIFF_LOWER_ION = Application.Parameter('swathPhaseDiffLowerIon',
public_name = 'swath phase difference of lower band',
default = None,
type = float,
mandatory = False,
container = list,
doc = 'swath phase difference of lower band')
SWATH_PHASE_DIFF_UPPER_ION = Application.Parameter('swathPhaseDiffUpperIon',
public_name = 'swath phase difference of upper band',
default = None,
type = float,
mandatory = False,
container = list,
doc = 'swath phase difference of upper band')
FIT_ION = Application.Parameter('fitIon',
public_name = 'apply polynomial fit before filtering ionosphere phase',
default = True,
@ -330,20 +361,55 @@ FIT_ION = Application.Parameter('fitIon',
mandatory = False,
doc = 'apply polynomial fit before filtering ionosphere phase')
FILT_ION = Application.Parameter('filtIon',
public_name = 'whether filtering ionosphere phase',
default = True,
type = bool,
mandatory = False,
doc = 'whether filtering ionosphere phase')
FIT_ADAPTIVE_ION = Application.Parameter('fitAdaptiveIon',
public_name = 'apply polynomial fit in adaptive filtering window',
default = True,
type = bool,
mandatory = False,
doc = 'apply polynomial fit in adaptive filtering window')
FILT_SECONDARY_ION = Application.Parameter('filtSecondaryIon',
public_name = 'whether do secondary filtering of ionosphere phase',
default = True,
type = bool,
mandatory = False,
doc = 'whether do secondary filtering of ionosphere phase')
FILTERING_WINSIZE_MAX_ION = Application.Parameter('filteringWinsizeMaxIon',
public_name='maximum window size for filtering ionosphere phase',
default=151,
default=301,
type=int,
mandatory=False,
doc='maximum window size for filtering ionosphere phase')
FILTERING_WINSIZE_MIN_ION = Application.Parameter('filteringWinsizeMinIon',
public_name='minimum window size for filtering ionosphere phase',
default=41,
default=11,
type=int,
mandatory=False,
doc='minimum window size for filtering ionosphere phase')
FILTERING_WINSIZE_SECONDARY_ION = Application.Parameter('filteringWinsizeSecondaryIon',
public_name='window size of secondary filtering of ionosphere phase',
default=5,
type=int,
mandatory=False,
doc='window size of secondary filtering of ionosphere phase')
FILTER_STD_ION = Application.Parameter('filterStdIon',
public_name = 'standard deviation of ionosphere phase after filtering',
default = None,
type=float,
mandatory = False,
doc = 'standard deviation of ionosphere phase after filtering')
FILTER_SUBBAND_INT = Application.Parameter('filterSubbandInt',
public_name = 'filter subband interferogram',
default = False,
@ -579,6 +645,7 @@ class Alos2InSAR(Application):
DEM,
DEM_GEO,
WBD,
DO_INSAR,
USE_VIRTUAL_FILE,
USE_GPU,
BURST_SYNCHRONIZATION_THRESHOLD,
@ -608,9 +675,17 @@ class Alos2InSAR(Application):
NUMBER_RANGE_LOOKS_ION,
NUMBER_AZIMUTH_LOOKS_ION,
MASKED_AREAS_ION,
SWATH_PHASE_DIFF_SNAP_ION,
SWATH_PHASE_DIFF_LOWER_ION,
SWATH_PHASE_DIFF_UPPER_ION,
FIT_ION,
FILT_ION,
FIT_ADAPTIVE_ION,
FILT_SECONDARY_ION,
FILTERING_WINSIZE_MAX_ION,
FILTERING_WINSIZE_MIN_ION,
FILTERING_WINSIZE_SECONDARY_ION,
FILTER_STD_ION,
FILTER_SUBBAND_INT,
FILTER_STRENGTH_SUBBAND_INT,
FILTER_WINSIZE_SUBBAND_INT,
@ -750,6 +825,7 @@ class Alos2InSAR(Application):
## Add instance attribute RunWrapper functions, which emulate methods.
def _add_methods(self):
self.runPreprocessor = Alos2Proc.createPreprocessor(self)
self.runBaseline = Alos2Proc.createBaseline(self)
self.runDownloadDem = Alos2Proc.createDownloadDem(self)
self.runPrepareSlc = Alos2Proc.createPrepareSlc(self)
self.runSlcOffset = Alos2Proc.createSlcOffset(self)
@ -768,6 +844,7 @@ class Alos2InSAR(Application):
self.runIonSubband = Alos2Proc.createIonSubband(self)
self.runIonUwrap = Alos2Proc.createIonUwrap(self)
self.runIonFilt = Alos2Proc.createIonFilt(self)
self.runIonCorrect = Alos2Proc.createIonCorrect(self)
self.runFilt = Alos2Proc.createFilt(self)
self.runUnwrapSnaphu = Alos2Proc.createUnwrapSnaphu(self)
self.runGeocode = Alos2Proc.createGeocode(self)
@ -797,6 +874,13 @@ class Alos2InSAR(Application):
)
)
self.step('baseline',
func=self.runBaseline,
doc=(
"""compute baseline, burst synchronization etc"""
)
)
self.step('download_dem',
func=self.runDownloadDem,
doc=(
@ -909,6 +993,12 @@ class Alos2InSAR(Application):
)
)
self.step('ion_correct', func=self.runIonCorrect,
doc=(
"""resample ionospheric phase and ionospheric correction"""
)
)
self.step('filt', func=self.runFilt,
doc=(
"""filter interferogram"""
@ -970,6 +1060,8 @@ class Alos2InSAR(Application):
# Run a preprocessor for the two sets of frames
self.runPreprocessor()
self.runBaseline()
self.runDownloadDem()
self.runPrepareSlc()
@ -1006,6 +1098,8 @@ class Alos2InSAR(Application):
self.runIonFilt()
self.runIonCorrect()
self.runFilt()
self.runUnwrapSnaphu()

View File

@ -313,6 +313,30 @@ MASKED_AREAS_ION = Application.Parameter('maskedAreasIon',
container = list,
doc = 'areas masked out in ionospheric phase estimation')
SWATH_PHASE_DIFF_SNAP_ION = Application.Parameter('swathPhaseDiffSnapIon',
public_name = 'swath phase difference snap to fixed values',
default = None,
type = bool,
mandatory = False,
container = list,
doc = 'swath phase difference snap to fixed values')
SWATH_PHASE_DIFF_LOWER_ION = Application.Parameter('swathPhaseDiffLowerIon',
public_name = 'swath phase difference of lower band',
default = None,
type = float,
mandatory = False,
container = list,
doc = 'swath phase difference of lower band')
SWATH_PHASE_DIFF_UPPER_ION = Application.Parameter('swathPhaseDiffUpperIon',
public_name = 'swath phase difference of upper band',
default = None,
type = float,
mandatory = False,
container = list,
doc = 'swath phase difference of upper band')
FIT_ION = Application.Parameter('fitIon',
public_name = 'apply polynomial fit before filtering ionosphere phase',
default = True,
@ -320,20 +344,55 @@ FIT_ION = Application.Parameter('fitIon',
mandatory = False,
doc = 'apply polynomial fit before filtering ionosphere phase')
FILT_ION = Application.Parameter('filtIon',
public_name = 'whether filtering ionosphere phase',
default = True,
type = bool,
mandatory = False,
doc = 'whether filtering ionosphere phase')
FIT_ADAPTIVE_ION = Application.Parameter('fitAdaptiveIon',
public_name = 'apply polynomial fit in adaptive filtering window',
default = True,
type = bool,
mandatory = False,
doc = 'apply polynomial fit in adaptive filtering window')
FILT_SECONDARY_ION = Application.Parameter('filtSecondaryIon',
public_name = 'whether do secondary filtering of ionosphere phase',
default = True,
type = bool,
mandatory = False,
doc = 'whether do secondary filtering of ionosphere phase')
FILTERING_WINSIZE_MAX_ION = Application.Parameter('filteringWinsizeMaxIon',
public_name='maximum window size for filtering ionosphere phase',
default=151,
default=301,
type=int,
mandatory=False,
doc='maximum window size for filtering ionosphere phase')
FILTERING_WINSIZE_MIN_ION = Application.Parameter('filteringWinsizeMinIon',
public_name='minimum window size for filtering ionosphere phase',
default=41,
default=11,
type=int,
mandatory=False,
doc='minimum window size for filtering ionosphere phase')
FILTERING_WINSIZE_SECONDARY_ION = Application.Parameter('filteringWinsizeSecondaryIon',
public_name='window size of secondary filtering of ionosphere phase',
default=5,
type=int,
mandatory=False,
doc='window size of secondary filtering of ionosphere phase')
FILTER_STD_ION = Application.Parameter('filterStdIon',
public_name = 'standard deviation of ionosphere phase after filtering',
default = None,
type=float,
mandatory = False,
doc = 'standard deviation of ionosphere phase after filtering')
FILTER_SUBBAND_INT = Application.Parameter('filterSubbandInt',
public_name = 'filter subband interferogram',
default = False,
@ -543,9 +602,17 @@ class Alos2burstInSAR(Application):
NUMBER_RANGE_LOOKS_ION,
NUMBER_AZIMUTH_LOOKS_ION,
MASKED_AREAS_ION,
SWATH_PHASE_DIFF_SNAP_ION,
SWATH_PHASE_DIFF_LOWER_ION,
SWATH_PHASE_DIFF_UPPER_ION,
FIT_ION,
FILT_ION,
FIT_ADAPTIVE_ION,
FILT_SECONDARY_ION,
FILTERING_WINSIZE_MAX_ION,
FILTERING_WINSIZE_MIN_ION,
FILTERING_WINSIZE_SECONDARY_ION,
FILTER_STD_ION,
FILTER_SUBBAND_INT,
FILTER_STRENGTH_SUBBAND_INT,
FILTER_WINSIZE_SUBBAND_INT,
@ -679,6 +746,7 @@ class Alos2burstInSAR(Application):
## Add instance attribute RunWrapper functions, which emulate methods.
def _add_methods(self):
self.runPreprocessor = Alos2burstProc.createPreprocessor(self)
self.runBaseline = Alos2burstProc.createBaseline(self)
self.runExtractBurst = Alos2burstProc.createExtractBurst(self)
self.runDownloadDem = Alos2burstProc.createDownloadDem(self)
self.runCoregGeom = Alos2burstProc.createCoregGeom(self)
@ -698,6 +766,7 @@ class Alos2burstInSAR(Application):
self.runIonSubband = Alos2burstProc.createIonSubband(self)
self.runIonUwrap = Alos2burstProc.createIonUwrap(self)
self.runIonFilt = Alos2burstProc.createIonFilt(self)
self.runIonCorrect = Alos2burstProc.createIonCorrect(self)
self.runFilt = Alos2burstProc.createFilt(self)
self.runUnwrapSnaphu = Alos2burstProc.createUnwrapSnaphu(self)
self.runGeocode = Alos2burstProc.createGeocode(self)
@ -724,6 +793,12 @@ class Alos2burstInSAR(Application):
)
)
self.step('baseline', func=self.runBaseline,
doc=(
"""compute baseline, burst synchronization etc"""
)
)
self.step('extract_burst', func=self.runExtractBurst,
doc=(
"""extract bursts from full aperture images"""
@ -838,6 +913,12 @@ class Alos2burstInSAR(Application):
)
)
self.step('ion_correct', func=self.runIonCorrect,
doc=(
"""resample ionospheric phase and ionospheric correction"""
)
)
self.step('filt', func=self.runFilt,
doc=(
"""filter interferogram"""
@ -891,6 +972,8 @@ class Alos2burstInSAR(Application):
# Run a preprocessor for the two sets of frames
self.runPreprocessor()
self.runBaseline()
self.runExtractBurst()
self.runDownloadDem()
@ -929,6 +1012,8 @@ class Alos2burstInSAR(Application):
self.runIonFilt()
self.runIonCorrect()
self.runFilt()
self.runUnwrapSnaphu()

View File

@ -438,6 +438,20 @@ ION_DO_ION = Application.Parameter('ION_doIon',
mandatory = False,
doc = '')
ION_APPLY_ION = Application.Parameter('ION_applyIon',
public_name = 'apply ionosphere correction',
default = False,
type = bool,
mandatory = False,
doc = '')
ION_CONSIDER_BURST_PROPERTIES = Application.Parameter('ION_considerBurstProperties',
public_name = 'consider burst properties in ionosphere computation',
default = False,
type = bool,
mandatory = False,
doc = '')
ION_START_STEP = Application.Parameter(
'ION_startStep',
public_name='start ionosphere step',
@ -649,6 +663,8 @@ class TopsInSAR(Application):
########################################################
#for ionospheric correction
ION_DO_ION,
ION_APPLY_ION,
ION_CONSIDER_BURST_PROPERTIES,
ION_START_STEP,
ION_END_STEP,
ION_ION_HEIGHT,
@ -734,6 +750,9 @@ class TopsInSAR(Application):
if(self.geocode_list is None):
#if not provided by the user use the list from InsarProc
self.geocode_list = self.insar.geocode_list
#for ionosphere
if 'topophase.ion' not in self.geocode_list:
self.geocode_list.append('topophase.ion')
else:
#if geocode_list defined here, then give it to InsarProc
#for consistency between insarApp and InsarProc and warn the user

View File

@ -91,7 +91,7 @@ def create_xml(fileName, width, length, fileType):
#image.finalizeImage()
def multilook_v1(data, nalks, nrlks):
def multilook_v1(data, nalks, nrlks, mean=True):
'''
doing multiple looking
ATTENTION: the original array is changed after running this function
@ -106,10 +106,13 @@ def multilook_v1(data, nalks, nrlks):
for i in range(1, nrlks):
data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] += data[0:length2*nalks:nalks, i:width2*nrlks:nrlks]
return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] / nrlks / nalks
if mean:
return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] / nrlks / nalks
else:
return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks]
def multilook(data, nalks, nrlks):
def multilook(data, nalks, nrlks, mean=True):
'''
doing multiple looking
'''
@ -125,7 +128,10 @@ def multilook(data, nalks, nrlks):
for i in range(1, nrlks):
data2[:, 0:width2*nrlks:nrlks] += data2[:, i:width2*nrlks:nrlks]
return data2[:, 0:width2*nrlks:nrlks] / nrlks / nalks
if mean:
return data2[:, 0:width2*nrlks:nrlks] / nrlks / nalks
else:
return data2[:, 0:width2*nrlks:nrlks]
def cal_coherence_1(inf, win=5):
@ -281,9 +287,9 @@ def reformatGeometricalOffset(rangeOffsetFile, azimuthOffsetFile, reformatedOffs
offsetsPlain = offsetsPlain + "{:8d} {:10.3f} {:8d} {:12.3f} {:11.5f} {:11.6f} {:11.6f} {:11.6f}\n".format(
int(j*rangeStep+1),
float(rgoff[i][j]),
float(rgoff[i][j])*rangeStep,
int(i*azimuthStep+1),
float(azoff[i][j]),
float(azoff[i][j])*azimuthStep,
float(22.00015),
float(0.000273),
float(0.002126),
@ -536,6 +542,7 @@ def waterBodyRadar(latFile, lonFile, wbdFile, wbdOutFile):
latFp = open(latFile, 'rb')
lonFp = open(lonFile, 'rb')
wbdOutFp = open(wbdOutFile, 'wb')
wbdOutIndex = np.arange(width, dtype=np.int32)
print("create water body in radar coordinates...")
for i in range(length):
if (((i+1)%200) == 0):
@ -551,7 +558,7 @@ def waterBodyRadar(latFile, lonFile, wbdFile, wbdOutFile):
np.logical_and(sampleIndex>=0, sampleIndex<=demImage.width-1)
)
#keep SRTM convention. water body. (0) --- land; (-1) --- water; (-2 or other value) --- no data.
wbdOut = wbd[(lineIndex[inboundIndex], sampleIndex[inboundIndex])]
wbdOut[(wbdOutIndex[inboundIndex],)] = wbd[(lineIndex[inboundIndex], sampleIndex[inboundIndex])]
wbdOut.astype(np.int8).tofile(wbdOutFp)
print("processing line %6d of %6d" % (length, length))
#create_xml(wbdOutFile, width, length, 'byte')
@ -748,7 +755,7 @@ def snaphuUnwrap(track, t, wrapName, corName, unwrapName, nrlks, nalks, costMode
return
def snaphuUnwrapOriginal(wrapName, corName, ampName, unwrapName, costMode = 's', initMethod = 'mcf'):
def snaphuUnwrapOriginal(wrapName, corName, ampName, unwrapName, costMode = 's', initMethod = 'mcf', snaphuConfFile = 'snaphu.conf'):
'''
unwrap interferogram using original snaphu program
'''
@ -761,14 +768,14 @@ def snaphuUnwrapOriginal(wrapName, corName, ampName, unwrapName, costMode = 's',
length = corImg.length
#specify coherence file format in configure file
snaphuConfFile = 'snaphu.conf'
#snaphuConfFile = 'snaphu.conf'
if corImg.bands == 1:
snaphuConf = '''CORRFILEFORMAT FLOAT_DATA
CONNCOMPFILE {}
MAXNCOMPS 20'''.format(unwrapName+'.conncomp')
else:
snaphuConf = '''CORRFILEFORMAT FLOAT_DATA
snaphuConf = '''CORRFILEFORMAT ALT_LINE_DATA
CONNCOMPFILE {}
MAXNCOMPS 20'''.format(unwrapName+'.conncomp')
with open(snaphuConfFile, 'w') as f:
@ -808,7 +815,7 @@ MAXNCOMPS 20'''.format(unwrapName+'.conncomp')
return
def getBboxGeo(track):
def getBboxGeo(track, useTrackOnly=False, numberOfSamples=1, numberOfLines=1, numberRangeLooks=1, numberAzimuthLooks=1):
'''
get bounding box in geo-coordinate
'''
@ -816,7 +823,15 @@ def getBboxGeo(track):
pointingDirection = {'right': -1, 'left' :1}
bboxRdr = getBboxRdr(track)
if useTrackOnly:
import datetime
rangeMin = track.startingRange + (numberRangeLooks-1.0)/2.0*track.rangePixelSize
rangeMax = rangeMin + (numberOfSamples-1) * numberRangeLooks * track.rangePixelSize
azimuthTimeMin = track.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*track.azimuthLineInterval)
azimuthTimeMax = azimuthTimeMin + datetime.timedelta(seconds=(numberOfLines-1) * numberAzimuthLooks * track.azimuthLineInterval)
bboxRdr = [rangeMin, rangeMax, azimuthTimeMin, azimuthTimeMax]
else:
bboxRdr = getBboxRdr(track)
rangeMin = bboxRdr[0]
rangeMax = bboxRdr[1]
@ -1183,11 +1198,269 @@ def create_multi_index2(width2, l1, l2):
return ((l2 - l1) / 2.0 + np.arange(width2) * l2) / l1
def computePhaseDiff(data1, data22, coherenceWindowSize=5, coherenceThreshold=0.85):
    '''
    Estimate the constant phase difference between two co-registered complex images.

    data1:               reference complex image (numpy 2-D array)
    data22:              secondary complex image (numpy 2-D array); not modified, a copy is made
    coherenceWindowSize: window size used to compute coherence of the differential phase
    coherenceThreshold:  only pixels with coherence above this threshold are used

    returns: (phaseDiff, numberOfValidSamples)
      phaseDiff:            mean phase difference in radians, wrapped to [-pi, pi]
      numberOfValidSamples: number of pixels used in the estimation (0 if none valid)
    '''
    import copy
    import numpy as np
    from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence_1

    #data22 will be changed in the processing, so make a copy here
    data2 = copy.deepcopy(data22)

    dataDiff = data1 * np.conj(data2)
    cor = cal_coherence_1(dataDiff, win=coherenceWindowSize)
    index = np.nonzero(np.logical_and(cor>coherenceThreshold, dataDiff!=0))

    #check if there are valid pixels
    if index[0].size == 0:
        phaseDiff = 0.0
        numberOfValidSamples = 0
        return (phaseDiff, numberOfValidSamples)
    else:
        numberOfValidSamples = index[0].size

    #in case phase difference is around PI, sum of +PI and -PI is zero, which affects the following
    #mean phase difference computation.
    #remove magnitude before doing sum; the (dataDiff==0) term guards against division by zero
    dataDiff = dataDiff / (np.absolute(dataDiff)+(dataDiff==0))
    phaseDiff0 = np.angle(np.sum(dataDiff[index], dtype=np.complex128))
    #now the phase difference values are mostly centered at 0
    data2 *= np.exp(np.complex64(1j) * phaseDiff0)
    phaseDiff = phaseDiff0

    #iteratively refine: compute mean residual phase and rotate data2 until the residual is negligible
    numberOfIterations = 1000000
    threshold = 0.000001
    for k in range(numberOfIterations):
        dataDiff = data1 * np.conj(data2)
        angle = np.mean(np.angle(dataDiff[index]), dtype=np.float64)
        phaseDiff += angle
        data2 *= np.exp(np.complex64(1j) * angle)
        print('phase offset: %15.12f rad after iteration: %3d'%(phaseDiff, k+1))
        #bug fix: compare the magnitude of the residual with the threshold.
        #the original test (angle <= threshold) stopped as soon as the mean residual
        #was negative, even when it was still large, so convergence was not guaranteed.
        if (k+1 >= 5) and (np.absolute(angle) <= threshold):
            break

    #only take the value within -pi--pi
    if phaseDiff > np.pi:
        phaseDiff -= 2.0 * np.pi
    if phaseDiff < -np.pi:
        phaseDiff += 2.0 * np.pi

    # mean phase difference
    # number of valid samples to compute the phase difference
    return (phaseDiff, numberOfValidSamples)
def snap(inputValue, fixedValues, snapThreshold):
    '''
    Snap the magnitude of inputValue to the nearest magnitude among fixedValues.

    fixedValues can be a list or numpy array. If the smallest gap between
    |inputValue| and the magnitudes of fixedValues is below snapThreshold,
    return that fixed magnitude carrying the sign of inputValue together with
    snapped=True; otherwise return inputValue unchanged with snapped=False.
    '''
    import numpy as np

    magnitudes = np.absolute(np.array(fixedValues))
    gaps = np.absolute(magnitudes - np.absolute(inputValue))
    closest = np.argmin(gaps)

    if gaps[closest] < snapThreshold:
        return (np.sign(inputValue) * magnitudes[closest], True)
    return (inputValue, False)
#Default processing parameters per acquisition mode, keyed by sensor name and
#then by the two/three-letter operation-mode code.
#  numberRangeLooks1 / numberAzimuthLooks1:     looks taken when forming the interferogram
#  numberRangeLooks2 / numberAzimuthLooks2:     additional looks taken afterwards
#  numberRangeLooksIon / numberAzimuthLooksIon: looks used for ionosphere estimation
#  filterStdIon: default standard deviation of ionosphere phase after filtering
#NOTE(review): the meanings above are inferred from the parameter names — confirm
#against the processing steps that consume this table.
modeProcParDict = {
    'ALOS-2': {
        #All SPT (SBS) modes are the same
        'SBS': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 4,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 16,
            'numberAzimuthLooksIon': 16,
            'filterStdIon': 0.015
        },
        #All SM1 (UBS, UBD) modes are the same
        'UBS': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 3,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 32,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.015
        },
        'UBD': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 3,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 32,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.015
        },
        #All SM2 (HBS, HBD, HBQ) modes are the same
        'HBS': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 4,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 16,
            'numberAzimuthLooksIon': 16,
            'filterStdIon': 0.035
        },
        'HBD': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 4,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 16,
            'numberAzimuthLooksIon': 16,
            'filterStdIon': 0.035
        },
        'HBQ': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 4,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 16,
            'numberAzimuthLooksIon': 16,
            'filterStdIon': 0.035
        },
        #All SM3 (FBS, FBD, FBQ) modes are the same
        'FBS': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 4,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 16,
            'numberAzimuthLooksIon': 16,
            'filterStdIon': 0.075
        },
        'FBD': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 4,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 16,
            'numberAzimuthLooksIon': 16,
            'filterStdIon': 0.075
        },
        'FBQ': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 4,
            'numberRangeLooks2': 4,
            'numberAzimuthLooks2': 4,
            'numberRangeLooksIon': 16,
            'numberAzimuthLooksIon': 16,
            'filterStdIon': 0.075
        },
        #All WD1 (WBS, WBD) modes are the same
        'WBS': {
            'numberRangeLooks1': 1,
            'numberAzimuthLooks1': 14,
            'numberRangeLooks2': 5,
            'numberAzimuthLooks2': 2,
            'numberRangeLooksIon': 80,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.1
        },
        'WBD': {
            'numberRangeLooks1': 1,
            'numberAzimuthLooks1': 14,
            'numberRangeLooks2': 5,
            'numberAzimuthLooks2': 2,
            'numberRangeLooksIon': 80,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.1
        },
        #All WD1 (WWS, WWD) modes are the same
        'WWS': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 14,
            'numberRangeLooks2': 5,
            'numberAzimuthLooks2': 2,
            'numberRangeLooksIon': 80,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.075
        },
        'WWD': {
            'numberRangeLooks1': 2,
            'numberAzimuthLooks1': 14,
            'numberRangeLooks2': 5,
            'numberAzimuthLooks2': 2,
            'numberRangeLooksIon': 80,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.075
        },
        #All WD2 (VBS, VBD) modes are the same
        'VBS': {
            'numberRangeLooks1': 1,
            'numberAzimuthLooks1': 14,
            'numberRangeLooks2': 5,
            'numberAzimuthLooks2': 2,
            'numberRangeLooksIon': 80,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.1
        },
        'VBD': {
            'numberRangeLooks1': 1,
            'numberAzimuthLooks1': 14,
            'numberRangeLooks2': 5,
            'numberAzimuthLooks2': 2,
            'numberRangeLooksIon': 80,
            'numberAzimuthLooksIon': 32,
            'filterStdIon': 0.1
        }
    }
}

import numpy as np
#quadratic polynomial coefficients (highest order first) used to derive a default
#ionosphere filtering standard deviation — presumably evaluated with np.polyval;
#TODO confirm the independent variable and units against the caller.
filterStdPolyIon = np.array([ 2.31536879e-05, -3.41687763e-03, 1.39904121e-01])

View File

@ -4,6 +4,7 @@ InstallSameDir(
Alos2ProcPublic.py
Factories.py
denseOffsetNote.txt
runBaseline.py
runCoherence.py
runDenseOffset.py
runDiffInterferogram.py
@ -16,6 +17,7 @@ InstallSameDir(
runGeo2Rdr.py
runGeocode.py
runGeocodeOffset.py
runIonCorrect.py
runIonFilt.py
runIonSubband.py
runIonUwrap.py

View File

@ -74,6 +74,7 @@ def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None):
createPreprocessor = _factory("runPreprocessor")
createBaseline = _factory("runBaseline")
createDownloadDem = _factory("runDownloadDem")
createPrepareSlc = _factory("runPrepareSlc")
createSlcOffset = _factory("runSlcOffset")
@ -92,6 +93,7 @@ createCoherence = _factory("runCoherence")
createIonSubband = _factory("runIonSubband")
createIonUwrap = _factory("runIonUwrap")
createIonFilt = _factory("runIonFilt")
createIonCorrect = _factory("runIonCorrect")
createFilt = _factory("runFilt")
createUnwrapSnaphu = _factory("runUnwrapSnaphu")
createGeocode = _factory("runGeocode")

View File

@ -40,6 +40,6 @@ project = 'Alos2Proc'
install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project)
listFiles = ['__init__.py', 'Factories.py', 'Alos2Proc.py', 'Alos2ProcPublic.py', 'runPreprocessor.py', 'runDownloadDem.py', 'runPrepareSlc.py', 'runSlcOffset.py', 'runFormInterferogram.py', 'runSwathOffset.py', 'runSwathMosaic.py', 'runFrameOffset.py', 'runFrameMosaic.py', 'runRdr2Geo.py', 'runGeo2Rdr.py', 'runRdrDemOffset.py', 'runRectRangeOffset.py', 'runDiffInterferogram.py', 'runLook.py', 'runCoherence.py', 'runIonSubband.py', 'runIonUwrap.py', 'runIonFilt.py', 'runFilt.py', 'runUnwrapSnaphu.py', 'runGeocode.py', 'srtm_no_swbd_tiles.txt', 'srtm_tiles.txt', 'swbd_tiles.txt', 'runSlcMosaic.py', 'runSlcMatch.py', 'runDenseOffset.py', 'runFiltOffset.py', 'runGeocodeOffset.py', 'denseOffsetNote.txt']
listFiles = ['__init__.py', 'Factories.py', 'Alos2Proc.py', 'Alos2ProcPublic.py', 'runPreprocessor.py', 'runBaseline.py', 'runDownloadDem.py', 'runPrepareSlc.py', 'runSlcOffset.py', 'runFormInterferogram.py', 'runSwathOffset.py', 'runSwathMosaic.py', 'runFrameOffset.py', 'runFrameMosaic.py', 'runRdr2Geo.py', 'runGeo2Rdr.py', 'runRdrDemOffset.py', 'runRectRangeOffset.py', 'runDiffInterferogram.py', 'runLook.py', 'runCoherence.py', 'runIonSubband.py', 'runIonUwrap.py', 'runIonFilt.py', 'runIonCorrect.py', 'runFilt.py', 'runUnwrapSnaphu.py', 'runGeocode.py', 'srtm_no_swbd_tiles.txt', 'srtm_tiles.txt', 'swbd_tiles.txt', 'runSlcMosaic.py', 'runSlcMatch.py', 'runDenseOffset.py', 'runFiltOffset.py', 'runGeocodeOffset.py', 'denseOffsetNote.txt']
envisceobj.Install(install,listFiles)
envisceobj.Alias('install',install)

View File

@ -0,0 +1,229 @@
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import logging
import datetime
import numpy as np
import isceobj
import isceobj.Sensor.MultiMode as MultiMode
from isceobj.Planet.Planet import Planet
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
logger = logging.getLogger('isce.alos2insar.runBaseline')
def runBaseline(self):
    '''Compute baseline, ScanSAR burst synchronization, and bounding boxes
    of the reference and secondary tracks, recording results in the catalog.
    '''
    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)


    ##################################################
    #2. compute burst synchronization
    ##################################################
    #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights
    #in one frame, real unsynchronized time is the same for all swaths
    unsynTime = 0
    #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime))
    #synTime = 0
    synPercentage = 0

    numberOfFrames = len(self._insar.referenceFrames)
    numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1

    for i, frameNumber in enumerate(self._insar.referenceFrames):
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            referenceSwath = referenceTrack.frames[i].swaths[j]
            secondarySwath = secondaryTrack.frames[i].swaths[j]

            #using Piyush's code for computing range and azimuth offsets
            #mid-swath range/time of the reference, projected onto the secondary orbit
            midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5
            midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf)
            llh = referenceTrack.orbit.rdr2geo(midSensingStart, midRange)
            slvaz, slvrng = secondaryTrack.orbit.geo2rdr(llh)

            ###Translate to offsets
            #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same
            #range pixel size and prf
            rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5
            azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5

            #compute burst synchronization
            #burst parameters for ScanSAR wide mode not estimated yet
            #modeCombination == 21: both reference and secondary are ScanSAR nominal mode
            if self._insar.modeCombination == 21:
                scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
                #secondary burst start times corresponding to reference burst start times (100% synchronization)
                scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \
                                              scburstStartLine + 100000*referenceSwath.burstCycleLength, \
                                              referenceSwath.burstCycleLength)
                dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines)
                #find the difference with minimum absolute value
                unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))]
                if np.absolute(unsynLines) >= secondarySwath.burstLength:
                    #no overlap at all: clamp unsynLines to one burst length
                    synLines = 0
                    if unsynLines > 0:
                        unsynLines = secondarySwath.burstLength
                    else:
                        unsynLines = -secondarySwath.burstLength
                else:
                    synLines = secondarySwath.burstLength - np.absolute(unsynLines)

                unsynTime += unsynLines / referenceSwath.prf
                synPercentage += synLines / referenceSwath.burstLength * 100.0

                catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runBaseline')

                ############################################################################################
                #illustration of the sign of the number of unsynchronized lines (unsynLines)
                #The convention is the same as ampcor offset, that is,
                #   secondaryLineNumber = referenceLineNumber + unsynLines
                #
                # |-----------------------|     ------------
                # |                       |        ^
                # |                       |        |
                # |                       |        |   unsynLines < 0
                # |                       |        |
                # |                       |       \ /
                # |                       |    |-----------------------|
                # |                       |    |                       |
                # |                       |    |                       |
                # |-----------------------|    |                       |
                #        Reference Burst       |                       |
                #                              |                       |
                #                              |                       |
                #                              |                       |
                #                              |                       |
                #                              |-----------------------|
                #                                     Secondary Burst
                #
                #
                ############################################################################################
            ##burst parameters for ScanSAR wide mode not estimated yet
            #modeCombination == 31: ScanSAR reference with stripmap secondary
            elif self._insar.modeCombination == 31:
                #scansar is reference
                scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
                #secondary burst start times corresponding to reference burst start times (100% synchronization)
                #search for the first reference burst start that falls inside the secondary swath,
                #then copy the reference burst timing onto the secondary
                for k in range(-100000, 100000):
                    saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k
                    st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf)
                    if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1:
                        secondarySwath.burstStartTime = st_burstx
                        secondarySwath.burstLength = referenceSwath.burstLength
                        secondarySwath.burstCycleLength = referenceSwath.burstCycleLength
                        secondarySwath.swathNumber = referenceSwath.swathNumber
                        break
                #unsynLines = 0
                #synLines = referenceSwath.burstLength
                #unsynTime += unsynLines / referenceSwath.prf
                #synPercentage += synLines / referenceSwath.burstLength * 100.0
                catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runBaseline')
            else:
                #other mode combinations: no burst synchronization to compute
                pass

        #overwrite original frame parameter file
        #(the secondary swaths were given burst parameters above for this mode)
        if self._insar.modeCombination == 31:
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            self._insar.saveProduct(secondaryTrack.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter))

    #getting average
    if self._insar.modeCombination == 21:
        unsynTime /= numberOfFrames*numberOfSwaths
        synPercentage /= numberOfFrames*numberOfSwaths
    elif self._insar.modeCombination == 31:
        unsynTime = 0.
        synPercentage = 100.
    else:
        pass

    #record results
    if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31):
        self._insar.burstUnsynchronizedTime = unsynTime
        self._insar.burstSynchronization = synPercentage
        catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runBaseline')


    ##################################################
    #3. compute baseline
    ##################################################
    #only compute baseline at four corners and center of the reference track
    bboxRdr = getBboxRdr(referenceTrack)

    rangeMin = bboxRdr[0]
    rangeMax = bboxRdr[1]
    azimuthTimeMin = bboxRdr[2]
    azimuthTimeMax = bboxRdr[3]

    azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0)
    rangeMid = (rangeMin + rangeMax) / 2.0

    points = [[azimuthTimeMin, rangeMin],
              [azimuthTimeMin, rangeMax],
              [azimuthTimeMax, rangeMin],
              [azimuthTimeMax, rangeMax],
              [azimuthTimeMid, rangeMid]]

    Bpar = []
    Bperp = []
    #modify Piyush's code for computing baselines
    refElp = Planet(pname='Earth').ellipsoid
    for x in points:
        referenceSV = referenceTrack.orbit.interpolate(x[0], method='hermite')
        target = referenceTrack.orbit.rdr2geo(x[0], x[1])

        slvTime, slvrng = secondaryTrack.orbit.geo2rdr(target)
        secondarySV = secondaryTrack.orbit.interpolateOrbit(slvTime, method='hermite')

        targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist())
        mxyz = np.array(referenceSV.getPosition())
        mvel = np.array(referenceSV.getVelocity())
        sxyz = np.array(secondarySV.getPosition())

        #to fix abrupt change near zero in baseline grid. JUN-05-2020
        #project the secondary position onto the plane perpendicular to the reference velocity
        mvelunit = mvel / np.linalg.norm(mvel)
        sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit

        #law of cosines on the triangle (reference, secondary, target) to split the
        #baseline into parallel and perpendicular components
        aa = np.linalg.norm(sxyz-mxyz)
        costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa)

        Bpar.append(aa*costheta)

        perp = aa * np.sqrt(1 - costheta*costheta)
        direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel))
        Bperp.append(direction*perp)

    catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runBaseline')
    catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runBaseline')
    catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runBaseline')
    catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runBaseline')
    catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runBaseline')
    catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runBaseline')
    catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runBaseline')
    catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runBaseline')
    catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runBaseline')
    catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runBaseline')


    ##################################################
    #4. compute bounding box
    ##################################################
    referenceBbox = getBboxGeo(referenceTrack)
    secondaryBbox = getBboxGeo(secondaryTrack)
    catalog.addItem('reference bounding box', referenceBbox, 'runBaseline')
    catalog.addItem('secondary bounding box', secondaryBbox, 'runBaseline')


    catalog.printToLog(logger, "runBaseline")
    self._insar.procDoc.addAllFromCatalog(catalog)

View File

@ -13,8 +13,12 @@ from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
logger = logging.getLogger('isce.alos2insar.runCoherence')
def runCoherence(self):
'''Extract images.
'''estimate coherence
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -15,6 +15,10 @@ logger = logging.getLogger('isce.alos2insar.runDiffInterferogram')
def runDiffInterferogram(self):
'''Extract images.
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -21,12 +21,24 @@ logger = logging.getLogger('isce.alos2insar.runFilt')
def runFilt(self):
'''filter interferogram
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
#referenceTrack = self._insar.loadTrack(reference=True)
#secondaryTrack = self._insar.loadTrack(reference=False)
filt(self)
catalog.printToLog(logger, "runFilt")
self._insar.procDoc.addAllFromCatalog(catalog)
def filt(self):
insarDir = 'insar'
os.makedirs(insarDir, exist_ok=True)
os.chdir(insarDir)
@ -150,21 +162,17 @@ def runFilt(self):
print('\nmask filtered interferogram using: {}'.format(self._insar.multilookWbdOut))
if self.waterBodyMaskStartingStep=='filt':
if not os.path.exists(self._insar.multilookWbdOut):
catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runFilt')
else:
wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width)
phsig=np.memmap(self._insar.multilookPhsig, dtype='float32', mode='r+', shape=(length, width))
phsig[np.nonzero(wbd==-1)]=0
del phsig
filt=np.memmap(self._insar.filteredInterferogram, dtype='complex64', mode='r+', shape=(length, width))
filt[np.nonzero(wbd==-1)]=0
del filt
del wbd
#if not os.path.exists(self._insar.multilookWbdOut):
# catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runFilt')
#else:
wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width)
phsig=np.memmap(self._insar.multilookPhsig, dtype='float32', mode='r+', shape=(length, width))
phsig[np.nonzero(wbd==-1)]=0
del phsig
filt=np.memmap(self._insar.filteredInterferogram, dtype='complex64', mode='r+', shape=(length, width))
filt[np.nonzero(wbd==-1)]=0
del filt
del wbd
os.chdir('../')
catalog.printToLog(logger, "runFilt")
self._insar.procDoc.addAllFromCatalog(catalog)

View File

@ -18,6 +18,10 @@ logger = logging.getLogger('isce.alos2insar.runFormInterferogram')
def runFormInterferogram(self):
'''form interferograms.
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -17,6 +17,10 @@ logger = logging.getLogger('isce.alos2insar.runFrameMosaic')
def runFrameMosaic(self):
'''mosaic frames
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
@ -103,13 +107,18 @@ def runFrameMosaic(self):
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=False, resamplingMethod=0)
#mosaic interferograms
frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=True, phaseCompensation=True, resamplingMethod=1)
create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')
catalog.addItem('frame phase diff estimated', phaseDiffEst[1:], 'runFrameMosaic')
catalog.addItem('frame phase diff used', phaseDiffUsed[1:], 'runFrameMosaic')
catalog.addItem('frame phase diff used source', phaseDiffSource[1:], 'runFrameMosaic')
catalog.addItem('frame phase diff samples used', numberOfValidSamples[1:], 'runFrameMosaic')
#update secondary parameters here
#do not match for secondary, always use geometrical
rangeOffsets = self._insar.frameRangeOffsetGeometricalSecondary
@ -125,7 +134,7 @@ def runFrameMosaic(self):
self._insar.procDoc.addAllFromCatalog(catalog)
def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateTrack=False, phaseCompensation=False, resamplingMethod=0):
def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateTrack=False, phaseCompensation=False, phaseDiffFixed=None, snapThreshold=None, resamplingMethod=0):
'''
mosaic frames
@ -138,6 +147,8 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
numberOfAzimuthLooks: number of azimuth looks of the input files
updateTrack: whether update track parameters
phaseCompensation: whether do phase compensation for each frame
phaseDiffFixed: if provided, the estimated value will snap to one of these values, which is nearest to the estimated one.
snapThreshold: this is used with phaseDiffFixed
resamplingMethod: 0: amp resampling. 1: int resampling. 2: slc resampling
'''
import numpy as np
@ -149,6 +160,8 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_file
from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_keyword
from isceobj.Alos2Proc.Alos2ProcPublic import computePhaseDiff
from isceobj.Alos2Proc.Alos2ProcPublic import snap
numberOfFrames = len(track.frames)
frames = track.frames
@ -184,90 +197,107 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
if i == 0:
rinfs[i] = inf
else:
infImg = isceobj.createImage()
infImg.load(inf+'.xml')
rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i])
azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i])
#no need to resample
if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and (abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001):
if os.path.isfile(rinfs[i]):
os.remove(rinfs[i])
os.symlink(inf, rinfs[i])
#all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like
#4.99999999999...
rangeOffsets2[i] = round(rangeOffsets2[i])
azimuthOffsets2[i] = round(azimuthOffsets2[i])
if resamplingMethod == 0:
rect_with_looks(inf,
rinfs[i],
infImg.width, infImg.length,
infImg.width, infImg.length,
1.0, 0.0,
0.0, 1.0,
rangeOffsets2Frac, azimuthOffsets2Frac,
1,1,
1,1,
'COMPLEX',
'Bilinear')
if infImg.getImageType() == 'amp':
create_xml(rinfs[i], infImg.width, infImg.length, 'amp')
else:
create_xml(rinfs[i], infImg.width, infImg.length, 'int')
elif resamplingMethod == 1:
#decompose amplitude and phase
phaseFile = 'phase'
amplitudeFile = 'amplitude'
data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width)
phase = np.exp(np.complex64(1j) * np.angle(data))
phase[np.nonzero(data==0)] = 0
phase.astype(np.complex64).tofile(phaseFile)
amplitude = np.absolute(data)
amplitude.astype(np.float32).tofile(amplitudeFile)
#resampling
phaseRectFile = 'phaseRect'
amplitudeRectFile = 'amplitudeRect'
rect_with_looks(phaseFile,
phaseRectFile,
infImg.width, infImg.length,
infImg.width, infImg.length,
1.0, 0.0,
0.0, 1.0,
rangeOffsets2Frac, azimuthOffsets2Frac,
1,1,
1,1,
'COMPLEX',
'Sinc')
rect_with_looks(amplitudeFile,
amplitudeRectFile,
infImg.width, infImg.length,
infImg.width, infImg.length,
1.0, 0.0,
0.0, 1.0,
rangeOffsets2Frac, azimuthOffsets2Frac,
1,1,
1,1,
'REAL',
'Bilinear')
#recombine amplitude and phase
phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(infImg.length, infImg.width)
amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(infImg.length, infImg.width)
(phase*amplitude).astype(np.complex64).tofile(rinfs[i])
#tidy up
os.remove(phaseFile)
os.remove(amplitudeFile)
os.remove(phaseRectFile)
os.remove(amplitudeRectFile)
infImg = isceobj.createImage()
infImg.load(inf+'.xml')
if infImg.getImageType() == 'amp':
create_xml(rinfs[i], infImg.width, infImg.length, 'amp')
else:
create_xml(rinfs[i], infImg.width, infImg.length, 'int')
else:
resamp(inf,
rinfs[i],
'fake',
'fake',
infImg.width, infImg.length,
frames[i].swaths[0].prf,
frames[i].swaths[0].dopplerVsPixel,
[rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
create_xml(rinfs[i], infImg.width, infImg.length, 'slc')
infImg = isceobj.createImage()
infImg.load(inf+'.xml')
rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i])
azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i])
if resamplingMethod == 0:
rect_with_looks(inf,
rinfs[i],
infImg.width, infImg.length,
infImg.width, infImg.length,
1.0, 0.0,
0.0, 1.0,
rangeOffsets2Frac, azimuthOffsets2Frac,
1,1,
1,1,
'COMPLEX',
'Bilinear')
if infImg.getImageType() == 'amp':
create_xml(rinfs[i], infImg.width, infImg.length, 'amp')
else:
create_xml(rinfs[i], infImg.width, infImg.length, 'int')
elif resamplingMethod == 1:
#decompose amplitude and phase
phaseFile = 'phase'
amplitudeFile = 'amplitude'
data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width)
phase = np.exp(np.complex64(1j) * np.angle(data))
phase[np.nonzero(data==0)] = 0
phase.astype(np.complex64).tofile(phaseFile)
amplitude = np.absolute(data)
amplitude.astype(np.float32).tofile(amplitudeFile)
#resampling
phaseRectFile = 'phaseRect'
amplitudeRectFile = 'amplitudeRect'
rect_with_looks(phaseFile,
phaseRectFile,
infImg.width, infImg.length,
infImg.width, infImg.length,
1.0, 0.0,
0.0, 1.0,
rangeOffsets2Frac, azimuthOffsets2Frac,
1,1,
1,1,
'COMPLEX',
'Sinc')
rect_with_looks(amplitudeFile,
amplitudeRectFile,
infImg.width, infImg.length,
infImg.width, infImg.length,
1.0, 0.0,
0.0, 1.0,
rangeOffsets2Frac, azimuthOffsets2Frac,
1,1,
1,1,
'REAL',
'Bilinear')
#recombine amplitude and phase
phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(infImg.length, infImg.width)
amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(infImg.length, infImg.width)
(phase*amplitude).astype(np.complex64).tofile(rinfs[i])
#tidy up
os.remove(phaseFile)
os.remove(amplitudeFile)
os.remove(phaseRectFile)
os.remove(amplitudeRectFile)
if infImg.getImageType() == 'amp':
create_xml(rinfs[i], infImg.width, infImg.length, 'amp')
else:
create_xml(rinfs[i], infImg.width, infImg.length, 'int')
else:
resamp(inf,
rinfs[i],
'fake',
'fake',
infImg.width, infImg.length,
frames[i].swaths[0].prf,
frames[i].swaths[0].dopplerVsPixel,
[rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
create_xml(rinfs[i], infImg.width, infImg.length, 'slc')
#determine output width and length
#actually no need to calculate in azimuth direction
@ -305,6 +335,15 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
#compute phase offset
if phaseCompensation:
phaseDiffEst = [0.0 for i in range(numberOfFrames)]
phaseDiffUsed = [0.0 for i in range(numberOfFrames)]
phaseDiffSource = ['estimated' for i in range(numberOfFrames)]
numberOfValidSamples = [0 for i in range(numberOfFrames)]
#phaseDiffEst = [0.0]
#phaseDiffUsed = [0.0]
#phaseDiffSource = ['estimated']
phaseOffsetPolynomials = [np.array([0.0])]
for i in range(1, numberOfFrames):
upperframe = np.zeros((ye[i-1]-ys[i]+1, outWidth), dtype=np.complex128)
@ -323,8 +362,29 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
diff = np.sum(upperframe * np.conj(lowerframe), axis=0)
(firstLine, lastLine, firstSample, lastSample) = findNonzero(np.reshape(diff, (1, outWidth)))
#here i use mean value(deg=0) in case difference is around -pi or pi.
#!!!!!there have been updates, now deg must be 0
deg = 0
p = np.polyfit(np.arange(firstSample, lastSample+1), np.angle(diff[firstSample:lastSample+1]), deg)
#need to use a more sophisticated method to compute the mean phase difference
(phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80)
#snap phase difference to fixed values
if phaseDiffFixed is not None:
(outputValue, snapped) = snap(phaseDiffEst[i], phaseDiffFixed, snapThreshold)
if snapped == True:
phaseDiffUsed[i] = outputValue
phaseDiffSource[i] = 'estimated+snap'
else:
phaseDiffUsed[i] = phaseDiffEst[i]
phaseDiffSource[i] = 'estimated'
else:
phaseDiffUsed[i] = phaseDiffEst[i]
phaseDiffSource[i] = 'estimated'
#use new phase constant value
p[-1] = phaseDiffUsed[i]
phaseOffsetPolynomials.append(p)
@ -435,6 +495,10 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
track.azimuthPixelSize = frames[0].azimuthPixelSize
track.azimuthLineInterval = frames[0].azimuthLineInterval
if phaseCompensation:
# estimated phase diff, used phase diff, used phase diff source
return (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples)
def frameMosaicParameters(track, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks):
'''

View File

@ -13,6 +13,10 @@ logger = logging.getLogger('isce.alos2insar.runFrameOffset')
def runFrameOffset(self):
'''estimate frame offsets.
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -13,6 +13,10 @@ logger = logging.getLogger('isce.alos2insar.runGeo2Rdr')
def runGeo2Rdr(self):
'''compute range and azimuth offsets
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -16,6 +16,10 @@ logger = logging.getLogger('isce.alos2insar.runGeocode')
def runGeocode(self):
'''geocode final products
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -0,0 +1,150 @@
import os
import logging
import numpy as np
import numpy.matlib
import isceobj
logger = logging.getLogger('isce.alos2insar.runIonCorrect')
def runIonCorrect(self):
    '''Resample the filtered ionospheric phase to the final multilook grid and,
    if requested, remove it from the differential interferogram.

    Steps (numbered to match the overall ionosphere workflow):
      3. interpolate the filtered ionosphere from the ion-estimation multilook
         grid (looks1 * looksIon) to the output grid (looks1 * looks2);
      4. subtract the ionospheric phase from the differential interferogram
         via imageMath.py (only when self.applyIon is True).

    Side effects: changes the current working directory several times
    (ends back at the directory it started from), creates/renames files
    under ion/ion_cal and insar/, and appends a catalog to the proc doc.
    '''
    # Skip entirely when InSAR processing is disabled.
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    # Nothing to do if ionospheric correction was not requested; still log.
    if not self.doIon:
        catalog.printToLog(logger, "runIonCorrect")
        self._insar.procDoc.addAllFromCatalog(catalog)
        return

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    from isceobj.Alos2Proc.runIonSubband import defineIonDir
    ionDir = defineIonDir()
    subbandPrefix = ['lower', 'upper']

    ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal'])
    os.makedirs(ionCalDir, exist_ok=True)
    os.chdir(ionCalDir)

    ############################################################
    # STEP 3. resample ionospheric phase
    ############################################################
    from contrib.alos2proc_f.alos2proc_f import rect
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
    from scipy.interpolate import interp1d
    import shutil

    #################################################
    #SET PARAMETERS HERE
    #interpolation method: 0 = fortran bilinear (faster), 1 = cubic spline (finer)
    interpolationMethod = 1
    #################################################

    print('\ninterpolate ionosphere')

    # Multilook suffixes: ml2 = ionosphere-estimation grid, ml3 = final output grid.
    ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
                              self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon)
    ml3 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooks2,
                              self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2)

    ionfiltfile = 'filt_ion'+ml2+'.ion'
    #ionrectfile = 'filt_ion'+ml3+'.ion'
    ionrectfile = self._insar.multilookIon

    # Input grid dimensions (filtered ionosphere).
    img = isceobj.createImage()
    img.load(ionfiltfile + '.xml')
    width2 = img.width
    length2 = img.length

    # Output grid dimensions (final differential interferogram).
    img = isceobj.createImage()
    img.load(os.path.join('../../', ionDir['insar'], self._insar.multilookDifferentialInterferogram) + '.xml')
    width3 = img.width
    length3 = img.length

    #number of range looks output
    nrlo = self._insar.numberRangeLooks1*self._insar.numberRangeLooks2
    #number of range looks input
    nrli = self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon
    #number of azimuth looks output
    nalo = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2
    #number of azimuth looks input
    nali = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon

    if (self._insar.numberRangeLooks2 != self._insar.numberRangeLooksIon) or \
       (self._insar.numberAzimuthLooks2 != self._insar.numberAzimuthLooksIon):
        #this should be faster using fortran
        if interpolationMethod == 0:
            rect(ionfiltfile, ionrectfile,
                width2,length2,
                width3,length3,
                nrlo/nrli, 0.0,
                0.0, nalo/nali,
                (nrlo-nrli)/(2.0*nrli),
                (nalo-nali)/(2.0*nali),
                'REAL','Bilinear')
        #finer, but slower method
        else:
            ionfilt = np.fromfile(ionfiltfile, dtype=np.float32).reshape(length2, width2)
            # Interpolate along range first (row by row)...
            index2 = np.linspace(0, width2-1, num=width2, endpoint=True)
            index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli)
            ionrect = np.zeros((length3, width3), dtype=np.float32)
            for i in range(length2):
                f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate")
                ionrect[i, :] = f(index3)

            # ...then along azimuth (column by column), in place.
            index2 = np.linspace(0, length2-1, num=length2, endpoint=True)
            index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali)
            for j in range(width3):
                f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate")
                ionrect[:, j] = f(index3)
            ionrect.astype(np.float32).tofile(ionrectfile)
            del ionrect
        create_xml(ionrectfile, width3, length3, 'float')

        # Move the resampled product (data + metadata) into the insar directory.
        os.rename(ionrectfile, os.path.join('../../insar', ionrectfile))
        os.rename(ionrectfile+'.vrt', os.path.join('../../insar', ionrectfile)+'.vrt')
        os.rename(ionrectfile+'.xml', os.path.join('../../insar', ionrectfile)+'.xml')
        os.chdir('../../insar')
    else:
        # Same multilook factors: no resampling needed, just copy.
        shutil.copyfile(ionfiltfile, os.path.join('../../insar', ionrectfile))
        os.chdir('../../insar')
        create_xml(ionrectfile, width3, length3, 'float')
    #now we are in 'insar'

    ############################################################
    # STEP 4. correct interferogram
    ############################################################
    from isceobj.Alos2Proc.Alos2ProcPublic import renameFile
    from isceobj.Alos2Proc.Alos2ProcPublic import runCmd

    if self.applyIon:
        print('\ncorrect interferogram')
        # Keep the original (uncorrected) interferogram; only rename once so the
        # step can be re-run without clobbering it.
        if os.path.isfile(self._insar.multilookDifferentialInterferogramOriginal):
            print('original interferogram: {} is already here, do not rename: {}'.format(self._insar.multilookDifferentialInterferogramOriginal, self._insar.multilookDifferentialInterferogram))
        else:
            print('renaming {} to {}'.format(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal))
            renameFile(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal)

        # Subtract ionospheric phase: out = a * exp(-j*b).
        cmd = "imageMath.py -e='a*exp(-1.0*J*b)' --a={} --b={} -s BIP -t cfloat -o {}".format(
            self._insar.multilookDifferentialInterferogramOriginal,
            self._insar.multilookIon,
            self._insar.multilookDifferentialInterferogram)
        runCmd(cmd)
    else:
        # Fixed typo in the message: "interfeorgram" -> "interferogram".
        print('\nionospheric phase estimation finished, but correction of interferogram not requested')

    os.chdir('../')

    catalog.printToLog(logger, "runIonCorrect")
    self._insar.procDoc.addAllFromCatalog(catalog)

View File

@ -15,6 +15,10 @@ logger = logging.getLogger('isce.alos2insar.runIonFilt')
def runIonFilt(self):
'''compute and filter ionospheric phase
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
@ -110,56 +114,71 @@ def runIonFilt(self):
############################################################
# STEP 2. filter ionospheric phase
############################################################
import scipy.signal as ss
#################################################
#SET PARAMETERS HERE
#if applying polynomial fitting
#False: no fitting, True: with fitting
#fit and filter ionosphere
fit = self.fitIon
#gaussian filtering window size
filt = self.filtIon
fitAdaptive = self.fitAdaptiveIon
filtSecondary = self.filtSecondaryIon
if (fit == False) and (filt == False):
raise Exception('either fit ionosphere or filt ionosphere should be True when doing ionospheric correction\n')
#filtering window size
size_max = self.filteringWinsizeMaxIon
size_min = self.filteringWinsizeMinIon
size_secondary = self.filteringWinsizeSecondaryIon
if size_min > size_max:
print('\n\nWARNING: minimum window size for filtering ionosphere phase {} > maximum window size {}'.format(size_min, size_max))
print(' re-setting maximum window size to {}\n\n'.format(size_min))
size_max = size_min
if size_secondary % 2 != 1:
size_secondary += 1
print('window size of secondary filtering of ionosphere phase should be odd, window size changed to {}'.format(size_secondary))
if size_min >= size_max:
print('\n\nWARNING: minimum window size for filtering ionosphere phase {} >= maximum window size {}'.format(size_min, size_max))
print(' resetting maximum window size to {}\n\n'.format(size_min+5))
size_max = size_min + 5
#coherence threshold for fitting a polynomial
corThresholdFit = 0.25
#THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self)
#corThresholdFit = 0.85
#Now changed to use lower band coherence. crl, 23-apr-2020.
useDiffCoherence = False
if useDiffCoherence:
#parameters for using diff coherence
corfile = 'diff'+ml2+'.cor'
corThresholdFit = 0.95
# 1 is not good for low coherence case, changed to 20
#corOrderFit = 1
corOrderFit = 20
corOrderFilt = 14
#ionospheric phase standard deviation after filtering
if self.filterStdIon is not None:
std_out0 = self.filterStdIon
else:
#parameters for using lower/upper band coherence
corfile = subbandPrefix[0]+ml2+'.cor'
corThresholdFit = 0.4
corOrderFit = 10
corOrderFilt = 4
if referenceTrack.operationMode == secondaryTrack.operationMode:
from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict
std_out0 = modeProcParDict['ALOS-2'][referenceTrack.operationMode]['filterStdIon']
else:
from isceobj.Alos2Proc.Alos2ProcPublic import filterStdPolyIon
std_out0 = np.polyval(filterStdPolyIon, referenceTrack.frames[0].swaths[0].rangeBandwidth/(1e6))
#std_out0 = 0.1
#################################################
print('\nfiltering ionosphere')
#input files
ionfile = 'ion'+ml2+'.ion'
#corfile = 'diff'+ml2+'.cor'
corLowerfile = subbandPrefix[0]+ml2+'.cor'
corUpperfile = subbandPrefix[1]+ml2+'.cor'
#output files
ionfiltfile = 'filt_ion'+ml2+'.ion'
stdfiltfile = 'filt_ion'+ml2+'.std'
windowsizefiltfile = 'filt_ion'+ml2+'.win'
#read data
img = isceobj.createImage()
img.load(ionfile + '.xml')
width = img.width
length = img.length
#ion = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
ion = np.fromfile(ionfile, dtype=np.float32).reshape(length, width)
cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
#amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
corLower = (np.fromfile(corLowerfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
corUpper = (np.fromfile(corUpperfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
cor = (corLower + corUpper) / 2.0
index = np.nonzero(np.logical_or(corLower==0, corUpper==0))
cor[index] = 0
del corLower, corUpper
#masked out user-specified areas
if self.maskedAreasIon != None:
@ -172,7 +191,7 @@ def runIonFilt(self):
cor[np.nonzero(cor<0)] = 0.0
cor[np.nonzero(cor>1)] = 0.0
#remove water body
#remove water body. Not helpful, just leave it here
wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width)
cor[np.nonzero(wbd==-1)] = 0.0
@ -183,141 +202,121 @@ def runIonFilt(self):
# wbd = np.fromfile(waterBodyFile, dtype=np.int8).reshape(length, width)
# cor[np.nonzero(wbd!=0)] = 0.00001
if fit:
import copy
wgt = copy.deepcopy(cor)
wgt[np.nonzero(wgt<corThresholdFit)] = 0.0
ion_fit = weight_fitting(ion, wgt**corOrderFit, width, length, 1, 1, 1, 1, 2)
ion -= ion_fit * (ion!=0)
#minimize the effect of low coherence pixels
#cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001
#filt = adaptive_gaussian(ion, cor, size_max, size_min)
#cor**14 should be a good weight to use. 22-APR-2018
filt = adaptive_gaussian(ion, cor**corOrderFilt, size_max, size_min)
#filt = adaptive_gaussian_v0(ion, cor**corOrderFilt, size_max, size_min)
#1. compute number of looks
azimuthBandwidth = 0
for i, frameNumber in enumerate(self._insar.referenceFrames):
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
#azimuthBandwidth += 2270.575 * 0.85
azimuthBandwidth += referenceTrack.frames[i].swaths[j].azimuthBandwidth
azimuthBandwidth = azimuthBandwidth / (len(self._insar.referenceFrames)*(self._insar.endingSwath-self._insar.startingSwath+1))
#azimuth number of looks should also apply to burst mode
#assume range bandwidth of subband image is 1/3 of orginal range bandwidth, as in runIonSubband.py!!!
numberOfLooks = referenceTrack.azimuthLineInterval * self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon / (1.0/azimuthBandwidth) *\
referenceTrack.frames[0].swaths[0].rangeBandwidth / 3.0 / referenceTrack.rangeSamplingRate * self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon
#consider also burst characteristics. In ScanSAR-stripmap interferometry, azimuthBandwidth is from referenceTrack (ScanSAR)
if self._insar.modeCombination in [21, 31]:
numberOfLooks /= 5.0
if self._insar.modeCombination in [22, 32]:
numberOfLooks /= 7.0
if self._insar.modeCombination in [21]:
numberOfLooks *= (self._insar.burstSynchronization/100.0)
#numberOfLooks checked
print('number of looks to be used for computing subband interferogram standard deviation: {}'.format(numberOfLooks))
catalog.addItem('number of looks of subband interferograms', numberOfLooks, 'runIonFilt')
#2. compute standard deviation of the raw ionospheric phase
#f0 same as in runIonSubband.py!!!
def ion_std(fl, fu, numberOfLooks, cor):
    '''
    Standard deviation of the ionospheric phase derived from the
    split-spectrum combination of the two subband interferograms.

    fl:            lower band center frequency
    fu:            upper band center frequency
    numberOfLooks: effective number of looks of the subband interferograms
    cor:           coherence (numpy array); zero-coherence pixels yield 0
    '''
    f0 = 0.5 * (fl + fu)
    zero = (cor == 0)
    # Cramer-Rao phase variance of a single interferogram; the "+ zero"
    # term keeps the denominator nonzero where coherence is exactly 0.
    interferogramVar = (1.0 - cor**2) / (2.0 * numberOfLooks * cor**2 + zero)
    scale = fl * fu / f0 / (fu**2 - fl**2)
    std = scale * np.sqrt(fu**2 * interferogramVar + fl**2 * interferogramVar)
    std[np.nonzero(zero)] = 0
    return std
std = ion_std(fl, fu, numberOfLooks, cor)
#3. compute minimum filter window size for given coherence and standard deviation of filtered ionospheric phase
cor2 = np.linspace(0.1, 0.9, num=9, endpoint=True)
std2 = ion_std(fl, fu, numberOfLooks, cor2)
std_out2 = np.zeros(cor2.size)
win2 = np.zeros(cor2.size, dtype=np.int32)
for i in range(cor2.size):
for size in range(9, 10001, 2):
#this window must be the same as those used in adaptive_gaussian!!!
gw = gaussian(size, size/2.0, scale=1.0)
scale = 1.0 / np.sum(gw / std2[i]**2)
std_out2[i] = scale * np.sqrt(np.sum(gw**2 / std2[i]**2))
win2[i] = size
if std_out2[i] <= std_out0:
break
print('if ionospheric phase standard deviation <= {} rad, minimum filtering window size required:'.format(std_out0))
print('coherence window size')
print('************************')
for x, y in zip(cor2, win2):
print(' %5.2f %5d'%(x, y))
print()
catalog.addItem('coherence value', cor2, 'runIonFilt')
catalog.addItem('minimum filter window size', win2, 'runIonFilt')
#4. filter interferogram
#fit ionosphere
if fit:
filt += ion_fit * (filt!=0)
#prepare weight
wgt = std**2
wgt[np.nonzero(cor<corThresholdFit)] = 0
index = np.nonzero(wgt!=0)
wgt[index] = 1.0/(wgt[index])
#fit
ion_fit, coeff = polyfit_2d(ion, wgt, 2)
ion -= ion_fit * (ion!=0)
#filter the rest of the ionosphere
if filt:
(ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive)
if filtSecondary:
print('applying secondary filtering with window size {}'.format(size_secondary))
g2d = gaussian(size_secondary, size_secondary/2.0, scale=1.0)
scale = ss.fftconvolve((ion_filt!=0), g2d, mode='same')
ion_filt = (ion_filt!=0) * ss.fftconvolve(ion_filt, g2d, mode='same') / (scale + (scale==0))
catalog.addItem('standard deviation of filtered ionospheric phase', std_out0, 'runIonFilt')
# ion = np.zeros((length*2, width), dtype=np.float32)
# ion[0:length*2:2, :] = amp
# ion[1:length*2:2, :] = filt
# ion.astype(np.float32).tofile(ionfiltfile)
# img.filename = ionfiltfile
# img.extraFilename = ionfiltfile + '.vrt'
# img.renderHdr()
#get final results
if (fit == True) and (filt == True):
ion_final = ion_filt + ion_fit * (ion_filt!=0)
elif (fit == True) and (filt == False):
ion_final = ion_fit
elif (fit == False) and (filt == True):
ion_final = ion_filt
else:
ion_final = ion
filt.astype(np.float32).tofile(ionfiltfile)
#output results
ion_final.astype(np.float32).tofile(ionfiltfile)
create_xml(ionfiltfile, width, length, 'float')
if filt == True:
std_out.astype(np.float32).tofile(stdfiltfile)
create_xml(stdfiltfile, width, length, 'float')
window_size_out.astype(np.float32).tofile(windowsizefiltfile)
create_xml(windowsizefiltfile, width, length, 'float')
############################################################
# STEP 3. resample ionospheric phase
############################################################
from contrib.alos2proc_f.alos2proc_f import rect
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from scipy.interpolate import interp1d
import shutil
#################################################
#SET PARAMETERS HERE
#interpolation method
interpolationMethod = 1
#################################################
print('\ninterpolate ionosphere')
ml3 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooks2,
self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2)
ionfiltfile = 'filt_ion'+ml2+'.ion'
#ionrectfile = 'filt_ion'+ml3+'.ion'
ionrectfile = self._insar.multilookIon
img = isceobj.createImage()
img.load(ionfiltfile + '.xml')
width2 = img.width
length2 = img.length
img = isceobj.createImage()
img.load(os.path.join('../../', ionDir['insar'], self._insar.multilookDifferentialInterferogram) + '.xml')
width3 = img.width
length3 = img.length
#number of range looks output
nrlo = self._insar.numberRangeLooks1*self._insar.numberRangeLooks2
#number of range looks input
nrli = self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon
#number of azimuth looks output
nalo = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2
#number of azimuth looks input
nali = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon
if (self._insar.numberRangeLooks2 != self._insar.numberRangeLooksIon) or \
(self._insar.numberAzimuthLooks2 != self._insar.numberAzimuthLooksIon):
#this should be faster using fortran
if interpolationMethod == 0:
rect(ionfiltfile, ionrectfile,
width2,length2,
width3,length3,
nrlo/nrli, 0.0,
0.0, nalo/nali,
(nrlo-nrli)/(2.0*nrli),
(nalo-nali)/(2.0*nali),
'REAL','Bilinear')
#finer, but slower method
else:
ionfilt = np.fromfile(ionfiltfile, dtype=np.float32).reshape(length2, width2)
index2 = np.linspace(0, width2-1, num=width2, endpoint=True)
index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli)
ionrect = np.zeros((length3, width3), dtype=np.float32)
for i in range(length2):
f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate")
ionrect[i, :] = f(index3)
index2 = np.linspace(0, length2-1, num=length2, endpoint=True)
index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali)
for j in range(width3):
f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate")
ionrect[:, j] = f(index3)
ionrect.astype(np.float32).tofile(ionrectfile)
del ionrect
create_xml(ionrectfile, width3, length3, 'float')
os.rename(ionrectfile, os.path.join('../../insar', ionrectfile))
os.rename(ionrectfile+'.vrt', os.path.join('../../insar', ionrectfile)+'.vrt')
os.rename(ionrectfile+'.xml', os.path.join('../../insar', ionrectfile)+'.xml')
os.chdir('../../insar')
else:
shutil.copyfile(ionfiltfile, os.path.join('../../insar', ionrectfile))
os.chdir('../../insar')
create_xml(ionrectfile, width3, length3, 'float')
#now we are in 'insar'
############################################################
# STEP 4. correct interferogram
############################################################
from isceobj.Alos2Proc.Alos2ProcPublic import renameFile
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
if self.applyIon:
print('\ncorrect interferogram')
if os.path.isfile(self._insar.multilookDifferentialInterferogramOriginal):
print('original interferogram: {} is already here, do not rename: {}'.format(self._insar.multilookDifferentialInterferogramOriginal, self._insar.multilookDifferentialInterferogram))
else:
print('renaming {} to {}'.format(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal))
renameFile(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal)
cmd = "imageMath.py -e='a*exp(-1.0*J*b)' --a={} --b={} -s BIP -t cfloat -o {}".format(
self._insar.multilookDifferentialInterferogramOriginal,
self._insar.multilookIon,
self._insar.multilookDifferentialInterferogram)
runCmd(cmd)
else:
print('\nionospheric phase estimation finished, but correction of interfeorgram not requested')
os.chdir('../')
os.chdir('../../')
catalog.printToLog(logger, "runIonFilt")
self._insar.procDoc.addAllFromCatalog(catalog)
@ -372,7 +371,8 @@ def computeIonosphere(lowerUnw, upperUnw, wgt, fl, fu, adjFlag, dispersive):
diff = mv
#adjust phase using a surface
else:
diff = weight_fitting(lowerUnw - upperUnw, wgt, width, length, 1, 1, 1, 1, 2)
#diff = weight_fitting(lowerUnw - upperUnw, wgt, width, length, 1, 1, 1, 1, 2)
diff, coeff = polyfit_2d(lowerUnw - upperUnw, wgt, 2)
flag2 = (lowerUnw!=0)
index2 = np.nonzero(flag2)
@ -403,133 +403,6 @@ def computeIonosphere(lowerUnw, upperUnw, wgt, fl, fu, adjFlag, dispersive):
return ionos
def fit_surface(x, y, z, wgt, order):
    '''
    Fit a 2-d polynomial surface of the given order to scattered points
    using weighted least squares.

    x: x coordinate, a column vector (m x 1)
    y: y coordinate, a column vector (m x 1)
    z: z coordinate (value to fit), a column vector (m x 1)
    wgt: weight of the data points, a column vector (m x 1)
    order: polynomial order, must be >= 1
    return: coefficient column vector; for each total degree i = 0...order
            the terms are ordered x**(i-j) * y**j, j = 0...i,
            i.e. 1, x, y, x**2, x*y, y**2, ...
    '''
    #number of data points
    m = x.shape[0]
    l = np.ones((m, 1), dtype=np.float64)

    if order < 1:
        #order == 1 (a plane) is accepted, so the requirement is "at least 1"
        raise Exception('order must be at least 1.\n')

    #create polynomial design matrix, one column per monomial
    a1 = l
    for i in range(1, order+1):
        for j in range(i+1):
            a1 = np.concatenate((a1, x**(i-j)*y**(j)), axis=1)

    #number of coefficients to be estimated
    n = a1.shape[1]

    #weighted least squares: scale each row by sqrt(wgt). Broadcasting the
    #(m, 1) sqrt(wgt) column replaces np.matlib.repmat (numpy.matlib is
    #deprecated) with identical results.
    a = a1 * np.sqrt(wgt)
    b = z * np.sqrt(wgt)
    #lstsq is robust to singular/rank-deficient cases
    c = np.linalg.lstsq(a, b, rcond=-1)[0]

    #type: <class 'numpy.ndarray'>
    return c
def cal_surface(x, y, c, order):
    '''
    Evaluate a 2-d polynomial surface (coefficients from fit_surface) on a
    regular grid.

    x: x coordinate, a 1-d row vector (width,)
    y: y coordinate, a column vector (length x 1)
    c: coefficients of polynomial from fit_surface
    order: order of polynomial, must be >= 1
    return: (length x width) array of surface values
    '''
    if order < 1:
        #order == 1 is accepted, so the requirement is "at least 1"
        raise Exception('order must be at least 1.\n')

    #number of lines
    length = y.shape[0]
    #number of columns; x is a 1-d row vector, so its shape tuple has one element
    width = x.shape[0]

    #expand the axis vectors to full (length x width) grids
    #(np.tile replaces np.matlib.repmat; numpy.matlib is deprecated)
    x = np.tile(x, (length, 1))
    y = np.tile(y, (1, width))
    z = c[0] * np.ones((length, width), dtype=np.float64)

    #accumulate the monomials in the same order fit_surface produced them
    index = 0
    for i in range(1, order+1):
        for j in range(i+1):
            index += 1
            z += c[index] * x**(i-j)*y**(j)

    return z
def weight_fitting(ionos, weight, width, length, nrli, nali, nrlo, nalo, order):
    '''
    Fit a polynomial surface to multi-looked ionosphere phase estimates and
    evaluate the fitted surface at the output number of looks.

    ionos: input ionospheric phase
    weight: weight
    width: file width
    length: file length
    nrli: number of range looks of the input interferograms
    nali: number of azimuth looks of the input interferograms
    nrlo: number of range looks of the output ionosphere phase
    nalo: number of azimuth looks of the output ionosphere phase
    order: the order of the polynomial for fitting ionosphere phase estimates
    '''
    from isceobj.Alos2Proc.Alos2ProcPublic import create_multi_index2

    lengthi = int(length/nali)
    widthi = int(width/nrli)
    lengtho = int(length/nalo)
    widtho = int(width/nrlo)

    #calculate output index
    rgindex = create_multi_index2(widtho, nrli, nrlo)
    azindex = create_multi_index2(lengtho, nali, nalo)

    #look for data to use: only samples with non-zero phase AND non-zero weight
    flag = (weight!=0)*(ionos!=0)
    point_index = np.nonzero(flag)
    m = point_index[0].shape[0]

    #calculate input index matrix
    #(np.tile replaces np.matlib.repmat; numpy.matlib is deprecated)
    x0 = np.tile(np.arange(widthi), (lengthi, 1))
    y0 = np.tile(np.arange(lengthi).reshape(lengthi, 1), (1, widthi))

    x = x0[point_index].reshape(m, 1)
    y = y0[point_index].reshape(m, 1)
    z = ionos[point_index].reshape(m, 1)
    w = weight[point_index].reshape(m, 1)

    #convert to higher precision type before use
    #(np.asarray with an explicit dtype replaces np.asfarray, which was
    #removed in NumPy 2.0)
    x = np.asarray(x, dtype=np.float64)
    y = np.asarray(y, dtype=np.float64)
    z = np.asarray(z, dtype=np.float64)
    w = np.asarray(w, dtype=np.float64)
    coeff = fit_surface(x, y, z, w, order)

    #convert to higher precision type before use
    rgindex = np.asarray(rgindex, dtype=np.float64)
    azindex = np.asarray(azindex, dtype=np.float64)
    phase_fit = cal_surface(rgindex, azindex.reshape(lengtho, 1), coeff, order)

    #format: widtho, lengtho, single band float32
    #NOTE(review): the array returned here is float64; callers appear to cast
    #to float32 when writing to file — confirm against call sites
    return phase_fit
def gaussian(size, sigma, scale = 1.0):
if size % 2 != 1:
@ -542,7 +415,7 @@ def gaussian(size, sigma, scale = 1.0):
return f2d/np.sum(f2d)
def adaptive_gaussian(ionos, wgt, size_max, size_min):
def adaptive_gaussian_v0(ionos, wgt, size_max, size_min):
'''
This program performs Gaussian filtering with adaptive window size.
ionos: ionosphere
@ -602,6 +475,253 @@ def adaptive_gaussian(ionos, wgt, size_max, size_min):
return out2
def least_sqares(H, S, W=None):
    '''
    Solve the (optionally weighted) linear least squares problem via the
    normal equations.

    #This can make use multiple threads (set environment variable: OMP_NUM_THREADS)

    linear equations: H theta = s
    H: observation matrix (m x n)
    S: observation vector, size m (any shape with m elements)
    W: weight matrix (m x m); None means equal weights
    return: estimated parameters theta as a 1-d array of size n
    '''
    #original code called S.reshape(...) without assigning the result, which
    #is a no-op; assign it so S really becomes an (m, 1) column vector
    S = S.reshape(H.shape[0], 1)
    if W is None:
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(H.transpose(), H))
        Z = np.dot( np.dot(m1, H.transpose()) , S)
    else:
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(np.dot(H.transpose(), W), H))
        Z = np.dot(np.dot(np.dot(m1, H.transpose()), W), S)

    return Z.reshape(Z.size)
def polyfit_2d(data, weight, order):
    '''
    Fit a 2-d polynomial surface to a 2-d matrix by weighted least squares.

    data: input 2-d data
    weight: corresponding 2-d weight (same shape as data)
    order: polynomial order, must be >= 1
    Zero samples in data and weight are OK: they simply contribute zero weight.
    return: (fitted surface, same shape as data; coefficient 1-d array)
    '''
    if order < 1:
        raise Exception('order must >= 1!\n')
    if data.shape != weight.shape:
        raise Exception('data and weight must be of same size!\n')

    nlines, ncols = data.shape
    nsamples = data.size

    #flattened sample coordinates (column index = x, row index = y) and values
    rows, cols = np.indices((nlines, ncols))
    xs = cols.flatten()
    ys = rows.flatten()
    obs = data.flatten()
    wsqrt = np.sqrt(weight.flatten())

    #design matrix H for the linear system H theta = s: one column per
    #monomial x**(i-j) * y**j, total degree i = 0...order
    columns = [np.ones(nsamples)]
    for degree in range(1, order+1):
        for j in range(degree+1):
            columns.append(xs**(degree-j) * ys**j)
    design = np.stack(columns, axis=1)

    #weighted least squares; lstsq is robust to singular/rank-deficient cases
    coeff = np.linalg.lstsq(design * wsqrt[:, None], obs * wsqrt, rcond=-1)[0]

    #evaluate the fitted surface on the full grid
    data_fit = np.dot(design, coeff).reshape(nlines, ncols)

    return (data_fit, coeff)
def adaptive_gaussian(data, std, size_min, size_max, std_out0, fit=True):
    '''
    This program performs Gaussian filtering with adaptive window size.
    Cunren Liang, 11-JUN-2020

    For each pixel, the smallest Gaussian window (between size_min and
    size_max) whose predicted output standard deviation is <= std_out0 is
    selected; the pixel is then filtered with that window, optionally after
    removing a local 2-d polynomial fit (which is added back afterwards).

    data: input raw data, numpy array
    std: standard deviation of raw data, numpy array (same shape as data)
    size_min: minimum filter window size
    size_max: maximum filter window size (size_min <= size_max, size_min == size_max is allowed)
    std_out0: desired standard deviation of output data
    fit: whether do fitting before gaussian filtering
    return: (filtered data, output standard deviation, window size used per pixel)

    NOTE(review): zero-valued samples in data/std are zeroed IN PLACE in the
    caller's arrays. Relies on sibling functions gaussian() and polyfit_2d()
    defined elsewhere in this module.
    '''
    import scipy.signal as ss

    (length, width) = data.shape

    #assume zero-value samples are invalid; zero both arrays consistently
    index = np.nonzero(np.logical_or(data==0, std==0))
    data[index] = 0
    std[index] = 0
    #compute weight using standard deviation (inverse variance);
    #the (std==0) term avoids division by zero, then invalid samples get weight 0
    wgt = 1.0 / (std**2 + (std==0))
    wgt[index] = 0

    #compute number of gaussian filters; window sizes are forced to be odd
    if size_min > size_max:
        raise Exception('size_min: {} > size_max: {}\n'.format(size_min, size_max))

    if size_min % 2 == 0:
        size_min += 1
    if size_max % 2 == 0:
        size_max += 1

    size_num = int((size_max - size_min) / 2 + 1)
    #'size_num == 1' is checked to be OK starting from here

    #create gaussian filters, one per candidate window size
    print('compute Gaussian filters\n')
    gaussian_filters = []
    for i in range(size_num):
        size = int(size_min + i * 2)
        gaussian_filters.append(gaussian(size, size/2.0, scale=1.0))

    #compute standard deviation after filtering corresponding to each of gaussian_filters
    #if value is 0, there is no valid sample in the gaussian window
    print('compute standard deviation after filtering for each filtering window size')
    std_filt = np.zeros((length, width, size_num))
    for i in range(size_num):
        size = int(size_min + i * 2)
        print('current window size: %4d, min window size: %4d, max window size: %4d' % (size, size_min, size_max), end='\r', flush=True)
        #robust zero value detector. non-zero convolution result at least >= 1, so can use 0.5
        #as threshold to detect zero-value result
        index = np.nonzero(ss.fftconvolve(wgt!=0, gaussian_filters[i]!=0, mode='same') < 0.5)
        scale = ss.fftconvolve(wgt, gaussian_filters[i], mode='same')
        scale[index] = 0
        #variance of resulting filtered sample
        var_filt = ss.fftconvolve(wgt, gaussian_filters[i]**2, mode='same') / (scale**2 + (scale==0))
        var_filt[index] = 0
        std_filt[:, :, i] = np.sqrt(var_filt)
    print('\n')

    #find gaussian window size (3rd-dimension index of the window size in gaussian_filters)
    #if value is -1, there is no valid sample in any of the gaussian windows
    #and therefore no filtering in the next step is needed
    print('find Gaussian window size to use')
    gaussian_index = np.zeros((length, width), dtype=np.int32)
    std_filt2 = np.zeros((length, width))
    for i in range(length):
        if (((i+1)%50) == 0):
            print('processing line %6d of %6d' % (i+1, length), end='\r', flush=True)
        for j in range(width):
            if np.sum(std_filt[i, j, :]) == 0:
                gaussian_index[i, j] = -1
            else:
                #default to the largest window, then take the smallest window
                #that already meets the requested output standard deviation
                gaussian_index[i, j] = size_num - 1
                for k in range(size_num):
                    if (std_filt[i, j, k] != 0) and (std_filt[i, j, k] <= std_out0):
                        gaussian_index[i, j] = k
                        break
            if gaussian_index[i, j] != -1:
                std_filt2[i, j] = std_filt[i, j, gaussian_index[i, j]]
    del std_filt
    print("processing line %6d of %6d\n" % (length, length))

    #adaptive gaussian filtering
    print('filter image')
    data_out = np.zeros((length, width))
    std_out = np.zeros((length, width))
    window_size_out = np.zeros((length, width), dtype=np.int16)
    for i in range(length):
        #if (((i+1)%5) == 0):
        print('processing line %6d of %6d' % (i+1, length), end='\r', flush=True)
        for j in range(width):
            #if value is -1, there is no valid sample in any of the gaussian windows
            #and therefore no filtering in the next step is needed
            if gaussian_index[i, j] == -1:
                continue

            #1. extract data
            size = int(size_min + gaussian_index[i, j] * 2)
            size_half = int((size - 1) / 2)
            window_size_out[i, j] = size

            #index in original data (clipped at the image borders)
            first_line = max(i-size_half, 0)
            last_line = min(i+size_half, length-1)
            first_column = max(j-size_half, 0)
            last_column = min(j+size_half, width-1)
            length_valid = last_line - first_line + 1
            width_valid = last_column - first_column + 1

            #index in filter window: keep the window aligned with the pixel
            #when the extraction was clipped at an image border
            if first_line == 0:
                last_line2 = size - 1
                first_line2 = last_line2 - (length_valid - 1)
            else:
                first_line2 = 0
                last_line2 = first_line2 + (length_valid - 1)
            if first_column == 0:
                last_column2 = size - 1
                first_column2 = last_column2 - (width_valid - 1)
            else:
                first_column2 = 0
                last_column2 = first_column2 + (width_valid - 1)

            #prepare data and weight within the window
            data_window = np.zeros((size, size))
            wgt_window = np.zeros((size, size))
            data_window[first_line2:last_line2+1, first_column2:last_column2+1] = data[first_line:last_line+1, first_column:last_column+1]
            wgt_window[first_line2:last_line2+1, first_column2:last_column2+1] = wgt[first_line:last_line+1, first_column:last_column+1]
            #number of valid samples in the filtering window
            n_valid = np.sum(data_window!=0)

            #2. fit: remove a local 2-d polynomial trend before filtering
            #order, n_coeff = (1, 3)
            order, n_coeff = (2, 6)
            if fit:
                #must have enough samples to do fitting
                #even if order is 2, n_coeff * 3 is much smaller than size_min*size_min in most cases.
                if n_valid > n_coeff * 3:
                    #data_fit = weight_fitting(data_window, wgt_window, size, size, 1, 1, 1, 1, order)
                    data_fit, coeff = polyfit_2d(data_window, wgt_window, order)
                    index = np.nonzero(data_window!=0)
                    data_window[index] -= data_fit[index]

            #3. filter with the selected Gaussian window (weights normalized to sum 1)
            wgt_window_2 = wgt_window * gaussian_filters[gaussian_index[i, j]]
            scale = 1.0/np.sum(wgt_window_2)
            wgt_window_2 *= scale
            data_out[i, j] = np.sum(wgt_window_2 * data_window)
            #std_out[i, j] = scale * np.sqrt(np.sum(wgt_window*(gaussian_filters[gaussian_index[i, j]]**2)))
            #already computed
            std_out[i, j] = std_filt2[i, j]
            #print('std_out[i, j], std_filt2[i, j]', std_out[i, j], std_filt2[i, j])

            #4. add back filtered value (the removed polynomial trend at the pixel)
            if fit:
                if n_valid > n_coeff * 3:
                    data_out[i, j] += data_fit[size_half, size_half]
    print('\n')

    return (data_out, std_out, window_size_out)
def reformatMaskedAreas(maskedAreas, length, width):
'''
reformat masked areas coordinates that are ready to use
@ -633,3 +753,5 @@ def reformatMaskedAreas(maskedAreas, length, width):
raise Exception('area {} masked out in ionospheric phase estimation not correct'.format(i+1))
return maskedAreasReformated

View File

@ -14,6 +14,10 @@ logger = logging.getLogger('isce.alos2insar.runIonSubband')
def runIonSubband(self):
'''create subband interferograms
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
@ -296,30 +300,39 @@ def runIonSubband(self):
#list of input files
inputInterferograms = []
inputAmplitudes = []
phaseDiff = [None]
#phaseDiff = [None]
swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon]
phaseDiff = swathPhaseDiffIon[k]
if swathPhaseDiffIon[k] is None:
phaseDiff = None
else:
phaseDiff = swathPhaseDiffIon[k][i]
phaseDiff.insert(0, None)
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
swathDir = 's{}'.format(swathNumber)
inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram))
inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude))
#compute phase needed to be compensated using startingRange
if j >= 1:
#phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k]
#phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k]
phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
-4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
-4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \
referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange:
#phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1)
#if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m),
#it should be OK to do the above.
#see results in neom where it meets the above requirement, but there is still phase diff
#to be less risky, we do not input values here
phaseDiff.append(None)
else:
phaseDiff.append(None)
if False:
#compute phase needed to be compensated using startingRange
if j >= 1:
#phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k]
#phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k]
phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
-4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
-4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \
referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange:
#phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1)
#if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m),
#it should be OK to do the above.
#see results in neom where it meets the above requirement, but there is still phase diff
#to be less risky, we do not input values here
phaseDiff.append(None)
else:
phaseDiff.append(None)
#note that frame parameters are updated after mosaicking, here no need to update parameters
#mosaic amplitudes
@ -329,6 +342,17 @@ def runIonSubband(self):
#These are for ALOS-2, may need to change for ALOS-4!
phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857]
if False:
if (referenceTrack.frames[i].processingSoftwareVersion == '2.025' and secondaryTrack.frames[i].processingSoftwareVersion == '2.023') or \
(referenceTrack.frames[i].processingSoftwareVersion == '2.023' and secondaryTrack.frames[i].processingSoftwareVersion == '2.025'):
# changed value number of samples to estimate new value new values estimate area
###########################################################################################################################
# 2.6766909968024932-->2.6581660335779866 1808694 d169-f2850, north CA
# 2.179664007520499 -->2.204125866652153 131120 d169-f2850, north CA
phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.204125866652153, 2.6581660335779866, 3.130810857]
snapThreshold = 0.2
#the above preparetions only applies to 'self._insar.modeCombination == 21'
@ -338,24 +362,36 @@ def runIonSubband(self):
phaseDiffFixed = None
snapThreshold = None
(phaseDiffEst, phaseDiffUsed, phaseDiffSource) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram,
#whether snap for each swath
if self.swathPhaseDiffSnapIon == None:
snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)]
else:
snapSwath = self.swathPhaseDiffSnapIon
if len(snapSwath) != numberOfFrames:
raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values')
for iii in range(numberOfFrames):
if len(snapSwath[iii]) != (numberOfSwaths-1):
raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values')
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False,
phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, pcRangeLooks=1, pcAzimuthLooks=4,
phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=4,
filt=False, resamplingMethod=1)
#the first item is meaningless for all the following list, so only record the following items
if phaseDiff == None:
phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)]
catalog.addItem('{} subswath phase difference input'.format(ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband')
catalog.addItem('{} subswath phase difference estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
catalog.addItem('{} subswath phase difference used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
catalog.addItem('{} subswath phase difference used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
catalog.addItem('frame {} {} band swath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband')
catalog.addItem('frame {} {} band swath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
catalog.addItem('frame {} {} band swath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
catalog.addItem('frame {} {} band swath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
catalog.addItem('frame {} {} band swath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')
#check if there is value around 3.130810857, which may not be stable
phaseDiffUnstableExist = False
for xxx in phaseDiffUsed:
if abs(abs(xxx) - 3.130810857) < 0.2:
phaseDiffUnstableExist = True
catalog.addItem('{} subswath phase difference unstable exists'.format(ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband')
catalog.addItem('frame {} {} band swath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband')
create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int')
@ -426,13 +462,18 @@ def runIonSubband(self):
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=False, resamplingMethod=0)
#mosaic interferograms
frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=True, resamplingMethod=1)
create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')
catalog.addItem('{} band frame phase diff estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
catalog.addItem('{} band frame phase diff used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')
#update secondary parameters here, no need to update secondary parameters here
os.chdir('../')

View File

@ -4,6 +4,7 @@
#
import os
import shutil
import logging
import datetime
import numpy as np
@ -15,6 +16,10 @@ logger = logging.getLogger('isce.alos2insar.runIonUwrap')
def runIonUwrap(self):
'''unwrap subband interferograms
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
@ -24,7 +29,17 @@ def runIonUwrap(self):
return
referenceTrack = self._insar.loadTrack(reference=True)
secondaryTrack = self._insar.loadTrack(reference=False)
#secondaryTrack = self._insar.loadTrack(reference=False)
ionUwrap(self, referenceTrack)
os.chdir('../../')
catalog.printToLog(logger, "runIonUwrap")
self._insar.procDoc.addAllFromCatalog(catalog)
def ionUwrap(self, referenceTrack, latLonDir=None):
wbdFile = os.path.abspath(self._insar.wbd)
from isceobj.Alos2Proc.runIonSubband import defineIonDir
@ -73,8 +88,14 @@ def runIonUwrap(self):
#water body
if k == 0:
look(os.path.join(fullbandDir, self._insar.latitude), 'lat'+ml2+'.lat', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1)
look(os.path.join(fullbandDir, self._insar.longitude), 'lon'+ml2+'.lon', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1)
if latLonDir is None:
latFile = os.path.join(fullbandDir, self._insar.latitude)
lonFile = os.path.join(fullbandDir, self._insar.longitude)
else:
latFile = os.path.join('../../', latLonDir, self._insar.latitude)
lonFile = os.path.join('../../', latLonDir, self._insar.longitude)
look(latFile, 'lat'+ml2+'.lat', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1)
look(lonFile, 'lon'+ml2+'.lon', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1)
create_xml('lat'+ml2+'.lat', width2, length2, 'double')
create_xml('lon'+ml2+'.lon', width2, length2, 'double')
waterBodyRadar('lat'+ml2+'.lat', 'lon'+ml2+'.lon', wbdFile, 'wbd'+ml2+'.wbd')
@ -132,8 +153,9 @@ def runIonUwrap(self):
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from mroipac.icu.Icu import Icu
if self.filterSubbandInt:
for k in range(2):
for k in range(2):
#1. filtering subband interferogram
if self.filterSubbandInt:
toBeFiltered = 'tmp.int'
if self.removeMagnitudeBeforeFilteringSubbandInt:
cmd = "imageMath.py -e='a/(abs(a)+(a==0))' --a={} -o {} -t cfloat -s BSQ".format(subbandPrefix[k]+ml2+'.int', toBeFiltered)
@ -156,45 +178,50 @@ def runIonUwrap(self):
os.remove(toBeFiltered + '.vrt')
os.remove(toBeFiltered + '.xml')
#create phase sigma for phase unwrapping
#recreate filtered image
filtImage = isceobj.createIntImage()
filtImage.load('filt_'+subbandPrefix[k]+ml2+'.int' + '.xml')
filtImage.setAccessMode('read')
filtImage.createImage()
toBeUsedInPhsig = 'filt_'+subbandPrefix[k]+ml2+'.int'
else:
toBeUsedInPhsig = subbandPrefix[k]+ml2+'.int'
#amplitude image
ampImage = isceobj.createAmpImage()
ampImage.load(subbandPrefix[k]+ml2+'.amp' + '.xml')
ampImage.setAccessMode('read')
ampImage.createImage()
#2. create phase sigma for phase unwrapping
#recreate filtered image
filtImage = isceobj.createIntImage()
filtImage.load(toBeUsedInPhsig + '.xml')
filtImage.setAccessMode('read')
filtImage.createImage()
#phase sigma correlation image
phsigImage = isceobj.createImage()
phsigImage.setFilename(subbandPrefix[k]+ml2+'.phsig')
phsigImage.setWidth(width)
phsigImage.dataType='FLOAT'
phsigImage.bands = 1
phsigImage.setImageType('cor')
phsigImage.setAccessMode('write')
phsigImage.createImage()
#amplitude image
ampImage = isceobj.createAmpImage()
ampImage.load(subbandPrefix[k]+ml2+'.amp' + '.xml')
ampImage.setAccessMode('read')
ampImage.createImage()
icu = Icu(name='insarapp_filter_icu')
icu.configure()
icu.unwrappingFlag = False
icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage)
#phase sigma correlation image
phsigImage = isceobj.createImage()
phsigImage.setFilename(subbandPrefix[k]+ml2+'.phsig')
phsigImage.setWidth(filtImage.width)
phsigImage.dataType='FLOAT'
phsigImage.bands = 1
phsigImage.setImageType('cor')
phsigImage.setAccessMode('write')
phsigImage.createImage()
phsigImage.renderHdr()
icu = Icu(name='insarapp_filter_icu')
icu.configure()
icu.unwrappingFlag = False
icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage)
filtImage.finalizeImage()
ampImage.finalizeImage()
phsigImage.finalizeImage()
phsigImage.renderHdr()
filtImage.finalizeImage()
ampImage.finalizeImage()
phsigImage.finalizeImage()
############################################################
# STEP 4. phase unwrapping
############################################################
from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrap
from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrapOriginal
for k in range(2):
tmid = referenceTrack.sensingStart + datetime.timedelta(seconds=(self._insar.numberAzimuthLooks1-1.0)/2.0*referenceTrack.azimuthLineInterval+
@ -207,16 +234,24 @@ def runIonUwrap(self):
toBeUnwrapped = subbandPrefix[k]+ml2+'.int'
coherenceFile = 'diff'+ml2+'.cor'
snaphuUnwrap(referenceTrack, tmid,
toBeUnwrapped,
coherenceFile,
subbandPrefix[k]+ml2+'.unw',
self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon,
costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True)
os.chdir('../../')
catalog.printToLog(logger, "runIonUwrap")
self._insar.procDoc.addAllFromCatalog(catalog)
#if shutil.which('snaphu') != None:
#do not use original snaphu now
if False:
print('\noriginal snaphu program found')
print('unwrap {} using original snaphu, rather than that in ISCE'.format(toBeUnwrapped))
snaphuUnwrapOriginal(toBeUnwrapped,
subbandPrefix[k]+ml2+'.phsig',
subbandPrefix[k]+ml2+'.amp',
subbandPrefix[k]+ml2+'.unw',
costMode = 's',
initMethod = 'mcf',
snaphuConfFile = '{}_snaphu.conf'.format(subbandPrefix[k]))
else:
snaphuUnwrap(referenceTrack, tmid,
toBeUnwrapped,
coherenceFile,
subbandPrefix[k]+ml2+'.unw',
self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon,
costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True)

View File

@ -17,6 +17,10 @@ logger = logging.getLogger('isce.alos2insar.runLook')
def runLook(self):
'''take looks
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -15,6 +15,7 @@ from isceobj.Planet.Planet import Planet
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict
logger = logging.getLogger('isce.alos2insar.runPreprocessor')
@ -110,81 +111,20 @@ def runPreprocessor(self):
self._insar.numberRangeLooksIon = self.numberRangeLooksIon
self._insar.numberAzimuthLooksIon = self.numberAzimuthLooksIon
if self._insar.numberRangeLooks1 == None:
if referenceMode in ['SBS']:
self._insar.numberRangeLooks1 = 2
elif referenceMode in ['UBS', 'UBD']:
self._insar.numberRangeLooks1 = 2
elif referenceMode in ['HBS', 'HBD', 'HBQ']:
self._insar.numberRangeLooks1 = 2
elif referenceMode in ['FBS', 'FBD', 'FBQ']:
self._insar.numberRangeLooks1 = 2
elif referenceMode in ['WBS', 'WBD']:
self._insar.numberRangeLooks1 = 1
elif referenceMode in ['WWS', 'WWD']:
self._insar.numberRangeLooks1 = 2
elif referenceMode in ['VBS', 'VBD']:
self._insar.numberRangeLooks1 = 1
else:
raise Exception('unknow acquisition mode')
if self._insar.numberRangeLooks1 is None:
self._insar.numberRangeLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks1']
if self._insar.numberAzimuthLooks1 is None:
self._insar.numberAzimuthLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks1']
if self._insar.numberAzimuthLooks1 == None:
if referenceMode in ['SBS']:
self._insar.numberAzimuthLooks1 = 4
elif referenceMode in ['UBS', 'UBD']:
self._insar.numberAzimuthLooks1 = 2
elif referenceMode in ['HBS', 'HBD', 'HBQ']:
self._insar.numberAzimuthLooks1 = 2
elif referenceMode in ['FBS', 'FBD', 'FBQ']:
self._insar.numberAzimuthLooks1 = 4
elif referenceMode in ['WBS', 'WBD']:
self._insar.numberAzimuthLooks1 = 14
elif referenceMode in ['WWS', 'WWD']:
self._insar.numberAzimuthLooks1 = 14
elif referenceMode in ['VBS', 'VBD']:
self._insar.numberAzimuthLooks1 = 14
else:
raise Exception('unknow acquisition mode')
if self._insar.numberRangeLooks2 is None:
self._insar.numberRangeLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks2']
if self._insar.numberAzimuthLooks2 is None:
self._insar.numberAzimuthLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks2']
if self._insar.numberRangeLooks2 == None:
if referenceMode in spotlightModes:
self._insar.numberRangeLooks2 = 4
elif referenceMode in stripmapModes:
self._insar.numberRangeLooks2 = 4
elif referenceMode in scansarModes:
self._insar.numberRangeLooks2 = 5
else:
raise Exception('unknow acquisition mode')
if self._insar.numberAzimuthLooks2 == None:
if referenceMode in spotlightModes:
self._insar.numberAzimuthLooks2 = 4
elif referenceMode in stripmapModes:
self._insar.numberAzimuthLooks2 = 4
elif referenceMode in scansarModes:
self._insar.numberAzimuthLooks2 = 2
else:
raise Exception('unknow acquisition mode')
if self._insar.numberRangeLooksIon == None:
if referenceMode in spotlightModes:
self._insar.numberRangeLooksIon = 16
elif referenceMode in stripmapModes:
self._insar.numberRangeLooksIon = 16
elif referenceMode in scansarModes:
self._insar.numberRangeLooksIon = 40
else:
raise Exception('unknow acquisition mode')
if self._insar.numberAzimuthLooksIon == None:
if referenceMode in spotlightModes:
self._insar.numberAzimuthLooksIon = 16
elif referenceMode in stripmapModes:
self._insar.numberAzimuthLooksIon = 16
elif referenceMode in scansarModes:
self._insar.numberAzimuthLooksIon = 16
else:
raise Exception('unknow acquisition mode')
if self._insar.numberRangeLooksIon is None:
self._insar.numberRangeLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooksIon']
if self._insar.numberAzimuthLooksIon is None:
self._insar.numberAzimuthLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooksIon']
#define processing file names
@ -335,201 +275,6 @@ def runPreprocessor(self):
self._insar.saveProduct(self.secondary.track, self._insar.secondaryTrackParameter)
##################################################
#2. compute burst synchronization
##################################################
#burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights
#in one frame, real unsynchronized time is the same for all swaths
unsynTime = 0
#real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime))
#synTime = 0
synPercentage = 0
numberOfFrames = len(self._insar.referenceFrames)
numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1
for i, frameNumber in enumerate(self._insar.referenceFrames):
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
referenceSwath = self.reference.track.frames[i].swaths[j]
secondarySwath = self.secondary.track.frames[i].swaths[j]
#using Piyush's code for computing range and azimuth offsets
midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5
midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf)
llh = self.reference.track.orbit.rdr2geo(midSensingStart, midRange)
slvaz, slvrng = self.secondary.track.orbit.geo2rdr(llh)
###Translate to offsets
#note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same
#range pixel size and prf
rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5
azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5
#compute burst synchronization
#burst parameters for ScanSAR wide mode not estimed yet
if self._insar.modeCombination == 21:
scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
#secondary burst start times corresponding to reference burst start times (100% synchronization)
scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \
scburstStartLine + 100000*referenceSwath.burstCycleLength, \
referenceSwath.burstCycleLength)
dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines)
#find the difference with minimum absolute value
unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))]
if np.absolute(unsynLines) >= secondarySwath.burstLength:
synLines = 0
if unsynLines > 0:
unsynLines = secondarySwath.burstLength
else:
unsynLines = -secondarySwath.burstLength
else:
synLines = secondarySwath.burstLength - np.absolute(unsynLines)
unsynTime += unsynLines / referenceSwath.prf
synPercentage += synLines / referenceSwath.burstLength * 100.0
catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runPreprocessor')
############################################################################################
#illustration of the sign of the number of unsynchronized lines (unsynLines)
#The convention is the same as ampcor offset, that is,
# secondaryLineNumber = referenceLineNumber + unsynLines
#
# |-----------------------| ------------
# | | ^
# | | |
# | | | unsynLines < 0
# | | |
# | | \ /
# | | |-----------------------|
# | | | |
# | | | |
# |-----------------------| | |
# Reference Burst | |
# | |
# | |
# | |
# | |
# |-----------------------|
# Secondary Burst
#
#
############################################################################################
##burst parameters for ScanSAR wide mode not estimed yet
elif self._insar.modeCombination == 31:
#scansar is reference
scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
#secondary burst start times corresponding to reference burst start times (100% synchronization)
for k in range(-100000, 100000):
saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k
st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf)
if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1:
secondarySwath.burstStartTime = st_burstx
secondarySwath.burstLength = referenceSwath.burstLength
secondarySwath.burstCycleLength = referenceSwath.burstCycleLength
secondarySwath.swathNumber = referenceSwath.swathNumber
break
#unsynLines = 0
#synLines = referenceSwath.burstLength
#unsynTime += unsynLines / referenceSwath.prf
#synPercentage += synLines / referenceSwath.burstLength * 100.0
catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runPreprocessor')
else:
pass
#overwrite original frame parameter file
if self._insar.modeCombination == 31:
frameDir = 'f{}_{}'.format(i+1, frameNumber)
self._insar.saveProduct(self.secondary.track.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter))
#getting average
if self._insar.modeCombination == 21:
unsynTime /= numberOfFrames*numberOfSwaths
synPercentage /= numberOfFrames*numberOfSwaths
elif self._insar.modeCombination == 31:
unsynTime = 0.
synPercentage = 100.
else:
pass
#record results
if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31):
self._insar.burstUnsynchronizedTime = unsynTime
self._insar.burstSynchronization = synPercentage
catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runPreprocessor')
##################################################
#3. compute baseline
##################################################
#only compute baseline at four corners and center of the reference track
bboxRdr = getBboxRdr(self.reference.track)
rangeMin = bboxRdr[0]
rangeMax = bboxRdr[1]
azimuthTimeMin = bboxRdr[2]
azimuthTimeMax = bboxRdr[3]
azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0)
rangeMid = (rangeMin + rangeMax) / 2.0
points = [[azimuthTimeMin, rangeMin],
[azimuthTimeMin, rangeMax],
[azimuthTimeMax, rangeMin],
[azimuthTimeMax, rangeMax],
[azimuthTimeMid, rangeMid]]
Bpar = []
Bperp = []
#modify Piyush's code for computing baslines
refElp = Planet(pname='Earth').ellipsoid
for x in points:
referenceSV = self.reference.track.orbit.interpolate(x[0], method='hermite')
target = self.reference.track.orbit.rdr2geo(x[0], x[1])
slvTime, slvrng = self.secondary.track.orbit.geo2rdr(target)
secondarySV = self.secondary.track.orbit.interpolateOrbit(slvTime, method='hermite')
targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist())
mxyz = np.array(referenceSV.getPosition())
mvel = np.array(referenceSV.getVelocity())
sxyz = np.array(secondarySV.getPosition())
#to fix abrupt change near zero in baseline grid. JUN-05-2020
mvelunit = mvel / np.linalg.norm(mvel)
sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit
aa = np.linalg.norm(sxyz-mxyz)
costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa)
Bpar.append(aa*costheta)
perp = aa * np.sqrt(1 - costheta*costheta)
direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel))
Bperp.append(direction*perp)
catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runPreprocessor')
catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runPreprocessor')
catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runPreprocessor')
catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runPreprocessor')
catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runPreprocessor')
catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runPreprocessor')
catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runPreprocessor')
catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runPreprocessor')
catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runPreprocessor')
catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runPreprocessor')
##################################################
#4. compute bounding box
##################################################
referenceBbox = getBboxGeo(self.reference.track)
secondaryBbox = getBboxGeo(self.secondary.track)
catalog.addItem('reference bounding box', referenceBbox, 'runPreprocessor')
catalog.addItem('secondary bounding box', secondaryBbox, 'runPreprocessor')
catalog.printToLog(logger, "runPreprocessor")
self._insar.procDoc.addAllFromCatalog(catalog)

View File

@ -14,6 +14,10 @@ logger = logging.getLogger('isce.alos2insar.runRdr2Geo')
def runRdr2Geo(self):
'''compute lat/lon/hgt
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -20,10 +20,20 @@ logger = logging.getLogger('isce.alos2insar.runRdrDemOffset')
def runRdrDemOffset(self):
'''estimate between radar image and dem
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
referenceTrack = self._insar.loadTrack(reference=True)
rdrDemOffset(self, referenceTrack, catalog=catalog)
def rdrDemOffset(self, referenceTrack, catalog=None):
demFile = os.path.abspath(self._insar.dem)
insarDir = 'insar'
@ -96,13 +106,15 @@ def runRdrDemOffset(self):
if (landRatio <= 0.00125):
print('\n\nWARNING: land area too small for estimating offsets between radar and dem')
print('do not estimate offsets between radar and dem\n\n')
self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]
catalog.addItem('warning message', 'land area too small for estimating offsets between radar and dem', 'runRdrDemOffset')
if catalog is not None:
self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]
catalog.addItem('warning message', 'land area too small for estimating offsets between radar and dem', 'runRdrDemOffset')
os.chdir('../../')
catalog.printToLog(logger, "runRdrDemOffset")
self._insar.procDoc.addAllFromCatalog(catalog)
if catalog is not None:
catalog.printToLog(logger, "runRdrDemOffset")
self._insar.procDoc.addAllFromCatalog(catalog)
return
@ -130,8 +142,9 @@ def runRdrDemOffset(self):
if numberOfOffsetsAzimuth < 10:
numberOfOffsetsAzimuth = 10
catalog.addItem('number of range offsets', '{}'.format(numberOfOffsetsRange), 'runRdrDemOffset')
catalog.addItem('number of azimuth offsets', '{}'.format(numberOfOffsetsAzimuth), 'runRdrDemOffset')
if catalog is not None:
catalog.addItem('number of range offsets', '{}'.format(numberOfOffsetsRange), 'runRdrDemOffset')
catalog.addItem('number of azimuth offsets', '{}'.format(numberOfOffsetsAzimuth), 'runRdrDemOffset')
#matching
ampcor = Ampcor(name='insarapp_slcs_ampcor')
@ -247,12 +260,14 @@ def runRdrDemOffset(self):
print('\n\nWARNING: too few points left after culling, {} left'.format(numCullOffsets))
print('do not estimate offsets between radar and dem\n\n')
self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]
catalog.addItem('warning message', 'too few points left after culling, {} left'.format(numCullOffsets), 'runRdrDemOffset')
if catalog is not None:
catalog.addItem('warning message', 'too few points left after culling, {} left'.format(numCullOffsets), 'runRdrDemOffset')
os.chdir('../../')
catalog.printToLog(logger, "runRdrDemOffset")
self._insar.procDoc.addAllFromCatalog(catalog)
if catalog is not None:
catalog.printToLog(logger, "runRdrDemOffset")
self._insar.procDoc.addAllFromCatalog(catalog)
return
@ -277,8 +292,9 @@ def runRdrDemOffset(self):
os.chdir('../../')
catalog.printToLog(logger, "runRdrDemOffset")
self._insar.procDoc.addAllFromCatalog(catalog)
if catalog is not None:
catalog.printToLog(logger, "runRdrDemOffset")
self._insar.procDoc.addAllFromCatalog(catalog)
def simulateRadar(hgtfile, simfile, scale=3.0, offset=100.0):

View File

@ -15,6 +15,10 @@ logger = logging.getLogger('isce.alos2insar.runRectRangeOffset')
def runRectRangeOffset(self):
'''rectify range offset
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -40,13 +40,30 @@ def runSlcMosaic(self):
if len(referenceTrack.frames) > 1:
matchingMode=1
#determine whether reference offset from matching is already done in previous InSAR processing.
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
referenceEstimated = False
else:
if self.frameOffsetMatching == False:
referenceEstimated = False
else:
referenceEstimated = True
else:
if self.frameOffsetMatching == False:
referenceEstimated = False
else:
referenceEstimated = True
#if reference offsets from matching are not already computed
if self.frameOffsetMatching == False:
#if self.frameOffsetMatching == False:
if referenceEstimated == False:
offsetReference = frameOffset(referenceTrack, self._insar.referenceSlc, self._insar.referenceFrameOffset,
crossCorrelation=True, matchingMode=matchingMode)
offsetSecondary = frameOffset(secondaryTrack, self._insar.secondarySlc, self._insar.secondaryFrameOffset,
crossCorrelation=True, matchingMode=matchingMode)
if self.frameOffsetMatching == False:
#if self.frameOffsetMatching == False:
if referenceEstimated == False:
self._insar.frameRangeOffsetMatchingReference = offsetReference[2]
self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3]
self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2]
@ -110,6 +127,43 @@ def runSlcMosaic(self):
secondaryTrack.dopplerVsPixel = secondaryTrack.frames[0].swaths[0].dopplerVsPixel
else:
#in case InSAR, and therefore runSwathMosaic, was not done previously
for i, frameNumber in enumerate(self._insar.referenceFrames):
#update frame parameters
#########################################################
frame = referenceTrack.frames[i]
#mosaic size
frame.numberOfSamples = frame.swaths[0].numberOfSamples
frame.numberOfLines = frame.swaths[0].numberOfLines
#NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
#range parameters
frame.startingRange = frame.swaths[0].startingRange
frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate
frame.rangePixelSize = frame.swaths[0].rangePixelSize
#azimuth parameters
frame.sensingStart = frame.swaths[0].sensingStart
frame.prf = frame.swaths[0].prf
frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize
frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval
#update frame parameters, secondary
#########################################################
frame = secondaryTrack.frames[i]
#mosaic size
frame.numberOfSamples = frame.swaths[0].numberOfSamples
frame.numberOfLines = frame.swaths[0].numberOfLines
#NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
#range parameters
frame.startingRange = frame.swaths[0].startingRange
frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate
frame.rangePixelSize = frame.swaths[0].rangePixelSize
#azimuth parameters
frame.sensingStart = frame.swaths[0].sensingStart
frame.prf = frame.swaths[0].prf
frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize
frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval
#mosaic reference slc
#########################################################
#choose offsets

View File

@ -25,6 +25,11 @@ logger = logging.getLogger('isce.alos2insar.runSlcOffset')
def runSlcOffset(self):
'''estimate SLC offsets
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
print('\nInSAR processing not requested, skip this and the remaining InSAR steps...')
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -17,6 +17,10 @@ logger = logging.getLogger('isce.alos2insar.runSwathMosaic')
def runSwathMosaic(self):
'''mosaic subswaths
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
@ -162,7 +166,7 @@ def runSwathMosaic(self):
self._insar.procDoc.addAllFromCatalog(catalog)
def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateFrame=False, phaseCompensation=False, phaseDiff=None, phaseDiffFixed=None, snapThreshold=None, pcRangeLooks=1, pcAzimuthLooks=4, filt=False, resamplingMethod=0):
def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateFrame=False, phaseCompensation=False, phaseDiff=None, phaseDiffFixed=None, snapThreshold=None, snapSwath=None, pcRangeLooks=1, pcAzimuthLooks=4, filt=False, resamplingMethod=0):
'''
mosaic swaths
@ -181,6 +185,7 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
phaseDiff: pre-computed compensation phase for each swath
phaseDiffFixed: if provided, the estimated value will snap to one of these values, which is nearest to the estimated one.
snapThreshold: this is used with phaseDiffFixed
snapSwath: indicate whether snap to fixed values for each swath phase diff, must be specified if phaseDiffFixed!=None
pcRangeLooks: number of range looks to take when compute swath phase difference
pcAzimuthLooks: number of azimuth looks to take when compute swath phase difference
filt: whether do filtering when compute swath phase difference
@ -193,6 +198,8 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
from isceobj.Alos2Proc.Alos2ProcPublic import multilook
from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence_1
from isceobj.Alos2Proc.Alos2ProcPublic import filterInterferogram
from isceobj.Alos2Proc.Alos2ProcPublic import computePhaseDiff
from isceobj.Alos2Proc.Alos2ProcPublic import snap
numberOfSwaths = len(frame.swaths)
swaths = frame.swaths
@ -208,8 +215,10 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
rectWidth.append( int(swaths[i].numberOfSamples / numberOfRangeLooks) )
rectLength.append( int(swaths[i].numberOfLines / numberOfAzimuthLooks) )
else:
rectWidth.append( int(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) )
rectLength.append( int(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) )
rectWidth.append( round(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) )
rectLength.append( round(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) )
#rectWidth.append( int(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) )
#rectLength.append( int(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) )
#convert original offset to offset for images with looks
#use list instead of np.array to make it consistent with the rest of the code
@ -236,71 +245,80 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
os.remove(rinfs[i])
os.symlink(inf, rinfs[i])
else:
infImg = isceobj.createImage()
infImg.load(inf+'.xml')
rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i])
azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i])
#no need to resample
if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and (abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001):
if os.path.isfile(rinfs[i]):
os.remove(rinfs[i])
os.symlink(inf, rinfs[i])
#all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like
#4.99999999999...
rangeOffsets2[i] = round(rangeOffsets2[i])
azimuthOffsets2[i] = round(azimuthOffsets2[i])
else:
infImg = isceobj.createImage()
infImg.load(inf+'.xml')
rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i])
azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i])
if resamplingMethod == 0:
rect_with_looks(inf,
rinfs[i],
infImg.width, infImg.length,
rectWidth[i], rectLength[i],
rangeScale[i], 0.0,
0.0,azimuthScale[i],
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
1,1,
1,1,
'COMPLEX',
'Bilinear')
elif resamplingMethod == 1:
#decompose amplitude and phase
phaseFile = 'phase'
amplitudeFile = 'amplitude'
data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width)
phase = np.exp(np.complex64(1j) * np.angle(data))
phase[np.nonzero(data==0)] = 0
phase.astype(np.complex64).tofile(phaseFile)
amplitude = np.absolute(data)
amplitude.astype(np.float32).tofile(amplitudeFile)
if resamplingMethod == 0:
rect_with_looks(inf,
rinfs[i],
infImg.width, infImg.length,
rectWidth[i], rectLength[i],
rangeScale[i], 0.0,
0.0,azimuthScale[i],
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
1,1,
1,1,
'COMPLEX',
'Bilinear')
elif resamplingMethod == 1:
#decompose amplitude and phase
phaseFile = 'phase'
amplitudeFile = 'amplitude'
data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width)
phase = np.exp(np.complex64(1j) * np.angle(data))
phase[np.nonzero(data==0)] = 0
phase.astype(np.complex64).tofile(phaseFile)
amplitude = np.absolute(data)
amplitude.astype(np.float32).tofile(amplitudeFile)
#resampling
phaseRectFile = 'phaseRect'
amplitudeRectFile = 'amplitudeRect'
rect_with_looks(phaseFile,
phaseRectFile,
infImg.width, infImg.length,
rectWidth[i], rectLength[i],
rangeScale[i], 0.0,
0.0,azimuthScale[i],
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
1,1,
1,1,
'COMPLEX',
'Sinc')
rect_with_looks(amplitudeFile,
amplitudeRectFile,
infImg.width, infImg.length,
rectWidth[i], rectLength[i],
rangeScale[i], 0.0,
0.0,azimuthScale[i],
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
1,1,
1,1,
'REAL',
'Bilinear')
#resampling
phaseRectFile = 'phaseRect'
amplitudeRectFile = 'amplitudeRect'
rect_with_looks(phaseFile,
phaseRectFile,
infImg.width, infImg.length,
rectWidth[i], rectLength[i],
rangeScale[i], 0.0,
0.0,azimuthScale[i],
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
1,1,
1,1,
'COMPLEX',
'Sinc')
rect_with_looks(amplitudeFile,
amplitudeRectFile,
infImg.width, infImg.length,
rectWidth[i], rectLength[i],
rangeScale[i], 0.0,
0.0,azimuthScale[i],
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
1,1,
1,1,
'REAL',
'Bilinear')
#recombine amplitude and phase
phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(rectLength[i], rectWidth[i])
amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(rectLength[i], rectWidth[i])
(phase*amplitude).astype(np.complex64).tofile(rinfs[i])
#recombine amplitude and phase
phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(rectLength[i], rectWidth[i])
amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(rectLength[i], rectWidth[i])
(phase*amplitude).astype(np.complex64).tofile(rinfs[i])
#tidy up
os.remove(phaseFile)
os.remove(amplitudeFile)
os.remove(phaseRectFile)
os.remove(amplitudeRectFile)
#tidy up
os.remove(phaseFile)
os.remove(amplitudeFile)
os.remove(phaseRectFile)
os.remove(amplitudeRectFile)
#determine output width and length
@ -355,6 +373,8 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
# 2. 'estimated+snap': estimated from subswath overlap and snap to a fixed value
# 3. 'input': pre-computed
# confidence level: 3 > 2 > 1
numberOfValidSamples = [None for i in range(numberOfSwaths)]
# only record when (filt == False) and (index[0].size >= 4000)
if phaseCompensation:
#compute swath phase offset
diffMean = [0.0]
@ -469,48 +489,30 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
data2 *= np.exp(np.complex64(1j) * angle)
print('phase offset: %15.12f rad with filter strength: %f, window size: %3d'%(diffMean0, filterStrength, filterWinSize))
else:
diffMean0 = 0.0
for k in range(30):
dataDiff = data1 * np.conj(data2)
cor = cal_coherence_1(dataDiff, win=5)
if filt:
index = np.nonzero(np.logical_and(cor>0.95, dataDiff!=0))
else:
index = np.nonzero(np.logical_and(cor>0.85, dataDiff!=0))
if index[0].size < 100:
diffMean0 = 0.0
print('\n\nWARNING: too few high coherence pixels for swath phase difference estimation')
print(' number of high coherence pixels: {}\n\n'.format(index[0].size))
break
angle = np.mean(np.angle(dataDiff[index]), dtype=np.float64)
diffMean0 += angle
data2 *= np.exp(np.complex64(1j) * angle)
print('phase offset: %15.12f rad after loop: %3d'%(diffMean0, k))
if filt:
(diffMean0, numberOfValidSamples[i]) = computePhaseDiff(data1, data2, coherenceWindowSize=5, coherenceThreshold=0.95)
else:
(diffMean0, numberOfValidSamples[i]) = computePhaseDiff(data1, data2, coherenceWindowSize=5, coherenceThreshold=0.85)
if numberOfValidSamples[i] < 100:
diffMean0 = 0.0
print('\n\nWARNING: too few high coherence pixels for swath phase difference estimation')
print(' number of high coherence pixels: {}\n\n'.format(numberOfValidSamples[i]))
DEBUG=False
if DEBUG and (k==0):
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
(length7, width7)=dataDiff.shape
filename = 'diff_ori_s{}-s{}_loop_{}.int'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, k)
dataDiff.astype(np.complex64).tofile(filename)
create_xml(filename, width7, length7, 'int')
filename = 'cor_ori_s{}-s{}_loop_{}.cor'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, k)
cor.astype(np.float32).tofile(filename)
create_xml(filename, width7, length7, 'float')
#do not record when filt
if filt:
numberOfValidSamples[i] = None
#save purely estimated diff phase
phaseDiffEst[i] = diffMean0
#if fixed diff phase provided and the estimated diff phase is close enough to a fixed value, snap to it
############################################################################################################
if phaseDiffFixed != None:
phaseDiffTmp = np.absolute(np.absolute(np.array(phaseDiffFixed)) - np.absolute(diffMean0))
phaseDiffTmpMinIndex = np.argmin(phaseDiffTmp)
if phaseDiffTmp[phaseDiffTmpMinIndex] < snapThreshold:
diffMean0 = np.sign(diffMean0) * np.absolute(phaseDiffFixed[phaseDiffTmpMinIndex])
phaseDiffSource[i] = 'estimated+snap'
############################################################################################################
if snapSwath[i-1] == True:
(outputValue, snapped) = snap(diffMean0, phaseDiffFixed, snapThreshold)
if snapped == True:
diffMean0 = outputValue
phaseDiffSource[i] = 'estimated+snap'
diffMean.append(diffMean0)
print('phase offset: subswath{} - subswath{}: {}'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, diffMean0))
@ -550,7 +552,7 @@ def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num
if phaseCompensation:
# estimated phase diff, used phase diff, used phase diff source
return (phaseDiffEst, diffMean, phaseDiffSource)
return (phaseDiffEst, diffMean, phaseDiffSource, numberOfValidSamples)
def swathMosaicParameters(frame, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks):
'''

View File

@ -18,6 +18,10 @@ logger = logging.getLogger('isce.alos2insar.runSwathOffset')
def runSwathOffset(self):
'''estimate swath offsets.
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()

View File

@ -19,12 +19,23 @@ logger = logging.getLogger('isce.alos2insar.runUnwrapSnaphu')
def runUnwrapSnaphu(self):
'''unwrap filtered interferogram
'''
if hasattr(self, 'doInSAR'):
if not self.doInSAR:
return
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
referenceTrack = self._insar.loadTrack(reference=True)
#secondaryTrack = self._insar.loadTrack(reference=False)
unwrapSnaphu(self, referenceTrack)
catalog.printToLog(logger, "runUnwrapSnaphu")
self._insar.procDoc.addAllFromCatalog(catalog)
def unwrapSnaphu(self, referenceTrack):
insarDir = 'insar'
os.makedirs(insarDir, exist_ok=True)
os.chdir(insarDir)
@ -70,21 +81,20 @@ def runUnwrapSnaphu(self):
wbdImage.load(self._insar.multilookWbdOut+'.xml')
width = wbdImage.width
length = wbdImage.length
if not os.path.exists(self._insar.multilookWbdOut):
catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runUnwrapSnaphu')
else:
wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width)
unw=np.memmap(self._insar.unwrappedInterferogram, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0
(unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0
del unw
unw=np.memmap(self._insar.unwrappedMaskedInterferogram, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0
(unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0
del unw, wbd
#if not os.path.exists(self._insar.multilookWbdOut):
# catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runUnwrapSnaphu')
#else:
wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width)
unw=np.memmap(self._insar.unwrappedInterferogram, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0
(unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0
del unw
unw=np.memmap(self._insar.unwrappedMaskedInterferogram, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0
(unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0
del unw, wbd
os.chdir('../')
catalog.printToLog(logger, "runUnwrapSnaphu")
self._insar.procDoc.addAllFromCatalog(catalog)

View File

@ -74,6 +74,7 @@ def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None):
createPreprocessor = _factory("runPreprocessor")
createBaseline = _factory("runBaseline", path = "isceobj.Alos2Proc.")
createExtractBurst = _factory("runExtractBurst")
createDownloadDem = _factory("runDownloadDem", path = "isceobj.Alos2Proc.")
createCoregGeom = _factory("runCoregGeom")
@ -93,6 +94,7 @@ createCoherence = _factory("runCoherence", path = "isceobj.Alos2Proc.")
createIonSubband = _factory("runIonSubband")
createIonUwrap = _factory("runIonUwrap", path = "isceobj.Alos2Proc.")
createIonFilt = _factory("runIonFilt", path = "isceobj.Alos2Proc.")
createIonCorrect = _factory("runIonCorrect", path = "isceobj.Alos2Proc.")
createFilt = _factory("runFilt", path = "isceobj.Alos2Proc.")
createUnwrapSnaphu = _factory("runUnwrapSnaphu", path = "isceobj.Alos2Proc.")
createGeocode = _factory("runGeocode", path = "isceobj.Alos2Proc.")

View File

@ -102,13 +102,18 @@ def runFrameMosaic(self):
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=False, resamplingMethod=0)
#mosaic interferograms
frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=True, phaseCompensation=True, resamplingMethod=1)
create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')
catalog.addItem('frame phase diff estimated', phaseDiffEst[1:], 'runFrameMosaic')
catalog.addItem('frame phase diff used', phaseDiffUsed[1:], 'runFrameMosaic')
catalog.addItem('frame phase diff used source', phaseDiffSource[1:], 'runFrameMosaic')
catalog.addItem('frame phase diff samples used', numberOfValidSamples[1:], 'runFrameMosaic')
#update secondary parameters here
#do not match for secondary, always use geometrical
rangeOffsets = self._insar.frameRangeOffsetGeometricalSecondary
@ -153,11 +158,17 @@ def runFrameMosaic(self):
inputSd[k].append(os.path.join('../', frameDir, 'mosaic', sdFile))
#mosaic spectral diversity interferograms
for inputSdList, outputSdFile in zip(inputSd, self._insar.interferogramSd):
frameMosaic(referenceTrack, inputSdList, outputSdFile,
for i, (inputSdList, outputSdFile) in enumerate(zip(inputSd, self._insar.interferogramSd)):
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputSdList, outputSdFile,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=True, resamplingMethod=1)
catalog.addItem('sd {} frame phase diff estimated'.format(i+1), phaseDiffEst[1:], 'runFrameMosaic')
catalog.addItem('sd {} frame phase diff used'.format(i+1), phaseDiffUsed[1:], 'runFrameMosaic')
catalog.addItem('sd {} frame phase diff used source'.format(i+1), phaseDiffSource[1:], 'runFrameMosaic')
catalog.addItem('sd {} frame phase diff samples used'.format(i+1), numberOfValidSamples[1:], 'runFrameMosaic')
for sdFile in self._insar.interferogramSd:
create_xml(sdFile, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')

View File

@ -252,30 +252,38 @@ def runIonSubband(self):
#list of input files
inputInterferograms = []
inputAmplitudes = []
phaseDiff = [None]
#phaseDiff = [None]
swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon]
phaseDiff = swathPhaseDiffIon[k]
if swathPhaseDiffIon[k] is None:
phaseDiff = None
else:
phaseDiff = swathPhaseDiffIon[k][i]
phaseDiff.insert(0, None)
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
swathDir = 's{}'.format(swathNumber)
inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram))
inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude))
#compute phase needed to be compensated using startingRange
if j >= 1:
#phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k]
#phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k]
phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
-4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
-4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \
referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange:
#phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1)
#if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m),
#it should be OK to do the above.
#see results in neom where it meets the above requirement, but there is still phase diff
#to be less risky, we do not input values here
phaseDiff.append(None)
else:
phaseDiff.append(None)
# #compute phase needed to be compensated using startingRange
# if j >= 1:
# #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k]
# #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k]
# phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
# -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
# phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
# -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
# if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \
# referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange:
# #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1)
# #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m),
# #it should be OK to do the above.
# #see results in neom where it meets the above requirement, but there is still phase diff
# #to be less risky, we do not input values here
# phaseDiff.append(None)
# else:
# phaseDiff.append(None)
#note that frame parameters are updated after mosaicking
#mosaic amplitudes
@ -294,24 +302,36 @@ def runIonSubband(self):
phaseDiffFixed = None
snapThreshold = None
(phaseDiffEst, phaseDiffUsed, phaseDiffSource) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram,
#whether snap for each swath
if self.swathPhaseDiffSnapIon == None:
snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)]
else:
snapSwath = self.swathPhaseDiffSnapIon
if len(snapSwath) != numberOfFrames:
raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values')
for iii in range(numberOfFrames):
if len(snapSwath[iii]) != (numberOfSwaths-1):
raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values')
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False,
phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, pcRangeLooks=1, pcAzimuthLooks=3,
phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=3,
filt=False, resamplingMethod=1)
#the first item is meaningless for all the following list, so only record the following items
if phaseDiff == None:
phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)]
catalog.addItem('{} subswath phase difference input'.format(ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband')
catalog.addItem('{} subswath phase difference estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
catalog.addItem('{} subswath phase difference used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
catalog.addItem('{} subswath phase difference used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
catalog.addItem('frame {} {} band subswath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband')
catalog.addItem('frame {} {} band subswath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
catalog.addItem('frame {} {} band subswath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
catalog.addItem('frame {} {} band subswath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
catalog.addItem('frame {} {} band subswath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')
#check if there is value around 3.130810857, which may not be stable
phaseDiffUnstableExist = False
for xxx in phaseDiffUsed:
if abs(abs(xxx) - 3.130810857) < 0.2:
phaseDiffUnstableExist = True
catalog.addItem('{} subswath phase difference unstable exists'.format(ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband')
catalog.addItem('frame {} {} band subswath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband')
create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int')
@ -378,13 +398,18 @@ def runIonSubband(self):
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=False, resamplingMethod=0)
#mosaic interferograms
frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=True, resamplingMethod=1)
create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')
catalog.addItem('{} band frame phase diff estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
catalog.addItem('{} band frame phase diff used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')
os.chdir('../')
os.chdir('../')

View File

@ -258,201 +258,6 @@ def runPreprocessor(self):
self._insar.saveProduct(self.secondary.track, self._insar.secondaryTrackParameter)
##################################################
#2. compute burst synchronization
##################################################
#burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights
#in one frame, real unsynchronized time is the same for all swaths
unsynTime = 0
#real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime))
#synTime = 0
synPercentage = 0
numberOfFrames = len(self._insar.referenceFrames)
numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1
for i, frameNumber in enumerate(self._insar.referenceFrames):
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
referenceSwath = self.reference.track.frames[i].swaths[j]
secondarySwath = self.secondary.track.frames[i].swaths[j]
#using Piyush's code for computing range and azimuth offsets
midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5
midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf)
llh = self.reference.track.orbit.rdr2geo(midSensingStart, midRange)
slvaz, slvrng = self.secondary.track.orbit.geo2rdr(llh)
###Translate to offsets
#note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same
#range pixel size and prf
rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5
azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5
#compute burst synchronization
#burst parameters for ScanSAR wide mode not estimed yet
if self._insar.modeCombination == 21:
scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
#secondary burst start times corresponding to reference burst start times (100% synchronization)
scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \
scburstStartLine + 100000*referenceSwath.burstCycleLength, \
referenceSwath.burstCycleLength)
dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines)
#find the difference with minimum absolute value
unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))]
if np.absolute(unsynLines) >= secondarySwath.burstLength:
synLines = 0
if unsynLines > 0:
unsynLines = secondarySwath.burstLength
else:
unsynLines = -secondarySwath.burstLength
else:
synLines = secondarySwath.burstLength - np.absolute(unsynLines)
unsynTime += unsynLines / referenceSwath.prf
synPercentage += synLines / referenceSwath.burstLength * 100.0
catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runPreprocessor')
############################################################################################
#illustration of the sign of the number of unsynchronized lines (unsynLines)
#The convention is the same as ampcor offset, that is,
# secondaryLineNumber = referenceLineNumber + unsynLines
#
# |-----------------------| ------------
# | | ^
# | | |
# | | | unsynLines < 0
# | | |
# | | \ /
# | | |-----------------------|
# | | | |
# | | | |
# |-----------------------| | |
# Reference Burst | |
# | |
# | |
# | |
# | |
# |-----------------------|
# Secondary Burst
#
#
############################################################################################
##burst parameters for ScanSAR wide mode not estimed yet
elif self._insar.modeCombination == 31:
#scansar is reference
scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
#secondary burst start times corresponding to reference burst start times (100% synchronization)
for k in range(-100000, 100000):
saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k
st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf)
if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1:
secondarySwath.burstStartTime = st_burstx
secondarySwath.burstLength = referenceSwath.burstLength
secondarySwath.burstCycleLength = referenceSwath.burstCycleLength
secondarySwath.swathNumber = referenceSwath.swathNumber
break
#unsynLines = 0
#synLines = referenceSwath.burstLength
#unsynTime += unsynLines / referenceSwath.prf
#synPercentage += synLines / referenceSwath.burstLength * 100.0
catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runPreprocessor')
else:
pass
#overwrite original frame parameter file
if self._insar.modeCombination == 31:
frameDir = 'f{}_{}'.format(i+1, frameNumber)
self._insar.saveProduct(self.secondary.track.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter))
#getting average
if self._insar.modeCombination == 21:
unsynTime /= numberOfFrames*numberOfSwaths
synPercentage /= numberOfFrames*numberOfSwaths
elif self._insar.modeCombination == 31:
unsynTime = 0.
synPercentage = 100.
else:
pass
#record results
if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31):
self._insar.burstUnsynchronizedTime = unsynTime
self._insar.burstSynchronization = synPercentage
catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runPreprocessor')
##################################################
#3. compute baseline
##################################################
#only compute baseline at four corners and center of the reference track
bboxRdr = getBboxRdr(self.reference.track)
rangeMin = bboxRdr[0]
rangeMax = bboxRdr[1]
azimuthTimeMin = bboxRdr[2]
azimuthTimeMax = bboxRdr[3]
azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0)
rangeMid = (rangeMin + rangeMax) / 2.0
points = [[azimuthTimeMin, rangeMin],
[azimuthTimeMin, rangeMax],
[azimuthTimeMax, rangeMin],
[azimuthTimeMax, rangeMax],
[azimuthTimeMid, rangeMid]]
Bpar = []
Bperp = []
#modify Piyush's code for computing baslines
refElp = Planet(pname='Earth').ellipsoid
for x in points:
referenceSV = self.reference.track.orbit.interpolate(x[0], method='hermite')
target = self.reference.track.orbit.rdr2geo(x[0], x[1])
slvTime, slvrng = self.secondary.track.orbit.geo2rdr(target)
secondarySV = self.secondary.track.orbit.interpolateOrbit(slvTime, method='hermite')
targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist())
mxyz = np.array(referenceSV.getPosition())
mvel = np.array(referenceSV.getVelocity())
sxyz = np.array(secondarySV.getPosition())
#to fix abrupt change near zero in baseline grid. JUN-05-2020
mvelunit = mvel / np.linalg.norm(mvel)
sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit
aa = np.linalg.norm(sxyz-mxyz)
costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa)
Bpar.append(aa*costheta)
perp = aa * np.sqrt(1 - costheta*costheta)
direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel))
Bperp.append(direction*perp)
catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runPreprocessor')
catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runPreprocessor')
catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runPreprocessor')
catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runPreprocessor')
catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runPreprocessor')
catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runPreprocessor')
catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runPreprocessor')
catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runPreprocessor')
catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runPreprocessor')
catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runPreprocessor')
##################################################
#4. compute bounding box
##################################################
referenceBbox = getBboxGeo(self.reference.track)
secondaryBbox = getBboxGeo(self.secondary.track)
catalog.addItem('reference bounding box', referenceBbox, 'runPreprocessor')
catalog.addItem('secondary bounding box', secondaryBbox, 'runPreprocessor')
catalog.printToLog(logger, "runPreprocessor")
self._insar.procDoc.addAllFromCatalog(catalog)

View File

@ -26,8 +26,6 @@
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import os
import datetime
from . import CEOS
@ -114,8 +112,8 @@ class ALOS(Sensor):
complex(-6.297074e-3,8.026685e-3),
complex(7.217117e-1,-2.367683e-2))
constants = Constants(iBias=15.5,
qBias=15.5,
constants = Constants(iBias=63.5,
qBias=63.5,
pointingDirection=-1,
antennaLength=8.9)
@ -499,7 +497,7 @@ class ALOS(Sensor):
outputNow = self.output + appendStr
if not (self._resampleFlag == ''):
filein = self.output + '__tmp__'
self.imageFile.extractImage(filein)
self.imageFile.extractImage(filein, i) #image number start with 0
self.populateMetadata()
objResample = None
if(self._resampleFlag == 'single2dual'):
@ -513,7 +511,7 @@ class ALOS(Sensor):
objResample.updateFrame(self.frame)
os.remove(filein)
else:
self.imageFile.extractImage(outputNow)
self.imageFile.extractImage(outputNow, i) #image number start with 0
self.populateMetadata()
width = self.frame.getImage().getWidth()
# self.readOrbitPulse(self._leaderFile,outputNow,width)
@ -721,7 +719,7 @@ class ImageFile(object):
return None
def extractImage(self,output=None):
def extractImage(self,output=None, image_i=0):
"""For now, call a wrapped version of ALOS_pre_process"""
productLevel = float(self.parent.leaderFile.sceneHeaderRecord.metadata[
'Product level code'])
@ -731,13 +729,13 @@ class ImageFile(object):
elif productLevel == 1.1:
self.extractSLC(output)
elif productLevel == 1.0:
self.extractRaw(output)
self.extractRaw(output, image_i) #image number start with 0
else:
raise ValueError(productLevel)
return None
@use_api
def extractRaw(self,output=None):
def extractRaw(self,output=None, image_i=0):
#if (self.numberOfSarChannels == 1):
# print "Single Pol Data Found"
# self.extractSinglePolImage(output=output)
@ -748,15 +746,16 @@ class ImageFile(object):
if self.parent.leaderFile.sceneHeaderRecord.metadata[
'Processing facility identifier'] == 'ERSDAC':
prmDict = alos.alose_Py(self.parent._leaderFile,
self.parent._imageFile, output)
self.parent._imageFile, output, image_i) #image number start with 0
else:
prmDict = alos.alos_Py(self.parent._leaderFile,
self.parent._imageFile, output)
self.parent._imageFile, output, image_i) #image number start with 0
pass
# updated 07/24/2012
self.width = prmDict['NUMBER_BYTES_PER_LINE'] - 2 * prmDict['FIRST_SAMPLE']
self.length = self.imageFDR.metadata['Number of lines per data set']
#self.length = self.imageFDR.metadata['Number of lines per data set']
self.length = prmDict['NUMBER_LINES']
self.prefix = self.imageFDR.metadata[
'Number of bytes of prefix data per record']
self.suffix = self.imageFDR.metadata[

View File

@ -85,6 +85,14 @@ Python_add_library(alos MODULE
src/ALOS_pre_process/swap_ALOS_data_info.c
src/ALOS_pre_process/write_ALOS_prm.c
src/ALOS_pre_process/readOrbitPulse.f
src/ALOS_pre_process/get_sio_struct.c
src/ALOS_pre_process/lib_array.c
src/ALOS_pre_process/lib_cpx.c
src/ALOS_pre_process/lib_file.c
src/ALOS_pre_process/lib_func.c
src/ALOS_pre_process/put_sio_struct.c
src/ALOS_pre_process/resamp.h
src/ALOS_pre_process/resamp_azimuth.c
)
target_include_directories(alos PUBLIC
include

View File

@ -333,6 +333,8 @@ class ALOS2(Component):
swath.rangePixelSize = Const.c/(2.0*swath.rangeSamplingRate)
swath.rangeBandwidth =abs((sceneHeaderRecord.metadata['Nominal range pulse (chirp) amplitude coefficient linear term']) *
(sceneHeaderRecord.metadata['Range pulse length in microsec']*1.0e-6))
#this value is also correct
#swath.rangeBandwidth = sceneHeaderRecord.metadata['Total processor bandwidth in range'] * 1000.0
#sensingStart
yr = imageData.metadata['Sensor acquisition year']
@ -357,9 +359,16 @@ class ALOS2(Component):
# '64': 'Manual observation'
#print('ScanSAR mode, using PRF from the line header')
swath.prf = imageData.metadata['PRF'] * 1.0e-3
#entire azimuth spectrum is processed for ScanSAR. Here we 0.85 * minimum PRF of '08': 'ScanSAR nominal mode' (subswath 4)
swath.azimuthBandwidth = 2270.575 * 0.85
#if operationMode == '08':
# swath.azimuthBandwidth = 2270.575 * 0.85 / 5.0
#else:
# swath.azimuthBandwidth = 2270.575 * 0.85 / 7.0
else:
#print('not ScanSAR mode, using PRF from leader file')
swath.prf = sceneHeaderRecord.metadata['Pulse Repetition Frequency in mHz']*1.0e-3
swath.azimuthBandwidth = sceneHeaderRecord.metadata['Total processor bandwidth in azimuth']
#azimuth pixel size at swath center on ground
azimuthTime = swath.sensingStart + datetime.timedelta(seconds=swath.numberOfLines/swath.prf/2.0)

View File

@ -91,6 +91,13 @@ AZIMUTH_PIXEL_SIZE = Component.Parameter('azimuthPixelSize',
mandatory = True,
doc = 'azimuth pixel size on ground in m')
AZIMUTH_BANDWIDTH = Component.Parameter('azimuthBandwidth',
public_name = 'azimuth bandwidth',
default = None,
type=float,
mandatory = True,
doc = 'azimuth bandwidth in Hz')
AZIMUTH_LINE_INTERVAL = Component.Parameter('azimuthLineInterval',
public_name = 'azimuth line interval',
default = None,
@ -206,6 +213,7 @@ class Swath(Component):
SENSING_START,
PRF,
AZIMUTH_PIXEL_SIZE,
AZIMUTH_BANDWIDTH,
AZIMUTH_LINE_INTERVAL,
DOPPLER_VS_PIXEL,
AZIMUTH_FMRATE_VS_PIXEL,

View File

@ -136,6 +136,17 @@ class TerraSARX(Sensor):
self.populateMetadata()
fp.close()
def grab_from_xml(self, path):
try:
res = self._xml_root.find(path).text
except:
raise Exception('Tag= %s not found'%(path))
if res is None:
raise Exception('Tag = %s not found'%(path))
return res
def populateMetadata(self):
"""
Populate our Metadata objects
@ -171,8 +182,23 @@ class TerraSARX(Sensor):
instrument.setIncidenceAngle(incidenceAngle)
instrument.setPulseRepetitionFrequency(prf)
instrument.setRangePixelSize(rangePixelSize)
#Cunren Liang, 2015
#the chirp bandwidth extracted before is definetely wrong
#I re-extract it here.
rangeSamplingFrequency = float(self.grab_from_xml('instrument/settings/RSF'))
chirpPulseBandwidth = float(self.grab_from_xml('instrument/settings/rxBandwidth'))
# this is not a correct value, TSX product does not provide pulse length
rangePulseLength = 1
#print("\n\n\n\n\n\n\n\n{0}\n\n\n\n\n\n\n\n\n".format(rangeSamplingFrequency))
#print("\n\n\n\n\n\n\n\n{0}\n\n\n\n\n\n\n\n\n".format(chirpPulseBandwidth))
#jng no sampling rate extracted before.
instrument.setRangeSamplingRate(1/rowSpacing)
#instrument.setRangeSamplingRate(1/rowSpacing)
#the upper setting should be wrong, I change it. Cunren Liang, 2015
instrument.setRangeSamplingRate(rangeSamplingFrequency)
instrument.setPulseLength(rangePulseLength)
instrument.setChirpSlope(chirpPulseBandwidth/rangePulseLength)
#instrument.setRangeBias(0)

View File

@ -25,9 +25,6 @@
// Author: Giangi Sacco
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#include <Python.h>
#include <iostream>
#include "alosmodule.h"
@ -68,11 +65,12 @@ PyInit_alos()
PyObject *alos_C(PyObject* self,PyObject *args)
{
char *imageFile,*leaderFile,*outFile;
int image_i;
struct PRM inputPRM;
struct PRM outputPRM;
struct GLOBALS globals;
if(!PyArg_ParseTuple(args,"sss",&leaderFile,&imageFile,&outFile))
if(!PyArg_ParseTuple(args,"sssi",&leaderFile,&imageFile,&outFile, &image_i))
{
return NULL;
}
@ -96,7 +94,7 @@ PyObject *alos_C(PyObject* self,PyObject *args)
globals.dopp = 0; // Are we calculating a doppler?
globals.tbias = 0.0; // Is there a time bias to fix poor orbits?
ALOS_pre_process(inputPRM,&outputPRM,globals);
ALOS_pre_process(inputPRM,&outputPRM,globals,image_i);
PyObject * dict = PyDict_New();
createDictionaryOutput(&outputPRM,dict);
@ -106,11 +104,12 @@ PyObject *alos_C(PyObject* self,PyObject *args)
PyObject *alose_C(PyObject* self,PyObject *args)
{
char *imageFile,*leaderFile,*outFile;
int image_i;
struct PRM inputPRM;
struct PRM outputPRM;
struct GLOBALS globals;
if(!PyArg_ParseTuple(args,"sss",&leaderFile,&imageFile,&outFile))
if(!PyArg_ParseTuple(args,"sssi",&leaderFile,&imageFile,&outFile, &image_i))
{
return NULL;
}
@ -134,7 +133,7 @@ PyObject *alose_C(PyObject* self,PyObject *args)
globals.dopp = 0; // Are we calculating a doppler?
globals.tbias = 0.0; // Is there a time bias to fix poor orbits?
ALOS_pre_process(inputPRM,&outputPRM,globals);
ALOS_pre_process(inputPRM,&outputPRM,globals,image_i);
PyObject * dict = PyDict_New();
createDictionaryOutput(&outputPRM,dict);
@ -184,6 +183,14 @@ PyObject * createDictionaryOutput(struct PRM * prm, PyObject * dict)
intVal = PyLong_FromLong((long) prm->good_bytes);
PyDict_SetItemString(dict,"NUMBER_GOOD_BYTES",intVal);
Py_XDECREF(intVal);
intVal = PyLong_FromLong((long) prm->num_lines);
PyDict_SetItemString(dict,"NUMBER_LINES",intVal);
Py_XDECREF(intVal);
intVal = PyLong_FromLong((long) prm->num_rng_bins);
PyDict_SetItemString(dict,"NUMBER_RANGE_BIN",intVal);
Py_XDECREF(intVal);

View File

@ -25,9 +25,6 @@
// Author: Giangi Sacco
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#ifndef alosmodule_h
#define alosmodue_h
@ -42,7 +39,7 @@ extern "C"
PyObject *alose_C(PyObject *self,PyObject *args);
PyObject *createDictionaryOutput(struct PRM *prm,PyObject *dict);
int ALOS_pre_process(struct PRM inputPRM, struct PRM *outputPRM,
struct GLOBALS globals);
struct GLOBALS globals, int image_i);
}
static PyMethodDef alos_methods[] =

View File

@ -17,9 +17,18 @@
* added write_roi
* *****************************************************************************/
#include "alosglobals.h"
#include"image_sio.h"
#include"lib_functions.h"
#include "resamp.h"
//ALOS I or Q mean = 15.5, so get 15 or 16 randomly here
//#define ZERO_VALUE (char)(15 + rand() % 2)
//I changed the dynamic range when reading data
//ALOS I or Q mean = 63.5, so get 63 or 64 randomly here
#define ZERO_VALUE (char)(63 + rand() % 2)
char *USAGE = "\n\nUsage: ALOS_pre_process imagefile LEDfile [-near near_range] [-radius RE] [-swap] [-V] [-debug] [-quiet] \n"
"\ncreates data.raw and writes out parameters (PRM format) to stdout\n"
@ -44,9 +53,8 @@ char *USAGE = "\n\nUsage: ALOS_pre_process imagefile LEDfile [-near near_rang
"-tbias tbias correct the clock bias (positive value means plus)\n"
"Example:\n"
"ALOS_pre_process IMG-HH-ALPSRP050420840-H1.0__A LED-ALPSRP050420840-H1.0__A \n";
long read_ALOS_data (FILE *, FILE *, struct PRM *, long *);
long read_ALOSE_data (FILE *, FILE *, struct PRM *, long *);
long read_ALOS_data (FILE *, FILE *, struct PRM *, long *, struct resamp_info *, int);
long read_ALOSE_data (FILE *, FILE *, struct PRM *, long *, struct resamp_info *, int);
void parse_ALOS_commands(int, char **, char *, struct PRM *);
void set_ALOS_defaults(struct PRM *);
void print_ALOS_defaults(struct PRM *);
@ -58,19 +66,54 @@ int write_roi(char *, FILE *, struct PRM, struct ALOS_ORB, char *);
// ISCE stuff
void init_from_PRM(struct PRM inPRM, struct PRM *prm);
int resamp_azimuth(char *slc2, char *rslc2, int nrg, int naz1, int naz2, double prf, double *dopcoeff, double *azcoef, int n, double beta);
int
ALOS_pre_process(struct PRM inputPRM, struct PRM *outputPRM,struct GLOBALS globals)
ALOS_pre_process(struct PRM inputPRM, struct PRM *outputPRM,struct GLOBALS globals, int image_i) //image number starts with 0!!!
{
FILE *imagefile, *ldrfile;
FILE *rawfile[11];//*prmfile[11];
//char prmfilename[128];
FILE *rawfile[11], *prmfile[11];
char prmfilename[128];
int nPRF;
long byte_offset;
struct PRM prm;
struct ALOS_ORB orb;
char date[8];
//////////////////////////////////////////////
FILE *resampinfofile;
struct resamp_info rspi;
struct resamp_info rspi_new;
struct resamp_info rspi_pre[100];//maximum number of frames: 100
int i, j, k;
double SC_clock_start;
double SC_clock_start_resamp;
double d2s = 24.0 * 3600.0;
double line_number_first;
int num_lines_out;
int gap_flag;
double prf_all[200];//maximum number of prfs: 200
int frame_counter_start_all[200];//maximum number of prfs: 200
int nPRF_all;//maximum number of prfs: 200
double dopcoeff[4];
double azcoef[2];
int num_lines_max, j_max;
char outputfile[256];
char *data;
FILE *first_prf_fp;
FILE *next_prf_fp;
int num_lines_append;
//int num_lines_gap;
int ret;
//////////////////////////////////////////////
//if (argc < 3) die (USAGE,"");
printf("reading image: %d\n", image_i);
/* set flags */
dopp = globals.dopp;
@ -91,19 +134,21 @@ char date[8];
init_from_PRM(inputPRM,&prm);
//parse_ALOS_commands(argc, argv, USAGE, &prm);
//if (verbose) print_ALOS_defaults(&prm);
/* apply an additional timing bias based on corner reflector analysis */
//tbias = tbias - 0.0020835;
if (verbose) print_ALOS_defaults(&prm);
if (is_big_endian_() == -1) {swap = 1;fprintf(stderr,".... swapping bytes\n");} else {swap = 0;}
/* IMG and LED files should exist already */
if ((rawfile[0] = fopen(prm.input_file,"w")) == NULL) die("can't open ",prm.input_file);
if ((imagefile = fopen(globals.imagefilename, "r")) == NULL) die ("couldn't open Level 1.0 IMG file \n",globals.imagefilename);
if ((ldrfile = fopen(inputPRM.led_file, "r")) == NULL) die ("couldn't open LED file \n",inputPRM.led_file);
/* if it exists, copy to prm structure */
//strcpy(prm.led_file,leaderFilename);
strcpy(prm.led_file,inputPRM.led_file);
/* name and open output files and header files for raw data (but input for later processing) */
//get_files(&prm, &rawfile[nPRF], &prmfile[nPRF], prmfilename, argv[1], nPRF);
get_files(&prm, &rawfile[nPRF], &prmfile[nPRF], prmfilename, prm.input_file, nPRF);
/* read sarleader; put info into prm; write log file if specified */
read_ALOS_sarleader(ldrfile, &prm, &orb);
@ -125,7 +170,7 @@ char date[8];
/* if prf changes, create new prm and data files */
if (nPRF > 0 ) {
if (verbose) fprintf(stderr,"creating multiple files due to PRF change (*.%d) \n",nPRF+1);
//get_files(&prm, &rawfile[nPRF], &prmfile[nPRF], prmfilename, argv[1], nPRF);
get_files(&prm, &rawfile[nPRF], &prmfile[nPRF], prmfilename, prm.input_file, nPRF);
}
/* set the chirp extension to 500 if FBD fs = 16000000 */
@ -141,21 +186,26 @@ char date[8];
returns byte offset if the PRF changes */
/* calculate parameters from orbit */
if (ALOS_format == 0) {
byte_offset = read_ALOS_data(imagefile, rawfile[nPRF], &prm, &byte_offset);
byte_offset = read_ALOS_data(imagefile, rawfile[nPRF], &prm, &byte_offset, &rspi, nPRF);
}
/* ERSDAC - use read_ALOSE_data */
if (ALOS_format == 1) {
byte_offset = read_ALOSE_data(imagefile, rawfile[nPRF], &prm, &byte_offset);
byte_offset = read_ALOSE_data(imagefile, rawfile[nPRF], &prm, &byte_offset, &rspi, nPRF);
}
//fclose(rawfile[nPRF]);
// should work for AUIG and ERSDAC
ALOS_ldr_orbit(&orb, &prm);
/* calculate doppler from raw file */
dopp=1;//always compute doppler for doing prf resampling
if (dopp == 1) calc_dop(&prm);
//prf as a function of range in Hz
rspi.fd1[nPRF] = prm.fd1;
rspi.fdd1[nPRF] = prm.fdd1;
rspi.fddd1[nPRF] = prm.fddd1;
//rspi.input_file[nPRF] = prm.input_file;
strcpy(rspi.input_file[nPRF], prm.input_file);
/* divide prf in half for quad_pol */
/* fix chirp slope */
@ -172,7 +222,7 @@ char date[8];
if (force_slope == 1) prm.chirp_slope = forced_slope;
/* write ascii output, SIO format */
//put_sio_struct(prm, prmfile[nPRF]);
put_sio_struct(prm, prmfile[nPRF]);
/* write roi_pac output */
if (roi) {
@ -184,6 +234,322 @@ char date[8];
nPRF++;
}
rspi.nPRF=nPRF;
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
printf("\nPRF details of frame: %d\n", image_i);
printf("+++++++++++++++++++++++++++++++++++++++++++++++\n");
printf("number of PRF: %d\n", rspi.nPRF);
for (i = 0; i < rspi.nPRF; i++){
printf("PRF %d prf (Hz): %f\n", i+1, rspi.prf[i]);
printf("PRF %d start time (days): %20.12f\n", i+1, rspi.SC_clock_start[i]);
printf("PRF %d frame_counter_start: %d\n", i+1, rspi.frame_counter_start[i]);
printf("PRF %d frame_counter_end: %d\n", i+1, rspi.frame_counter_end[i]);
printf("PRF %d number of lines: %d\n\n", i+1, rspi.frame_counter_end[i]-rspi.frame_counter_start[i]+1);
}
//open parameter file for doing time adjustment and interpolation
if (image_i == 0){
if((resampinfofile = fopen("resampinfo.bin", "wb")) == NULL)
die("couldn't open resampinfo file","resampinfo.bin");
}
else{
//open the file for reading and appending
if((resampinfofile = fopen("resampinfo.bin", "ab+")) == NULL)
die("couldn't open resampinfo file","resampinfo.bin");
rewind(resampinfofile);
for(i=0; i < image_i; i++){
if((fread((void *) &rspi_pre[i],sizeof(struct resamp_info), 1, resampinfofile)) != 1)
die("couldn't read from file","resampinfo.bin");
}
}
//get parameter from this image
memcpy(&rspi_pre[image_i], &rspi, sizeof(struct resamp_info));
//initialize rspi_new with resamp_info from reading the image, put the adjusted time in it
memcpy(&rspi_new, &rspi, sizeof(struct resamp_info));
//adjust start time
//unified PRF of the full track: first prf of first image
//start time of the full track: first line of first image
//only adjust time when the format is not ERSDAC format, becasue ERSDAC format does not have sdr.frame_counter.
printf("adjust start times\n");
if(ALOS_format == 0){
if(image_i==0){
//adjust start time of prf file i, no need to adjust for first prf
for(i = 1; i < rspi_pre[0].nPRF; i++){
//time of the line just before the first line of first prf file
SC_clock_start = rspi_pre[0].SC_clock_start[0] - (1.0/rspi_pre[0].prf[0]) / d2s;
//time of the last line of each prf file
for(j = 0; j < i; j++){
if(rspi_pre[0].num_lines[j] != rspi_pre[0].frame_counter_end[j] - rspi_pre[0].frame_counter_start[j] + 1)
fprintf(stderr, "\n\nWARNING: in image %d prf file %d, \
number of lines in file: %d is not equal to that computed from frame_counter: %d\n\n", \
0, j, rspi_pre[0].num_lines[j], rspi_pre[0].frame_counter_end[j] - rspi_pre[0].frame_counter_start[j] + 1);
SC_clock_start += (rspi_pre[0].frame_counter_end[j]-rspi_pre[0].frame_counter_start[j]+1) * (1.0/rspi_pre[0].prf[j]) / d2s;
}
//time of the first line of current prf file
SC_clock_start += (1.0/rspi_pre[0].prf[i]) / d2s;
printf("time adjustment result for image %d, prf %d:\n", image_i, i);
printf("+++++++++++++++++++++++++++++++++++++++++++++++\n");
printf("original start time: %20.12f\n", rspi_pre[0].SC_clock_start[i]);
printf("adjusted start time: %20.12f\n", SC_clock_start);
printf("original - adjusted: %f (number of PRI)\n\n", (rspi_pre[0].SC_clock_start[i]-SC_clock_start)*d2s/(1.0/rspi_pre[0].prf[i]));
//update
rspi_new.SC_clock_start[i] = SC_clock_start;
}
}
else{
//1. check to see if there is gap between images
gap_flag = 0;
for(i = 0; i < image_i; i++){
if (rspi_pre[i].frame_counter_end[rspi_pre[i].nPRF-1] - rspi_pre[i+1].frame_counter_start[0] <= -2){
fprintf(stderr, "\n\nWARNING: there are gaps between image %d and image: %d\n", i, i+1);
fprintf(stderr, "since we don't know the prf of these gap lines, we are not able to adjust starting time\n\n");
gap_flag = 1;
}
}
//2. adjust start time
if(gap_flag == 0){
//2.1 count the number of prf chunks in the full track including this image
nPRF_all = 0;
for(i = 0; i < image_i+1; i++){
for(j = 0; j < rspi_pre[i].nPRF; j++){
if((i==0) && (j==0)){
prf_all[nPRF_all] = rspi_pre[i].prf[j];
frame_counter_start_all[nPRF_all] = rspi_pre[i].frame_counter_start[j];
nPRF_all += 1;
}
else{
if((rspi_pre[i].frame_counter_start[j]>frame_counter_start_all[nPRF_all-1]) && (rspi_pre[i].prf[j]!=prf_all[nPRF_all-1])){
prf_all[nPRF_all] = rspi_pre[i].prf[j];
frame_counter_start_all[nPRF_all] = rspi_pre[i].frame_counter_start[j];
nPRF_all += 1;
}
}
}
}
printf("number of prfs including this image: %d\n", nPRF_all);
printf("list of prfs:\n");
for(i = 0; i < nPRF_all; i++){
printf("frame_counter: %d, prf: %f\n", frame_counter_start_all[i], prf_all[i]);
}
//2.2 adjust start time
for(i = 0; i < rspi_pre[image_i].nPRF; i++){
//time of the line just before the first line of first prf file
//because the unite is day, the errors caused can be 0.042529743164777756 lines, should remove the integer or year part of SC_clock_start, or
//use second as unit in the future
SC_clock_start = rspi_pre[0].SC_clock_start[0] - (1.0/rspi_pre[0].prf[0]) / d2s;
//if there is only one PRF (no prf changes across all images)
if(nPRF_all == 1){
SC_clock_start += (rspi_pre[image_i].frame_counter_start[0] - rspi_pre[0].frame_counter_start[0] + 1) * (1.0/rspi_pre[0].prf[0]) / d2s;
}
else{
//find its position among the prfs, start from the second prf
for(j = 1; j < nPRF_all; j++){
if(rspi_pre[image_i].frame_counter_start[i] < frame_counter_start_all[j]){
//time of the last line of each prf chuck
for(k = 1; k < j; k++)
SC_clock_start += (frame_counter_start_all[k]-frame_counter_start_all[k-1]) * (1.0/prf_all[k-1]) / d2s;
SC_clock_start += (rspi_pre[image_i].frame_counter_start[i] - frame_counter_start_all[j-1] + 1) * (1.0/prf_all[j-1]) / d2s;
break;
}
else if(rspi_pre[image_i].frame_counter_start[i] == frame_counter_start_all[j]){
//time of the last line of each prf chuck
for(k = 1; k < j; k++)
SC_clock_start += (frame_counter_start_all[k]-frame_counter_start_all[k-1]) * (1.0/prf_all[k-1]) / d2s;
SC_clock_start += (rspi_pre[image_i].frame_counter_start[i] - frame_counter_start_all[j-1] + 1) * (1.0/prf_all[j-1]) / d2s;
//extra pri of j-1 above, so remove it and add the pri of j
SC_clock_start += (1.0/prf_all[j]) / d2s - (1.0/prf_all[j-1]) / d2s;
break;
}
else{
if(j == nPRF_all - 1){
for(k = 1; k < j+1; k++)
SC_clock_start += (frame_counter_start_all[k]-frame_counter_start_all[k-1]) * (1.0/prf_all[k-1]) / d2s;
SC_clock_start += (rspi_pre[image_i].frame_counter_start[i] - frame_counter_start_all[j] + 1) * (1.0/prf_all[j]) / d2s;
break;
}
else{
continue;
}
}
}
}
//time of the first line of current prf file
printf("time adjustment result for image %d, prf %d:\n", image_i, i);
printf("+++++++++++++++++++++++++++++++++++++++++++++++\n");
printf("original start time: %20.12f\n", rspi_pre[image_i].SC_clock_start[i]);
printf("adjusted start time: %20.12f\n", SC_clock_start);
printf("original - adjusted: %f (number of PRI)\n\n", (rspi_pre[image_i].SC_clock_start[i]-SC_clock_start)*d2s/(1.0/rspi_pre[image_i].prf[i]));
//update
rspi_new.SC_clock_start[i] = SC_clock_start;
}
}
}
}
// use parameters from rspi_pre[image_i], instead of rspi_new (to be updated)
//except rspi_new.SC_clock_start[i], since it was updated (more accurate) above.
printf("azimuth resampling\n");
for(i = 0; i < rspi_pre[image_i].nPRF; i++){
if((image_i==0)&&(i==0))
continue;
//convention: line numbers start with zero
//line number of first line of first prf of first image: 0
//line number of first line of this prf file
line_number_first = (rspi_new.SC_clock_start[i] - rspi_pre[0].SC_clock_start[0]) * d2s / (1.0 / rspi_pre[0].prf[0]);
//unit: pri of first prf of first image
num_lines_out = (rspi_pre[image_i].frame_counter_end[i] - rspi_pre[image_i].frame_counter_start[i] + 1) * (1.0/rspi_pre[image_i].prf[i]) / (1.0/rspi_pre[0].prf[0]);
if((fabs(roundfi(line_number_first)-line_number_first)<0.1) && (rspi_pre[image_i].prf[i]==rspi_pre[0].prf[0]))
continue;
//time of first line of the resampled image
SC_clock_start_resamp = rspi_pre[0].SC_clock_start[0] + roundfi(line_number_first) * (1.0 / rspi_pre[0].prf[0]) / d2s;
//compute offset parameters
//azcoef[0] + azpos * azcoef[1]
azcoef[0] = (SC_clock_start_resamp - rspi_new.SC_clock_start[i]) * d2s / (1.0/rspi_pre[image_i].prf[i]);
azcoef[1] = (1.0/rspi_pre[0].prf[0]) / (1.0/rspi_pre[image_i].prf[i]) - 1.0;
//use doppler centroid frequency estimated from prf with maximum number of lines in this image
num_lines_max = -1;
j_max = -1;
for(j = 0; j < rspi_pre[image_i].nPRF; j++){
if(rspi_pre[image_i].num_lines[j] >= num_lines_max){
num_lines_max = rspi_pre[image_i].num_lines[j];
j_max = j;
}
}
dopcoeff[0] = rspi_pre[image_i].fd1[j_max]; //average prf for alos-1 is good enough (calc_dop.c).
dopcoeff[1] = 0.0;
dopcoeff[2] = 0.0;
dopcoeff[3] = 0.0;
//The filenames of all three files created for each prf, are from prm.input_file
//PRM: prm.input_file.PRM + (.prfno_start_from_1, if not first prf)
//data: prm.input_file + (.prfno_start_from_1, if not first prf)
//data after resampling: prm.input_file + (.prfno_start_from_1, if not first prf) + .interp
sprintf(outputfile,"%s.interp", rspi_pre[image_i].input_file[i]);
//start interpolation
resamp_azimuth(rspi_pre[image_i].input_file[i], outputfile, rspi_pre[image_i].num_bins[i], num_lines_out, rspi_pre[image_i].num_lines[i], rspi_pre[image_i].prf[i], dopcoeff, azcoef, 9, 5.0);
//update parameters
rspi_new.SC_clock_start[i] = SC_clock_start_resamp;
rspi_new.num_lines[i] = num_lines_out;
rspi_new.prf[i] = rspi_pre[0].prf[0];
rspi_new.fd1[i] = dopcoeff[0];
rspi_new.fdd1[i]= dopcoeff[1];
rspi_new.fddd1[i]=dopcoeff[2];
strcpy(rspi_new.input_file[i], outputfile);
}
//concatenate prfs: put all prfs to the first prf
// use parameters from rspi_new (updated), instead of rspi_pre[image_i]
if(rspi_new.nPRF > 1){
//prepare for appending subsequent prfs to first prf: open files and allocate memory
if((first_prf_fp = fopen(rspi_new.input_file[0], "ab")) == NULL)
die("can't open", rspi_new.input_file[0]);
//number of range samples in each prf is asummed to be same
if((data = (char *)malloc(2*sizeof(char)*rspi_new.num_bins[0])) == NULL)
die("can't allocate memory for data", "");
//append prf i
for(i = 1; i < rspi_new.nPRF; i++){
//number of lines to be appended between frames if there are gaps
num_lines_append = (rspi_new.SC_clock_start[i] - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0]) - rspi_new.num_lines[0];
if(num_lines_append >= 1){
for(j = 0; j < num_lines_append; j++){
for(k = 0; k < 2*rspi_new.num_bins[i]; k++)
data[k] = ZERO_VALUE;
if(fwrite((char *)data, 2*sizeof(char)*rspi_new.num_bins[i], 1, first_prf_fp) != 1)
die("can't write data to", rspi_new.input_file[0]);
}
rspi_new.num_lines[0] += num_lines_append;
}
//append data from rspi_new.input_file[i]
if((next_prf_fp = fopen(rspi_new.input_file[i], "rb")) == NULL)
die("can't open", rspi_new.input_file[i]);
num_lines_append = 0;
for(j = 0; j < rspi_new.num_lines[i]; j++){
if((rspi_new.SC_clock_start[i] + j * (1.0/rspi_pre[0].prf[0]) / d2s - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0]) >= rspi_new.num_lines[0]){
if(fread((char *)data, 2*sizeof(char)*rspi_new.num_bins[i], 1, next_prf_fp) != 1)
die("can't read data from", rspi_new.input_file[i]);
if(fwrite((char *)data, 2*sizeof(char)*rspi_new.num_bins[i], 1, first_prf_fp) != 1)
die("can't write data to", rspi_new.input_file[0]);
num_lines_append += 1;
}
else{
fseek(next_prf_fp, 2*sizeof(char)*rspi_new.num_bins[i], SEEK_CUR);
}
}
rspi_new.num_lines[0] += num_lines_append;
fclose(next_prf_fp);
}
free(data);
fclose(first_prf_fp);
}
//tidy up intermediate files
for(i = 0; i < rspi_pre[image_i].nPRF; i++){
//if Return value = 0 then it indicates str1 is equal to str2.
ret = strcmp(rspi_new.input_file[i], rspi_pre[image_i].input_file[i]);
if(i == 0){
if(ret != 0){
//remove original
if(remove(rspi_pre[image_i].input_file[i]) != 0)
die("can't delete file", rspi_pre[image_i].input_file[i]);
//keep resampled and appended
if(rename(rspi_new.input_file[i], rspi_pre[image_i].input_file[i]) != 0)
die("can't rename file", rspi_new.input_file[i]);
}
}
else{
//remove original
if(remove(rspi_pre[image_i].input_file[i]) != 0)
die("can't delete file", rspi_pre[image_i].input_file[i]);
//remove resampled
if(ret != 0){
if(remove(rspi_new.input_file[i]) != 0)
die("can't delete file", rspi_new.input_file[i]);
}
}
}
//update prm
prm.prf = rspi_new.prf[0];
prm.num_lines = rspi_new.num_lines[0];
prm.SC_clock_start = rspi_new.SC_clock_start[0];
prm.SC_clock_stop = prm.SC_clock_start + (prm.num_lines - 1) * (1.0/prm.prf) / d2s;
prm.fd1 = rspi_pre[image_i].fd1[j_max]; //average prf for alos-1 is good enough (calc_dop.c).
prm.fdd1 = 0.0;
prm.fddd1 =0.0;
prm.xmi = 63.5;
prm.xmq = 63.5;
//write to resampinfo.bin
if((fwrite((void *)&rspi_pre[image_i], sizeof(struct resamp_info), 1, resampinfofile)) != 1 )
die("couldn't write to file", "resampinfo.bin");
fclose(resampinfofile);
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
if (orb.points != NULL)
{
@ -198,12 +564,19 @@ void get_files(struct PRM *prm, FILE **rawfile, FILE **prmfile, char *prmfilenam
/* name and open output file for raw data (but input for later processing) */
/* if more than 1 set of output files, append an integer (beginning with 2) */
if (n == 0) {
sprintf(prm->input_file,"%s.raw", name);
//if (n == 0) {
// sprintf(prm->input_file,"%s.raw", name);
// sprintf(prmfilename,"%s.PRM", name);
//} else {
// sprintf(prm->input_file,"%s.raw.%d",name,n+1);
// sprintf(prmfilename,"%s.PRM.%d", name, n+1);
//}
if (n==0) {
sprintf(prmfilename,"%s.PRM", name);
sprintf(prm->input_file,"%s",name);
} else {
sprintf(prm->input_file,"%s.raw.%d",name,n+1);
sprintf(prmfilename,"%s.PRM.%d", name, n+1);
sprintf(prm->input_file,"%s.%d",name,n+1);
}
/* now open the files */
@ -212,4 +585,4 @@ void get_files(struct PRM *prm, FILE **rawfile, FILE **prmfile, char *prmfilenam
if ((*prmfile = fopen(prmfilename, "w")) == NULL) die ("couldn't open output PRM file \n",prmfilename);
}
/*------------------------------------------------------*/

View File

@ -6,14 +6,16 @@ Import('envSensorSrc1')
package = envSensorSrc1['PACKAGE']
project = envSensorSrc1['PROJECT']
install = envSensorSrc1['PRJ_LIB_DIR']
headerFiles = ['data_ALOS.h','data_ALOSE.h','image_sio.h','orbit_ALOS.h','sarleader_ALOS.h','sarleader_fdr.h','siocomplex.h']
sourceFiles = ['ALOSE_orbits_utils.c','ALOS_ldr_orbit.c','ALOS_pre_process.c','calc_dop.c','hermite_c.c','init_from_PRM.c',
'interpolate_ALOS_orbit.c','null_sio_struct.c','parse_ALOS_commands.c','polyfit.c','read_ALOSE_data.c',
headerFiles = ['data_ALOS.h','data_ALOSE.h','image_sio.h','orbit_ALOS.h','sarleader_ALOS.h','sarleader_fdr.h','siocomplex.h', 'resamp.h']
sourceFiles = ['ALOSE_orbits_utils.c','ALOS_ldr_orbit.c','ALOS_pre_process.c','calc_dop.c','get_sio_struct.c','hermite_c.c','init_from_PRM.c',
'interpolate_ALOS_orbit.c','null_sio_struct.c','parse_ALOS_commands.c','polyfit.c','put_sio_struct.c','read_ALOSE_data.c',
'read_ALOS_data.c','read_ALOS_sarleader.c','roi_utils.c','set_ALOS_defaults.c','siocomplex.c',
'swap_ALOS_data_info.c','utils.c','write_ALOS_prm.c',
'readOrbitPulse.f','readOrbitPulseState.f',
'readOrbitPulseSetState.f','image_sio.c',
]
sourceFiles += ['lib_array.c', 'lib_cpx.c', 'lib_file.c',
'lib_func.c', 'resamp_azimuth.c']
lib = envSensorSrc1.Library(target = 'alos', source = sourceFiles)
envSensorSrc1.Install(install,lib)
envSensorSrc1.Alias('install',install)

View File

@ -12,6 +12,7 @@
* Date: *
* *****************************************************************************/
#include "image_sio.h"
#include "lib_functions.h"
#include "siocomplex.h"
@ -23,8 +24,8 @@ void calc_dop(struct PRM *prm)
long n;
float *xr, *ac, *sg;
double sumd;
fcomplex *ai, *bi, *ab;
fcomplex ctmp;
fcomplex_sio *ai, *bi, *ab;
fcomplex_sio ctmp;
FILE *fin;
fprintf(stderr,".... calculating doppler for %s\n",prm->input_file);
@ -40,9 +41,15 @@ void calc_dop(struct PRM *prm)
ac = (float *) malloc(n*sizeof(float));
sg = (float *) malloc(n*sizeof(float));
ai = (fcomplex *) malloc(n*sizeof(fcomplex));
bi = (fcomplex *) malloc(n*sizeof(fcomplex));
ab = (fcomplex *) malloc(2*n*sizeof(fcomplex));
ai = (fcomplex_sio *) malloc(n*sizeof(fcomplex_sio));
bi = (fcomplex_sio *) malloc(n*sizeof(fcomplex_sio));
ab = (fcomplex_sio *) malloc(2*n*sizeof(fcomplex_sio));
for(i = 0; i< n;i++){
ab[i].r = 0;
ab[i].i = 0;
}
/* read a line of data from fin (input file, chars) to ai (complex floats) */
fread(indata, sizeof(unsigned char), prm->bytes_per_line, fin);
@ -52,7 +59,7 @@ void calc_dop(struct PRM *prm)
/* inefficient; could put loops inside each other */
for (i=prm->first_line; i<prm->num_lines-1; i++){
if (i/2000 == i/2000.0) fprintf(stderr," Working on line %d \n",i);
//if (i/2000 == i/2000.0) fprintf(stderr," Working on line %d \n",i);
fread(indata, sizeof(unsigned char), prm->bytes_per_line, fin);
@ -87,9 +94,10 @@ void calc_dop(struct PRM *prm)
free(xr); free(ac); free(sg);
free(ai); free(bi); free(ab);
free(indata);
fprintf(stderr,"done\n");
}
/*---------------------------------------------------*/
void read_data(fcomplex *data, unsigned char *indata, int i, struct PRM *prm)
void read_data(fcomplex_sio *data, unsigned char *indata, int i, struct PRM *prm)
{
int ii ;

View File

@ -0,0 +1,199 @@
/*--------------------------------------------------------------------*/
/*
Read parameters into PRM structure from PRM file
Based on get_params by Evelyn J. Price
Modified by RJM
*/
/*--------------------------------------------------------------------*/
#include "image_sio.h"
#include "lib_functions.h"
/*
void get_sio_struct(FILE *, struct PRM *);
void get_string(char *, char *, char *, char *);
void get_int(char *, char *, char *, int *);
void get_double(char *, char *, char *, double *);
*/
/* Parse a SIO-format PRM parameter file and fill the PRM structure.
 *
 * fh: open PRM text file containing "name = value" pairs, one per line.
 * s:  PRM structure whose matching fields are overwritten; fields not
 *     mentioned in the file keep their previous contents.
 *
 * Unrecognized parameter names are silently ignored so that newer PRM
 * files remain readable by older code.
 */
void get_sio_struct(FILE *fh, struct PRM *s) {
	char name[256], value[256];

	debug = 0;
	if (debug) {
		fprintf(stderr, "get_sio_struct:\n");
		fprintf(stderr, "PRMname (PRM value) interpreted value\n");
	}
	/* %255s field widths keep overly long tokens from overflowing
	   the 256-byte name/value buffers on malformed input */
	while (fscanf(fh, "%255s = %255s \n", name, value) != EOF) {

		/* strings */
		if (strcmp(name, "input_file") == 0)
			get_string(name, "input_file", value, s->input_file);
		if (strcmp(name, "led_file") == 0)
			get_string(name, "led_file", value, s->led_file);
		if (strcmp(name, "out_amp_file") == 0)
			get_string(name, "out_amp_file", value, s->out_amp_file);
		if (strcmp(name, "out_data_file") == 0)
			get_string(name, "out_data_file", value, s->out_data_file);
		if (strcmp(name, "scnd_rng_mig") == 0)
			get_string(name, "scnd_rng_mig", value, s->srm);
		if (strcmp(name, "deskew") == 0)
			get_string(name, "deskew", value, s->deskew);
		if (strcmp(name, "Flip_iq") == 0)
			get_string(name, "Flip_iq", value, s->iqflip);
		if (strcmp(name, "offset_video") == 0)
			get_string(name, "offset_video", value, s->offset_video);
		if (strcmp(name, "ref_file") == 0)
			get_string(name, "ref_file", value, s->ref_file);
		if (strcmp(name, "SLC_file") == 0)
			get_string(name, "SLC_file", value, s->SLC_file);
		if (strcmp(name, "orbdir") == 0)
			get_string(name, "orbdir", value, s->orbdir);
		//if (strcmp(name, "lookdir") == 0)
		//	get_string(name, "lookdir", value, s->lookdir);
		if (strcmp(name, "date") == 0)
			get_string(name, "date", value, s->date);

		/* integers */
		if (strcmp(name, "nrows") == 0)
			get_int(name, "nrows", value, &s->nrows);
		if (strcmp(name, "num_lines") == 0)
			get_int(name, "num_lines", value, &s->num_lines);
		if (strcmp(name, "bytes_per_line") == 0)
			get_int(name, "bytes_per_line", value, &s->bytes_per_line);
		if (strcmp(name, "good_bytes_per_line") == 0)
			get_int(name, "good_bytes_per_line", value, &s->good_bytes);
		if (strcmp(name, "first_line") == 0)
			get_int(name, "first_line", value, &s->first_line);
		if (strcmp(name, "num_patches") == 0)
			get_int(name, "num_patches", value, &s->num_patches);
		if (strcmp(name, "first_sample") == 0)
			get_int(name, "first_sample", value, &s->first_sample);
		if (strcmp(name, "num_valid_az") == 0)
			get_int(name, "num_valid_az", value, &s->num_valid_az);
		if (strcmp(name, "SC_identity") == 0)
			get_int(name, "SC_identity", value, &s->SC_identity);
		if (strcmp(name, "chirp_ext") == 0)
			get_int(name, "chirp_ext", value, &s->chirp_ext);
		if (strcmp(name, "st_rng_bin") == 0)
			get_int(name, "st_rng_bin", value, &s->st_rng_bin);
		if (strcmp(name, "num_rng_bins") == 0)
			get_int(name, "num_rng_bins", value, &s->num_rng_bins);
		if (strcmp(name, "ref_identity") == 0)
			get_int(name, "ref_identity", value, &s->ref_identity);
		if (strcmp(name, "nlooks") == 0)
			get_int(name, "nlooks", value, &s->nlooks);
		if (strcmp(name, "rshift") == 0)
			get_int(name, "rshift", value, &s->rshift);
		if (strcmp(name, "ashift") == 0)
			get_int(name, "ashift", value, &s->ashift);
		/* backwards compatibility for xshift/rshift yshift/ashift */
		if (strcmp(name, "xshift") == 0)
			get_int(name, "rshift", value, &s->rshift);
		if (strcmp(name, "yshift") == 0)
			get_int(name, "ashift", value, &s->ashift);
		if (strcmp(name, "SLC_format") == 0)
			get_int(name, "SLC_format", value, &s->SLC_format);

		/* doubles */
		if (strcmp(name, "SC_clock_start") == 0)
			get_double(name, "SC_clock_start", value, &s->SC_clock_start);
		if (strcmp(name, "SC_clock_stop") == 0)
			get_double(name, "SC_clock_stop", value, &s->SC_clock_stop);
		if (strcmp(name, "icu_start") == 0)
			get_double(name, "icu_start", value, &s->icu_start);
		//if (strcmp(name, "clock_start") == 0)
		//	get_double(name, "clock_start", value, &s->clock_start);
		//if (strcmp(name, "clock_stop") == 0)
		//	get_double(name, "clock_stop", value, &s->clock_stop);
		if (strcmp(name, "caltone") == 0)
			get_double(name, "caltone", value, &s->caltone);
		if (strcmp(name, "earth_radius") == 0)
			get_double(name, "earth_radius", value, &s->RE);
		if (strcmp(name, "equatorial_radius") == 0)
			get_double(name, "equatorial_radius", value, &s->ra);
		if (strcmp(name, "polar_radius") == 0)
			get_double(name, "polar_radius", value, &s->rc);
		if (strcmp(name, "SC_vel") == 0)
			get_double(name, "SC_vel", value, &s->vel);
		if (strcmp(name, "SC_height") == 0)
			get_double(name, "SC_height", value, &s->ht);
		if (strcmp(name, "SC_height_start") == 0)
			get_double(name, "SC_height_start", value, &s->ht_start);
		if (strcmp(name, "SC_height_end") == 0)
			get_double(name, "SC_height_end", value, &s->ht_end);
		if (strcmp(name, "near_range") == 0)
			get_double(name, "near_range", value, &s->near_range);
		if (strcmp(name, "PRF") == 0)
			get_double(name, "PRF", value, &s->prf);
		if (strcmp(name, "I_mean") == 0)
			get_double(name, "I_mean", value, &s->xmi);
		if (strcmp(name, "Q_mean") == 0)
			get_double(name, "Q_mean", value, &s->xmq);
		if (strcmp(name, "az_res") == 0)
			get_double(name, "az_res", value, &s->az_res);
		if (strcmp(name, "rng_samp_rate") == 0)
			get_double(name, "rng_samp_rate", value, &s->fs);
		if (strcmp(name, "chirp_slope") == 0)
			get_double(name, "chirp_slope", value, &s->chirp_slope);
		if (strcmp(name, "pulse_dur") == 0)
			get_double(name, "pulse_dur", value, &s->pulsedur);
		if (strcmp(name, "radar_wavelength") == 0)
			get_double(name, "radar_wavelength", value, &s->lambda);
		if (strcmp(name, "rng_spec_wgt") == 0)
			get_double(name, "rng_spec_wgt", value, &s->rhww);
		if (strcmp(name, "rm_rng_band") == 0)
			get_double(name, "rm_rng_band", value, &s->pctbw);
		if (strcmp(name, "rm_az_band") == 0)
			get_double(name, "rm_az_band", value, &s->pctbwaz);
		if (strcmp(name, "fd1") == 0)
			get_double(name, "fd1", value, &s->fd1);
		if (strcmp(name, "fdd1") == 0)
			get_double(name, "fdd1", value, &s->fdd1);
		if (strcmp(name, "fddd1") == 0)
			get_double(name, "fddd1", value, &s->fddd1);
		if (strcmp(name, "sub_int_r") == 0)
			get_double(name, "sub_int_r", value, &s->sub_int_r);
		if (strcmp(name, "sub_int_a") == 0)
			get_double(name, "sub_int_a", value, &s->sub_int_a);
		if (strcmp(name, "stretch_r") == 0)
			get_double(name, "stretch_r", value, &s->stretch_r);
		if (strcmp(name, "stretch_a") == 0)
			get_double(name, "stretch_a", value, &s->stretch_a);
		if (strcmp(name, "a_stretch_r") == 0)
			get_double(name, "a_stretch_r", value, &s->a_stretch_r);
		if (strcmp(name, "a_stretch_a") == 0)
			get_double(name, "a_stretch_a", value, &s->a_stretch_a);
		if (strcmp(name, "baseline_start") == 0)
			get_double(name, "baseline_start", value, &s->baseline_start);
		if (strcmp(name, "alpha_start") == 0)
			get_double(name, "alpha_start", value, &s->alpha_start);
		if (strcmp(name, "baseline_end") == 0)
			get_double(name, "baseline_end", value, &s->baseline_end);
		if (strcmp(name, "alpha_end") == 0)
			get_double(name, "alpha_end", value, &s->alpha_end);
		//if (strcmp(name, "SLC_scale") == 0)
		//	get_double(name, "SLC_scale", value, &s->SLC_scale);
	}
}
/*--------------------------------------------------------------------------------*/
/* Copy the raw PRM value string into the destination field, echoing
   the assignment to stderr when the global debug flag is set. */
void get_string(char *s1, char *name, char *value, char *s2) {
	strcpy(s2, value);
	if (debug == 1) {
		fprintf(stderr, " %s (%s) = %s\n", s1, name, value);
	}
}
/*--------------------------------------------------------------------------------*/
/* Convert a PRM value string to int and store it in *iparam,
   echoing the assignment to stderr when the global debug flag is set. */
void get_int(char *s1, char *name, char *value, int *iparam) {
	*iparam = atoi(value);
	if (debug == 1) {
		fprintf(stderr, " %s (%s) = %s (%d)\n", s1, name, value, *iparam);
	}
}
/*--------------------------------------------------------------------------------*/
/* Convert a PRM value string to double and store it in *param,
   echoing the assignment to stderr when the global debug flag is set. */
void get_double(char *s1, char *name, char *value, double *param) {
	*param = atof(value);
	if (debug == 1) {
		fprintf(stderr, " %s (%s) = %s (%lf)\n", s1, name, value, *param);
	}
}
/*--------------------------------------------------------------------------------*/

View File

@ -33,9 +33,9 @@
#define NULL_DOUBLE -99999.9999
#define NULL_CHAR "XXXXXXXX"
typedef struct SCOMPLEX {short r,i;} scomplex;
typedef struct FCOMPLEX {float r,i;} fcomplex;
typedef struct DCOMPLEX {double r,i;} dcomplex;
typedef struct SCOMPLEX_SIO {short r,i;} scomplex_sio;
typedef struct FCOMPLEX_SIO {float r,i;} fcomplex_sio;
typedef struct DCOMPLEX_SIO {double r,i;} dcomplex_sio;
struct PRM {
char input_file[256];
@ -121,6 +121,20 @@ struct PRM {
double bpara; /* parallel baseline - added by RJM */
double bperp; /* perpendicular baseline - added by RJM */
};
/* Per-segment metadata for an ALOS-1 image whose PRF changes within the file.
   Index 0..nPRF-1 addresses each constant-PRF segment of the image. */
struct resamp_info {
//we assume there are no more than 20 prfs per image
int nPRF; //number of prfs, start with 1
int frame_counter_start[20]; //frame counter at first line of each segment
int frame_counter_end[20]; //frame counter at last line of each segment
int num_lines[20]; //azimuth lines in each segment
int num_bins[20]; //range bins in each segment
double prf[20]; //pulse repetition frequency of each segment
double SC_clock_start[20]; /* YYDDD.DDDD */
double fd1[20]; //presumably Doppler centroid polynomial terms (matches PRM fd1/fdd1/fddd1) -- TODO confirm
double fdd1[20];
double fddd1[20];
char input_file[20][256]; //we assume there are no more than 256 characters in the file name
};
/*
offset_video off_vid
chirp_ext nextend

View File

@ -0,0 +1,575 @@
//////////////////////////////////////
// Cunren Liang, NASA JPL/Caltech
// Copyright 2017
//////////////////////////////////////
#include "resamp.h"
/****************************************************************/
/* allocating arrays */
/****************************************************************/
signed char *vector_char(long nl, long nh)
/* allocate a signed char vector with subscript range v[nl..nh] */
/* Numerical Recipes-style: the returned pointer is offset so the caller
   indexes v[nl]..v[nh]; release with free_vector_char(v, nl, nh). */
{
signed char *v;
v=(signed char *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(signed char)));
if (!v){
fprintf(stderr,"Error: cannot allocate 1-D vector\n");
exit(1); /* allocation failure is fatal */
}
return v-nl+NR_END; /* shift so v[nl] is the first valid element */
}
void free_vector_char(signed char *v, long nl, long nh)
/* free a signed char vector allocated with vector() */
/* Undoes the nl/NR_END pointer offset applied by vector_char(); nh is unused. */
{
free((FREE_ARG) (v+nl-NR_END));
}
unsigned char *vector_unchar(long nl, long nh)
/* allocate a unsigned char vector with subscript range v[nl..nh] */
{
unsigned char *v;
v=(unsigned char *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(unsigned char)));
if (!v){
fprintf(stderr,"Error: cannot allocate 1-D vector\n");
exit(1);
}
return v-nl+NR_END;
}
void free_vector_unchar(unsigned char *v, long nl, long nh)
/* free a unsigned char vector allocated with vector() */
{
free((FREE_ARG) (v+nl-NR_END));
}
int *vector_int(long nl, long nh)
/* allocate an int vector with subscript range v[nl..nh] */
/* Unlike the char/float/double variants, failure is reported via nrerror()
   rather than a direct fprintf; both paths terminate the process. */
{
int *v;
v=(int *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(int)));
if (!v) nrerror("Error: cannot allocate vector_int()");
return v-nl+NR_END; /* shift so v[nl] is the first valid element */
}
void free_vector_int(int *v, long nl, long nh)
/* free an int vector allocated with ivector() */
/* Undoes the nl/NR_END pointer offset applied by vector_int(); nh is unused. */
{
free((FREE_ARG) (v+nl-NR_END));
}
float *vector_float(long nl, long nh)
/* allocate a float vector with subscript range v[nl..nh] */
{
float *v;
v=(float *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(float)));
if (!v){
fprintf(stderr,"Error: cannot allocate 1-D vector\n");
exit(1);
}
return v-nl+NR_END;
}
void free_vector_float(float *v, long nl, long nh)
/* free a float vector allocated with vector() */
{
free((FREE_ARG) (v+nl-NR_END));
}
double *vector_double(long nl, long nh)
/* allocate a double vector with subscript range v[nl..nh] */
{
double *v;
v=(double *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(double)));
if (!v){
fprintf(stderr,"Error: cannot allocate 1-D vector\n");
exit(1);
}
return v-nl+NR_END;
}
void free_vector_double(double *v, long nl, long nh)
/* free a double vector allocated with vector() */
{
free((FREE_ARG) (v+nl-NR_END));
}
fcomplex *vector_fcomplex(long nl, long nh)
/* allocate a fcomplex vector with subscript range v[nl..nh] */
{
fcomplex *v;
v=(fcomplex *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(fcomplex)));
if (!v) nrerror("cannot allocate fcvector()");
return v-nl+NR_END;
}
void free_vector_fcomplex(fcomplex *v, long nl, long nh)
/* free a fcomplex vector allocated with fcvector() */
{
free((FREE_ARG) (v+nl-NR_END));
}
signed char **matrix_char(long nrl, long nrh, long ncl, long nch)
/* allocate a signed char matrix with subscript range m[nrl..nrh][ncl..nch] */
/* All rows share one contiguous buffer anchored at m[nrl]; release with
   free_matrix_char(m, nrl, nrh, ncl, nch). */
{
long i, nrow=nrh-nrl+1,ncol=nch-ncl+1;
signed char **m;
/* allocate pointers to rows */
m=(signed char **) malloc((size_t)((nrow+NR_END)*sizeof(signed char*)));
/* NOTE(review): error text says vector2d_float() -- copy-paste from the float variant */
if (!m) nrerror("Error: cannot allocate vector2d_float()");
m += NR_END;
m -= nrl; /* offset so m[nrl] is the first valid row pointer */
/* allocate rows and set pointers to them */
m[nrl]=(signed char *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(signed char)));
if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()");
m[nrl] += NR_END;
m[nrl] -= ncl; /* offset so m[nrl][ncl] is the first valid element */
for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; /* rows are consecutive slices of one buffer */
/* return pointer to array of pointers to rows */
return m;
}
void free_matrix_char(signed char **m, long nrl, long nrh, long ncl, long nch)
/* free a signed char matrix allocated by matrix() */
{
free((FREE_ARG) (m[nrl]+ncl-NR_END));
free((FREE_ARG) (m+nrl-NR_END));
}
unsigned char **matrix_unchar(long nrl, long nrh, long ncl, long nch)
/* allocate a unsigned char matrix with subscript range m[nrl..nrh][ncl..nch] */
{
long i, nrow=nrh-nrl+1,ncol=nch-ncl+1;
unsigned char **m;
/* allocate pointers to rows */
m=(unsigned char **) malloc((size_t)((nrow+NR_END)*sizeof(unsigned char*)));
if (!m) nrerror("Error: cannot allocate vector2d_float()");
m += NR_END;
m -= nrl;
/* allocate rows and set pointers to them */
m[nrl]=(unsigned char *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(unsigned char)));
if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()");
m[nrl] += NR_END;
m[nrl] -= ncl;
for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol;
/* return pointer to array of pointers to rows */
return m;
}
void free_matrix_unchar(unsigned char **m, long nrl, long nrh, long ncl, long nch)
/* free a unsigned char matrix allocated by matrix() */
{
free((FREE_ARG) (m[nrl]+ncl-NR_END));
free((FREE_ARG) (m+nrl-NR_END));
}
float **matrix_float(long nrl, long nrh, long ncl, long nch)
/* allocate a float matrix with subscript range m[nrl..nrh][ncl..nch] */
{
long i, nrow=nrh-nrl+1,ncol=nch-ncl+1;
float **m;
/* allocate pointers to rows */
m=(float **) malloc((size_t)((nrow+NR_END)*sizeof(float*)));
if (!m) nrerror("Error: cannot allocate vector2d_float()");
m += NR_END;
m -= nrl;
/* allocate rows and set pointers to them */
m[nrl]=(float *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(float)));
if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()");
m[nrl] += NR_END;
m[nrl] -= ncl;
for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol;
/* return pointer to array of pointers to rows */
return m;
}
void free_matrix_float(float **m, long nrl, long nrh, long ncl, long nch)
/* free a float matrix allocated by matrix() */
/* Frees the shared row buffer, then the row-pointer array, undoing the
   NR_END/nrl/ncl offsets applied at allocation; nrh and nch are unused. */
{
free((FREE_ARG) (m[nrl]+ncl-NR_END));
free((FREE_ARG) (m+nrl-NR_END));
}
double **matrix_double(long nrl, long nrh, long ncl, long nch)
/* allocate a double matrix with subscript range m[nrl..nrh][ncl..nch] */
{
long i, nrow=nrh-nrl+1,ncol=nch-ncl+1;
double **m;
/* allocate pointers to rows */
m=(double **) malloc((size_t)((nrow+NR_END)*sizeof(double*)));
if (!m) nrerror("Error: cannot allocate vector2d_double()");
m += NR_END;
m -= nrl;
/* allocate rows and set pointers to them */
m[nrl]=(double *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(double)));
if (!m[nrl]) nrerror("Error: cannot allocate vector2d_double()");
m[nrl] += NR_END;
m[nrl] -= ncl;
for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol;
/* return pointer to array of pointers to rows */
return m;
}
void free_matrix_double(double **m, long nrl, long nrh, long ncl, long nch)
/* free a double matrix allocated by matrix() */
{
free((FREE_ARG) (m[nrl]+ncl-NR_END));
free((FREE_ARG) (m+nrl-NR_END));
}
/****************************************************************/
/* allocating C-style arrays */
/****************************************************************/
/* Allocate a plain 0-indexed array of nc FILE* slots (contents uninitialized).
   Exits on allocation failure; release with free_array1d_FILE(). */
FILE **array1d_FILE(long nc){
FILE **fv;
fv = (FILE **)malloc(nc * sizeof(FILE *));
if(!fv){
fprintf(stderr,"Error: cannot allocate 1-D FILE array\n");
exit(1);
}
return fv;
}
void free_array1d_FILE(FILE **fv){
free(fv);
}
signed char *array1d_char(long nc){
signed char *fv;
fv = (signed char*) malloc(nc * sizeof(signed char));
if(!fv){
fprintf(stderr,"Error: cannot allocate 1-D signed char vector\n");
exit(1);
}
return fv;
}
void free_array1d_char(signed char *fv){
free(fv);
}
unsigned char *array1d_unchar(long nc){
unsigned char *fv;
fv = (unsigned char*) malloc(nc * sizeof(unsigned char));
if(!fv){
fprintf(stderr,"Error: cannot allocate 1-D unsigned char vector\n");
exit(1);
}
return fv;
}
void free_array1d_unchar(unsigned char *fv){
free(fv);
}
/* Allocate a plain 0-indexed array of nc ints (contents uninitialized).
   Exits on allocation failure; release with free_array1d_int(). */
int *array1d_int(long nc){
	int *arr;

	arr = (int *)malloc(sizeof(int) * nc);
	if (arr == NULL) {
		fprintf(stderr, "Error: cannot allocate 1-D int array\n");
		exit(1);
	}
	return arr;
}
/* Release an array obtained from array1d_int(). */
void free_array1d_int(int *fv){
free(fv);
}
float *array1d_float(long nc){
float *fv;
fv = (float*) malloc(nc * sizeof(float));
if(!fv){
fprintf(stderr,"Error: cannot allocate 1-D float vector\n");
exit(1);
}
return fv;
}
void free_array1d_float(float *fv){
free(fv);
}
/* Allocate a plain 0-indexed array of nc doubles (contents uninitialized).
   Exits on allocation failure; release with free_array1d_double(). */
double *array1d_double(long nc){
	double *arr;

	arr = (double *)malloc(sizeof(double) * nc);
	if (arr == NULL) {
		fprintf(stderr, "Error: cannot allocate 1-D double vector\n");
		exit(1);
	}
	return arr;
}
void free_array1d_double(double *fv){
free(fv);
}
/* Allocate a plain 0-indexed array of nc fcomplex values (uninitialized).
   Exits on allocation failure; release with free_array1d_fcomplex(). */
fcomplex *array1d_fcomplex(long nc){
fcomplex *fcv;
fcv = (fcomplex*) malloc(nc * sizeof(fcomplex));
if(!fcv){
fprintf(stderr,"Error: cannot allocate 1-D float complex vector\n");
exit(1);
}
return fcv;
}
void free_array1d_fcomplex(fcomplex *fcv){
free(fcv);
}
dcomplex *array1d_dcomplex(long nc){
dcomplex *fcv;
fcv = (dcomplex*) malloc(nc * sizeof(dcomplex));
if(!fcv){
fprintf(stderr,"Error: cannot allocate 1-D double complex vector\n");
exit(1);
}
return fcv;
}
void free_array1d_dcomplex(dcomplex *fcv){
free(fcv);
}
signed char **array2d_char(long nl, long nc){
/* allocate a signed char 2-D matrix */
signed char **m;
int i;
/* allocate pointers to rows */
m = (signed char **) malloc(nl * sizeof(signed char *));
if(!m){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* allocate rows */
m[0] = (signed char*) malloc(nl * nc * sizeof(signed char));
if(!m[0]){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* set pointers */
for(i = 1; i < nl; i++){
m[i] = m[i-1] + nc;
}
return m;
}
void free_array2d_char(signed char **m){
/* free a signed char matrix allocated by farray2d() */
free(m[0]);
free(m);
}
unsigned char **array2d_unchar(long nl, long nc){
/* allocate a unsigned char 2-D matrix */
unsigned char **m;
int i;
/* allocate pointers to rows */
m = (unsigned char **) malloc(nl * sizeof(unsigned char *));
if(!m){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* allocate rows */
m[0] = (unsigned char*) malloc(nl * nc * sizeof(unsigned char));
if(!m[0]){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* set pointers */
for(i = 1; i < nl; i++){
m[i] = m[i-1] + nc;
}
return m;
}
void free_array2d_unchar(unsigned char **m){
/* free a signed unchar matrix allocated by farray2d() */
free(m[0]);
free(m);
}
/* Allocate an nl-by-nc float matrix, 0-indexed as m[line][col].
   The data live in one contiguous buffer anchored at m[0]; each m[i] points
   at row i inside that buffer.  Exits on allocation failure; release with
   free_array2d_float(). */
float **array2d_float(long nl, long nc){
	float **rows;
	long r;

	/* row-pointer table */
	rows = (float **)malloc(sizeof(float *) * nl);
	if (rows == NULL) {
		fprintf(stderr, "Error: cannot allocate 2-D matrix\n");
		exit(1);
	}
	/* one contiguous data buffer for all rows */
	rows[0] = (float *)malloc(sizeof(float) * nl * nc);
	if (rows[0] == NULL) {
		fprintf(stderr, "Error: cannot allocate 2-D matrix\n");
		exit(1);
	}
	/* point each row at its slice of the buffer */
	for (r = 1; r < nl; r++)
		rows[r] = rows[0] + r * nc;
	return rows;
}
void free_array2d_float(float **m){
/* free a float matrix allocated by farray2d() */
/* m[0] is the single contiguous data buffer; m is the row-pointer table. */
free(m[0]);
free(m);
}
double **array2d_double(long nl, long nc){
/* allocate a double 2-D matrix */
double **m;
int i;
/* allocate pointers to rows */
m = (double **) malloc(nl * sizeof(double *));
if(!m){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* allocate rows */
m[0] = (double*) malloc(nl * nc * sizeof(double));
if(!m[0]){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* set pointers */
for(i = 1; i < nl; i++){
m[i] = m[i-1] + nc;
}
return m;
}
void free_array2d_double(double **m){
/* free a double matrix allocated by farray2d() */
free(m[0]);
free(m);
}
fcomplex **array2d_fcomplex(long nl, long nc){
/* allocate a fcomplex 2-D matrix */
fcomplex **m;
int i;
/* allocate pointers to rows */
m = (fcomplex **) malloc(nl * sizeof(fcomplex *));
if(!m){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* allocate rows */
m[0] = (fcomplex*) malloc(nl * nc * sizeof(fcomplex));
if(!m[0]){
fprintf(stderr,"Error: cannot allocate 2-D matrix\n");
exit(1);
}
/* set pointers */
for(i = 1; i < nl; i++){
m[i] = m[i-1] + nc;
}
return m;
}
void free_array2d_fcomplex(fcomplex **m){
/* free a fcomplex matrix allocated by fcarray2d() */
free(m[0]);
free(m);
}
/****************************************************************/
/* handling error */
/****************************************************************/
/* Print a fatal error message to stderr and terminate the process.
   Shared failure handler for the Numerical Recipes-style allocators above. */
void nrerror(char error_text[])
/* Numerical Recipes standard error handler */
{
fprintf(stderr,"Numerical Recipes run-time error...\n");
fprintf(stderr,"%s\n",error_text);
fprintf(stderr,"...now exiting to system...\n");
exit(1); /* never returns */
}

View File

@ -0,0 +1,72 @@
//////////////////////////////////////
// Cunren Liang, NASA JPL/Caltech
// Copyright 2017
//////////////////////////////////////
#include "resamp.h"
// complex operations
/* Complex multiply: return a*b. */
fcomplex cmul(fcomplex a, fcomplex b)
{
fcomplex c;
c.re=a.re*b.re-a.im*b.im;
c.im=a.im*b.re+a.re*b.im;
return c;
}
/* Complex conjugate: return z with the imaginary part negated. */
fcomplex cconj(fcomplex z)
{
fcomplex c;
c.re=z.re;
c.im = -z.im;
return c;
}
/* Complex add: return a+b. */
fcomplex cadd(fcomplex a, fcomplex b)
{
fcomplex c;
c.re=a.re+b.re;
c.im=a.im+b.im;
return c;
}
/* Complex magnitude |z|, computed hypot-style (scale by the larger component)
   to avoid overflow/underflow in the intermediate squares. */
float xcabs(fcomplex z)
{
float x,y,ans,temp;
x=fabs(z.re);
y=fabs(z.im);
if (x == 0.0)
ans=y;
else if (y == 0.0)
ans=x;
else if (x > y) {
temp=y/x; /* temp <= 1, so temp*temp cannot overflow */
ans=x*sqrt(1.0+temp*temp);
} else {
temp=x/y;
ans=y*sqrt(1.0+temp*temp);
}
return ans;
}
/* Phase (argument) of z in radians, in (-pi, pi]; returns 0 for z == 0. */
float cphs(fcomplex z){
float ans;
if(z.re == 0.0 && z.im == 0.0)
ans = 0.0;
else
ans = atan2(z.im, z.re);
return ans;
//it seems that there is no need to add the if clause
//do a test:
// printf("%12.4f, %12.4f, %12.4f, %12.4f, %12.4f\n", \
// atan2(0.0, 1.0), atan2(1.0, 0.0), atan2(0.0, -1.0), atan2(-1.0, 0.0), atan2(0.0, 0.0));
//output:
// 0.0000, 1.5708, 3.1416, -1.5708, 0.0000
}

View File

@ -0,0 +1,43 @@
//////////////////////////////////////
// Cunren Liang, NASA JPL/Caltech
// Copyright 2017
//////////////////////////////////////
#include "resamp.h"
/* fopen() wrapper: open filename with the given mode string ("rb", "wb", ...);
   print a message and exit on failure, so callers never see a NULL stream. */
FILE *openfile(char *filename, char *pattern){
FILE *fp;
fp=fopen(filename, pattern);
if (fp==NULL){
fprintf(stderr,"Error: cannot open file: %s\n", filename);
exit(1);
}
return fp;
}
/* Read exactly blocksize bytes from fp into data; exit on short read or EOF. */
void readdata(void *data, size_t blocksize, FILE *fp){
if(fread(data, blocksize, 1, fp) != 1){
fprintf(stderr,"Error: cannot read data\n");
exit(1);
}
}
/* Write exactly blocksize bytes from data to fp; exit on short write. */
void writedata(void *data, size_t blocksize, FILE *fp){
if(fwrite(data, blocksize, 1, fp) != 1){
fprintf(stderr,"Error: cannot write data\n");
exit(1);
}
}
/* Return the number of full lines in the open binary file fp, where one line
   is width elements of element_size bytes each.  Leaves fp rewound to the
   start of the file.  Division order (size / element_size / width) matches
   the original and truncates any trailing partial line. */
long file_length(FILE* fp, long width, long element_size){
	long nbytes;
	long nlines;

	fseeko(fp, 0L, SEEK_END);
	nbytes = ftello(fp);
	nlines = nbytes / element_size / width;
	rewind(fp);
	return nlines;
}

View File

@ -0,0 +1,275 @@
//////////////////////////////////////
// Cunren Liang, NASA JPL/Caltech
// Copyright 2017
//////////////////////////////////////
#include "resamp.h"
/* Smallest power of two >= a, with a floor of 2 (returns 2 for any a <= 2,
   matching the original behavior even for a == 1). */
long next_pow2(long a){
	long p;

	for (p = 2; p < a; p <<= 1)
		;
	return p;
}
/* Circularly shift the na-element array in[] by nc positions in place:
   positive nc rotates right, negative rotates left.  Implemented as |nc % na|
   single-element rotations, so cost is O(na * |nc % na|).
   Note: C's % keeps the sign of nc, so ncm is negative for negative nc. */
void circ_shift(fcomplex *in, int na, int nc){
int i;
int ncm;
ncm = nc%na;
if(ncm < 0){
for(i = 0; i < abs(ncm); i++)
left_shift(in, na);
}
else if(ncm > 0){
for(i = 0; i < ncm; i++)
right_shift(in, na);
}
else{ //ncm == 0, no need to shift
i = 0;
}
}
/* Rotate the na-element array in[] left by one position in place:
   in[0] wraps around to in[na-1].  Exits if na < 1. */
void left_shift(fcomplex *in, int na){
int i;
fcomplex x;
if(na < 1){
fprintf(stderr, "Error: array size < 1\n\n");
exit(1);
}
else if(na > 1){
x.re = in[0].re; /* save the wrapped element */
x.im = in[0].im;
for(i = 0; i <= na - 2; i++){
in[i].re = in[i+1].re;
in[i].im = in[i+1].im;
}
in[na-1].re = x.re;
in[na-1].im = x.im;
}
else{ //na==1, no need to shift
i = 0;
}
}
/* Rotate the na-element array in[] right by one position in place:
   in[na-1] wraps around to in[0].  Exits if na < 1. */
void right_shift(fcomplex *in, int na){
int i;
fcomplex x;
if(na < 1){
fprintf(stderr, "Error: array size < 1\n\n");
exit(1);
}
else if(na > 1){
x.re = in[na-1].re; /* save the wrapped element */
x.im = in[na-1].im;
for(i = na - 1; i >= 1; i--){
in[i].re = in[i-1].re;
in[i].im = in[i-1].im;
}
in[0].re = x.re;
in[0].im = x.im;
}
else{ //na==1, no need to shift
i = 0;
}
}
/* Round a to the nearest integer, rounding halves away from zero
   (e.g. 1.5 -> 2, -1.5 -> -2).  Behavior for NaN/out-of-range input is the
   usual C float-to-int conversion, same as the original. */
int roundfi(float a){
	if (a > 0)
		return (int)(a + 0.5);
	if (a < 0)
		return (int)(a - 0.5);
	return (int)a;
}
/* Fill coef[-hmn..hmn] (hmn = n*m/2) with a sampled sinc kernel:
   coef[i] = sin(pi*i/m)/(pi*i/m), coef[0] = 1.
   Note the negative indexing: coef must point at the CENTER of a buffer with
   at least n*m+1 elements (callers pass a shifted pointer). */
void sinc(int n, int m, float *coef){
int i;
int hmn;
hmn = n * m / 2;
for(i=-hmn; i<=hmn; i++){
if(i != 0){
coef[i] = sin(PI * i / m) / (PI * i / m);
//coef[i] = sin(pi * i / m) / (pi * i / m);
}
else{
coef[i] = 1.0; /* lim sinc(0) = 1 */
}
}
}
//kaiser() is equivalent to kaiser2()
//it is created to just keep the same style of sinc().
/* Fill coef[-hmn..hmn] (hmn = n*m/2) with a Kaiser window of shape parameter
   beta, normalized by bessi0(beta).  Same negative-index convention as
   sinc(): coef must point at the center of the buffer. */
void kaiser(int n, int m, float *coef, float beta){
int i;
int hmn;
float a;
hmn = n * m / 2;
for(i = -hmn; i <= hmn; i++){
a = 1.0 - 4.0 * i * i / (n * m) / (n * m);
coef[i] = bessi0(beta * sqrt(a)) / bessi0(beta);
}
}
/* Fill coef[-hn..hn] (hn = (n-1)/2) with an n-point Kaiser window of shape
   parameter beta, normalized by bessi0(beta).  n is expected odd so the
   window is centered at index 0; coef must point at the center of the buffer. */
void kaiser2(float beta, int n, float *coef){
int i;
int hn;
float a;
hn = (n - 1) / 2;
for(i = -hn; i<=hn; i++){
a = 1.0 - 4.0 * i * i / (n - 1.0) / (n - 1.0);
coef[i] = bessi0(beta * sqrt(a)) / bessi0(beta);
}
}
/* Build an nfft-point complex bandpass filter in filter[]:
   an n-tap Kaiser(beta)-windowed sinc of normalized bandwidth bw, modulated
   to normalized center frequency bc, then circularly shifted LEFT by
   |ncshift| samples (sign of ncshift is ignored).  Unused taps are zeroed.
   bw and bc appear normalized by the sampling rate (bc*2*PI*i, bw*PI*i)
   -- TODO confirm units against callers.  n is expected odd (hn = (n-1)/2). */
void bandpass_filter(float bw, float bc, int n, int nfft, int ncshift, float beta, fcomplex *filter){
int i;
float *kw;
int hn;
fcomplex bwx, bcx;
hn = (n-1)/2;
if(n > nfft){
fprintf(stderr, "Error: fft length too small!\n\n");
exit(1);
}
if(abs(ncshift) > nfft){
fprintf(stderr, "Error: fft length too small or shift too big!\n\n");
exit(1);
}
//set all the elements to zero
for(i = 0; i < nfft; i++){
filter[i].re = 0.0;
filter[i].im = 0.0;
}
//calculate kaiser window
kw = vector_float(-hn, hn); /* NR-style vector indexed -hn..hn */
kaiser2(beta, n, kw);
//calculate filter
for(i = -hn; i <= hn; i++){
bcx.re = cos(bc * 2.0 * PI * i); /* modulation to center frequency bc */
bcx.im = sin(bc * 2.0 * PI * i);
if(i == 0){
bwx.re = 1.0; /* sinc(0) = 1 */
bwx.im = 0.0;
}
else{
bwx.re = sin(bw * PI * i) / (bw * PI * i); /* sinc of bandwidth bw */
bwx.im = 0.0;
}
filter[i+hn] = cmul(bcx, bwx);
filter[i+hn].re = bw * kw[i] * filter[i+hn].re; /* apply window and bw gain */
filter[i+hn].im = bw * kw[i] * filter[i+hn].im;
}
//circularly shift filter, we shift the filter to left.
ncshift = -abs(ncshift);
circ_shift(filter, nfft, ncshift);
free_vector_float(kw, -hn, hn);
}
/* Modified Bessel function of the first kind, order zero, I0(x).
   Numerical Recipes polynomial approximation: one polynomial in (x/3.75)^2
   for |x| < 3.75, and exp(|x|)/sqrt(|x|) times a rational polynomial
   otherwise.  Used above to evaluate the Kaiser window. */
float bessi0(float x)
{
float ax,ans;
double y;
if ((ax=fabs(x)) < 3.75) {
y=x/3.75;
y*=y;
ans=1.0+y*(3.5156229+y*(3.0899424+y*(1.2067492
+y*(0.2659732+y*(0.360768e-1+y*0.45813e-2)))));
} else {
y=3.75/ax;
ans=(exp(ax)/sqrt(ax))*(0.39894228+y*(0.1328592e-1
+y*(0.225319e-2+y*(-0.157565e-2+y*(0.916281e-2
+y*(-0.2057706e-1+y*(0.2635537e-1+y*(-0.1647633e-1
+y*0.392377e-2))))))));
}
return ans;
}
#define SWAP(a,b) tempr=(a);(a)=(b);(b)=tempr
/* Numerical Recipes four1(): in-place radix-2 complex FFT.
   data[1..2*nn] holds nn complex values as interleaved (re,im) pairs and is
   1-INDEXED (callers pass data-1 for a 0-indexed buffer); nn must be a power
   of two (not checked).  isign sets the sign of the exponent (transform
   direction); no 1/nn normalization is applied in either direction. */
void four1(float data[], unsigned long nn, int isign)
{
unsigned long n,mmax,m,j,istep,i;
double wtemp,wr,wpr,wpi,wi,theta;
float tempr,tempi;
n=nn << 1;
j=1;
/* bit-reversal reordering of the input */
for (i=1;i<n;i+=2) {
if (j > i) {
SWAP(data[j],data[i]);
SWAP(data[j+1],data[i+1]);
}
m=nn;
while (m >= 2 && j > m) {
j -= m;
m >>= 1;
}
j += m;
}
/* Danielson-Lanczos: combine transforms of length mmax into 2*mmax */
mmax=2;
while (n > mmax) {
istep=mmax << 1;
theta=isign*(6.28318530717959/mmax); /* 2*pi/mmax, signed by direction */
wtemp=sin(0.5*theta);
wpr = -2.0*wtemp*wtemp; /* trig recurrence coefficients */
wpi=sin(theta);
wr=1.0;
wi=0.0;
for (m=1;m<mmax;m+=2) {
for (i=m;i<=n;i+=istep) {
j=i+mmax;
tempr=wr*data[j]-wi*data[j+1]; /* butterfly: w * data[j] */
tempi=wr*data[j+1]+wi*data[j];
data[j]=data[i]-tempr;
data[j+1]=data[i+1]-tempi;
data[i] += tempr;
data[i+1] += tempi;
}
wr=(wtemp=wr)*wpr-wi*wpi+wr; /* advance twiddle factor by recurrence */
wi=wi*wpr+wtemp*wpi+wi;
}
mmax=istep;
}
}
#undef SWAP

View File

@ -9,8 +9,8 @@
void ALOS_ldr_orbit(struct ALOS_ORB *, struct PRM *);
void calc_height_velocity(struct ALOS_ORB *, struct PRM *, double, double, double *, double *, double *, double *, double *);
void calc_dop(struct PRM *);
void cfft1d_(int *, fcomplex *, int *);
void read_data(fcomplex *, unsigned char *, int, struct PRM *);
void cfft1d_(int *, fcomplex_sio *, int *);
void read_data(fcomplex_sio *, unsigned char *, int, struct PRM *);
void null_sio_struct(struct PRM *);
void get_sio_struct(FILE *, struct PRM *);
void put_sio_struct(struct PRM, FILE *);

View File

@ -0,0 +1,168 @@
/********************************************************************************
* Creator: Rob Mellors and David T. Sandwell * (San Diego State University,
*Scripps Institution of Oceanography) * Date : 10/03/2007 *
********************************************************************************/
/********************************************************************************
* Modification history: * Date: *
* *****************************************************************************/
#include "image_sio.h"
#include "lib_functions.h"
/*
#define OUTFILE stdout
*/
/***************************************************************************/
/* Write the populated fields of a PRM structure to OUTFILE as "name = value"
   lines.  Fields still holding their NULL_INT / NULL_DOUBLE / NULL_CHAR
   sentinels (set by set_ALOS_defaults) are skipped, so only parameters that
   were actually filled in appear in the output PRM file.
   Bug fix: the stretch_r line was guarded by a test of prm.stretch_a
   (copy-paste), so stretch_r was printed/suppressed based on the wrong
   field; it now tests prm.stretch_r. */
void put_sio_struct(struct PRM prm, FILE *OUTFILE) {

	/* set by set_ALOS_defaults */
	if (prm.num_valid_az != NULL_INT)
		fprintf(OUTFILE, "num_valid_az = %d \n", prm.num_valid_az);
	if (prm.nrows != NULL_INT)
		fprintf(OUTFILE, "nrows = %d \n", prm.nrows);
	if (prm.first_line != NULL_INT)
		fprintf(OUTFILE, "first_line = %d \n", prm.first_line);
	if (strncmp(prm.deskew, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "deskew = %s \n", prm.deskew);
	if (prm.caltone != NULL_DOUBLE)
		fprintf(OUTFILE, "caltone = %lf \n", prm.caltone);
	if (prm.st_rng_bin != NULL_INT)
		fprintf(OUTFILE, "st_rng_bin = %d \n", prm.st_rng_bin);
	if (strncmp(prm.iqflip, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "Flip_iq = %s \n", prm.iqflip);
	if (strncmp(prm.offset_video, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "offset_video = %s \n", prm.offset_video);
	if (prm.az_res != NULL_DOUBLE)
		fprintf(OUTFILE, "az_res = %lf \n", prm.az_res);
	if (prm.nlooks != NULL_INT)
		fprintf(OUTFILE, "nlooks = %d \n", prm.nlooks);
	if (prm.chirp_ext != NULL_INT)
		fprintf(OUTFILE, "chirp_ext = %d \n", prm.chirp_ext);
	if (strncmp(prm.srm, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "scnd_rng_mig = %s \n", prm.srm);
	if (prm.rhww != NULL_DOUBLE)
		fprintf(OUTFILE, "rng_spec_wgt = %lf \n", prm.rhww);
	if (prm.pctbw != NULL_DOUBLE)
		fprintf(OUTFILE, "rm_rng_band = %lf \n", prm.pctbw);
	if (prm.pctbwaz != NULL_DOUBLE)
		fprintf(OUTFILE, "rm_az_band = %lf \n", prm.pctbwaz);
	if (prm.rshift != NULL_INT)
		fprintf(OUTFILE, "rshift = %d \n", prm.rshift);
	if (prm.ashift != NULL_INT)
		fprintf(OUTFILE, "ashift = %d \n", prm.ashift);
	/* fixed: test stretch_r (was stretch_a) before printing stretch_r */
	if (prm.stretch_r != NULL_DOUBLE)
		fprintf(OUTFILE, "stretch_r = %lf \n", prm.stretch_r);
	if (prm.stretch_a != NULL_DOUBLE)
		fprintf(OUTFILE, "stretch_a = %lf \n", prm.stretch_a);
	if (prm.a_stretch_r != NULL_DOUBLE)
		fprintf(OUTFILE, "a_stretch_r = %lf \n", prm.a_stretch_r);
	if (prm.a_stretch_a != NULL_DOUBLE)
		fprintf(OUTFILE, "a_stretch_a = %lf \n", prm.a_stretch_a);
	if (prm.first_sample != NULL_INT)
		fprintf(OUTFILE, "first_sample = %d \n", prm.first_sample);
	if (prm.SC_identity != NULL_INT)
		fprintf(OUTFILE, "SC_identity = %d \n", prm.SC_identity);
	if (prm.fs != NULL_DOUBLE)
		fprintf(OUTFILE, "rng_samp_rate = %lf \n", prm.fs);

	/* from read_ALOS_data */
	if (strncmp(prm.input_file, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "input_file = %s \n", prm.input_file);
	if (prm.num_rng_bins != NULL_INT)
		fprintf(OUTFILE, "num_rng_bins = %d \n", prm.num_rng_bins);
	if (prm.bytes_per_line != NULL_INT)
		fprintf(OUTFILE, "bytes_per_line = %d \n", prm.bytes_per_line);
	if (prm.good_bytes != NULL_INT)
		fprintf(OUTFILE, "good_bytes_per_line = %d \n", prm.good_bytes);
	if (prm.prf != NULL_DOUBLE)
		fprintf(OUTFILE, "PRF = %lf \n", prm.prf);
	if (prm.pulsedur != NULL_DOUBLE)
		fprintf(OUTFILE, "pulse_dur = %e \n", prm.pulsedur);
	if (prm.near_range != NULL_DOUBLE)
		fprintf(OUTFILE, "near_range = %lf \n", prm.near_range);
	if (prm.num_lines != NULL_INT)
		fprintf(OUTFILE, "num_lines = %d \n", prm.num_lines);
	if (prm.num_patches != NULL_INT)
		fprintf(OUTFILE, "num_patches = %d \n", prm.num_patches);
	if (prm.SC_clock_start != NULL_DOUBLE)
		fprintf(OUTFILE, "SC_clock_start = %16.10lf \n", prm.SC_clock_start);
	if (prm.SC_clock_stop != NULL_DOUBLE)
		fprintf(OUTFILE, "SC_clock_stop = %16.10lf \n", prm.SC_clock_stop);
	//if (prm.clock_start != NULL_DOUBLE)
	//	fprintf(OUTFILE, "clock_start = %16.12lf \n", prm.clock_start);
	//if (prm.clock_stop != NULL_DOUBLE)
	//	fprintf(OUTFILE, "clock_stop = %16.12lf \n", prm.clock_stop);
	if (strncmp(prm.led_file, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "led_file = %s \n", prm.led_file);

	/* from read_ALOS_ldrfile */
	if (strncmp(prm.date, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "date = %.6s \n", prm.date);
	if (strncmp(prm.orbdir, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "orbdir = %.1s \n", prm.orbdir);
	//if (strncmp(prm.lookdir, NULL_CHAR, 8) != 0)
	//	fprintf(OUTFILE, "lookdir = %.1s \n", prm.lookdir);
	if (prm.lambda != NULL_DOUBLE)
		fprintf(OUTFILE, "radar_wavelength = %lg \n", prm.lambda);
	if (prm.chirp_slope != NULL_DOUBLE)
		fprintf(OUTFILE, "chirp_slope = %lg \n", prm.chirp_slope);
	/* NOTE: rng_samp_rate is intentionally left duplicated (also written
	   above from the defaults section), matching the original output */
	if (prm.fs != NULL_DOUBLE)
		fprintf(OUTFILE, "rng_samp_rate = %lf \n", prm.fs);
	if (prm.xmi != NULL_DOUBLE)
		fprintf(OUTFILE, "I_mean = %lg \n", prm.xmi);
	if (prm.xmq != NULL_DOUBLE)
		fprintf(OUTFILE, "Q_mean = %lg \n", prm.xmq);
	if (prm.vel != NULL_DOUBLE)
		fprintf(OUTFILE, "SC_vel = %lf \n", prm.vel);
	if (prm.RE != NULL_DOUBLE)
		fprintf(OUTFILE, "earth_radius = %lf \n", prm.RE);
	if (prm.ra != NULL_DOUBLE)
		fprintf(OUTFILE, "equatorial_radius = %lf \n", prm.ra);
	if (prm.rc != NULL_DOUBLE)
		fprintf(OUTFILE, "polar_radius = %lf \n", prm.rc);
	if (prm.ht != NULL_DOUBLE)
		fprintf(OUTFILE, "SC_height = %lf \n", prm.ht);
	if (prm.ht_start != NULL_DOUBLE)
		fprintf(OUTFILE, "SC_height_start = %lf \n", prm.ht_start);
	if (prm.ht_end != NULL_DOUBLE)
		fprintf(OUTFILE, "SC_height_end = %lf \n", prm.ht_end);
	if (prm.fd1 != NULL_DOUBLE)
		fprintf(OUTFILE, "fd1 = %lf \n", prm.fd1);
	if (prm.fdd1 != NULL_DOUBLE)
		fprintf(OUTFILE, "fdd1 = %12.8lf \n", prm.fdd1);
	if (prm.fddd1 != NULL_DOUBLE)
		fprintf(OUTFILE, "fddd1 = %lf \n", prm.fddd1);

	/* from calc_baseline */
	/*
	 if (prm.rshift != NULL_INT) fprintf(OUTFILE, "rshift = %d
	 \n",prm.rshift); if (prm.ashift != NULL_INT) fprintf(OUTFILE, "ashift  =
	 %d\n",prm.ashift);
	 */
	if (prm.sub_int_r != NULL_DOUBLE)
		fprintf(OUTFILE, "sub_int_r = %f \n", prm.sub_int_r);
	if (prm.sub_int_a != NULL_DOUBLE)
		fprintf(OUTFILE, "sub_int_a = %f \n", prm.sub_int_a);
	if (prm.bpara != NULL_DOUBLE)
		fprintf(OUTFILE, "B_parallel = %f \n", prm.bpara);
	if (prm.bperp != NULL_DOUBLE)
		fprintf(OUTFILE, "B_perpendicular = %f \n", prm.bperp);
	if (prm.baseline_start != NULL_DOUBLE)
		fprintf(OUTFILE, "baseline_start = %f \n", prm.baseline_start);
	if (prm.alpha_start != NULL_DOUBLE)
		fprintf(OUTFILE, "alpha_start = %f \n", prm.alpha_start);
	if (prm.baseline_end != NULL_DOUBLE)
		fprintf(OUTFILE, "baseline_end = %f \n", prm.baseline_end);
	if (prm.alpha_end != NULL_DOUBLE)
		fprintf(OUTFILE, "alpha_end = %f \n", prm.alpha_end);

	/* from sarp */
	if (strncmp(prm.SLC_file, NULL_CHAR, 8) != 0)
		fprintf(OUTFILE, "SLC_file = %s \n", prm.SLC_file);
	//if (strncmp(prm.dtype, NULL_CHAR, 8) != 0)
	//	fprintf(OUTFILE, "dtype = %s \n", prm.dtype);
	//if (prm.SLC_scale != NULL_DOUBLE)
	//	fprintf(OUTFILE, "SLC_scale = %f \n", prm.SLC_scale);
}
/***************************************************************************/

View File

@ -22,6 +22,11 @@
* 15-Apr-2010 Replaced ALOS identifier with ALOSE Jeff Bytof *
**************************************************************************/
/********************************************************************************
This program has been upgraded to handle the ALOS-1 PRF change issue.
BUT HAS NOT BEEN TESTED YET!!!
*********************************************************************************/
/*
the data header information is read into the structure dfd
the line prefix information is read into sdr
@ -36,7 +41,8 @@ SC_clock_start SC_clock_stop
#include "image_sio.h"
#include "lib_functions.h"
#define ZERO_VALUE (char)(63 + rand() % 2)
#define clip127(A) (((A) > 127) ? 127 : (((A) < 0) ? 0 : A))
/*
#define znew (int) (z=36969*(z&65535)+(z>>16))
typedef unsigned long UL;
@ -54,11 +60,12 @@ int assign_sardata_params_ALOSE(struct PRM *, int, int *, int *);
void swap_ALOS_data_info(struct sardata_info_ALOSE *sdr);
void settable(unsigned long);
void print_params(struct PRM *prm);
int check_shift(struct PRM *, int *, int *, int *, int);
int check_shift(struct PRM *, int *, int *, int *, int, int);
int set_file_position(FILE *, long *, int);
int reset_params(struct PRM *prm, long *, int *, int *);
int fill_shift_data(int, int, int, int, int, char *, char *, FILE *);
int handle_prf_change_ALOSE(struct PRM *, FILE *, long *, int);
void change_dynamic_range(char *data, long length);
static struct sardata_record r1;
static struct sardata_descriptor_ALOSE dfd;
@ -74,11 +81,13 @@ struct sardata_info_ALOSE
SARDATA__WCS_ALOSE
SARDATA_RVL_ALOSE(SP)
*/
long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte_offset) {
long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte_offset, struct resamp_info *rspi, int nPRF) {
char *data_fbd, *data, *shift_data;
int record_length0; /* length of record read at start of file */
int record_length1; /* length of record read in file */
int start_sdr_rec_len = 0; /* sdr record length for first record */
int slant_range_old = 0; /* slant range of previous record */
int line_suffix_size; /* number of bytes after data */
int data_length; /* bytes of data */
int k, n, m, ishift, shift, shift0;
@ -91,6 +100,13 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
if (verbose) fprintf(stderr,".... reading header \n");
//here we still get sdr from the first data line no matter whether prf changes.
//this sdr is used to initialize record_length0 in assign_sardata_params, which
//is used at line 152 to check if record_length changed.
//I think we should get sdr from first prf-change data line for the output of prf-change file.
//Cunren Liang. 02-DEC-2019
/* read header information */
read_sardata_info_ALOSE(imagefile, prm, &header_size, &line_prefix_size);
if (verbose) fprintf(stderr,".... reading header %d %d\n", header_size, line_prefix_size);
@ -115,7 +131,7 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
shift0 = 0;
n = 1;
m = 0;
m = 2;//first line sequence_number
/* read the rest of the file */
while ( (fread((void *) &sdr,sizeof(struct sardata_info_ALOSE), 1, imagefile)) == 1 ) {
@ -124,9 +140,26 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
/* checks for little endian/ big endian */
if (swap) swap_ALOS_data_info(&sdr);
if (n == 2)
//rspi->frame_counter_start[nPRF] = sdr.frame_counter;
//unfortunately restec format does not have this info, so we are not able to adjust time
rspi->frame_counter_start[nPRF] = 0;
/* if this is partway through the file due to prf change, reset sequence, PRF, and near_range */
if (n == 2)
start_sdr_rec_len = sdr.record_length;
if ((*byte_offset > 0) && (n == 2)) reset_params(prm, byte_offset, &n, &m);
if (sdr.record_length != start_sdr_rec_len) {
printf(" ***** warning sdr.record_length error %d \n", sdr.record_length);
sdr.record_length = start_sdr_rec_len;
sdr.PRF = prm->prf;
sdr.slant_range = slant_range_old;
}
if (sdr.sequence_number != n) printf(" missing line: n, seq# %d %d \n", n, sdr.sequence_number);
/* check for changes in record_length and PRF */
@ -136,11 +169,15 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
/* if prf changes, close file and set byte_offset */
if ((sdr.PRF) != prm->prf) {
handle_prf_change_ALOSE(prm, imagefile, byte_offset, n);
n-=1;
break;
}
//rspi->frame_counter_end[nPRF] = sdr.frame_counter;
//unfortunately restec format does not have this info, so we are not able to adjust time
rspi->frame_counter_end[nPRF] = 0;
/* check shift to see if it varies from beginning or from command line value */
check_shift(prm, &shift, &ishift, &shift0, record_length1);
check_shift(prm, &shift, &ishift, &shift0, record_length1, 1);
if ((verbose) && (n/2000.0 == n/2000)) {
fprintf(stderr," Working on line %d prf %f record length %d slant_range %d \n"
@ -151,6 +188,7 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
if ( fread ((char *) data, record_length1, (size_t) 1, imagefile) != 1 ) break;
data_length = record_length1;
slant_range_old = sdr.slant_range;
/* write line header to output data */
/* PSA - turning off headers
@ -165,6 +203,7 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
}
/* write fbd data */
if (shift == 0) {
change_dynamic_range(data_fbd, data_length/2);
fwrite((char *) data_fbd, data_length/2, 1, outfile);
} else if (shift != 0) {
fill_shift_data(shift, ishift, data_length/2, line_suffix_size, record_length1, data_fbd, shift_data, outfile);
@ -173,6 +212,7 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
else {
/* write fbs data */
if (shift == 0) {
change_dynamic_range(data, data_length);
fwrite((char *) data, data_length, 1, outfile);
} else if (shift != 0) {
fill_shift_data(shift, ishift, data_length, line_suffix_size, record_length1, data, shift_data, outfile);
@ -180,11 +220,19 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
}
}
//we are not writing out line prefix data, need to correct these parameters
//as they are used in doppler computation.
prm->first_sample = 0;
prm->bytes_per_line -= line_prefix_size;
prm->good_bytes -= line_prefix_size;
//this is the sdr of the first prf-change data line, should seek back to get last sdr to be used here.
/* calculate end time */
prm->SC_clock_stop = get_clock_ALOSE(sdr, tbias);
/* m is non-zero only in the event of a prf change */
prm->num_lines = n - m - 1;
//not correct if PRF changes, so I updated it here.
prm->num_lines = n - m + 1;
prm->num_patches = (int)((1.0*n)/(1.0*prm->num_valid_az));
if (prm->num_lines == 0) prm->num_lines = 1;
@ -194,6 +242,15 @@ long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byt
prm->prf = 1.e3/pri;
prm->xmi = 63.5;
prm->xmq = 63.5;
rspi->prf[nPRF] = prm->prf;
rspi->SC_clock_start[nPRF] = prm->SC_clock_start;
rspi->num_lines[nPRF] = prm->num_lines;
rspi->num_bins[nPRF] = prm->bytes_per_line/(2*sizeof(char));
if (verbose) print_params(prm);
free(data);
@ -321,14 +378,14 @@ double get_clock();
/***************************************************************************/
int handle_prf_change_ALOSE(struct PRM *prm, FILE *imagefile, long *byte_offset, int n)
{
prm->num_lines = n;
//prm->num_lines = n;
fseek(imagefile, -1*sizeof(struct sardata_info_ALOSE), SEEK_CUR);
*byte_offset = ftell(imagefile);
printf(" *** PRF changed from %lf to %lf at line %d (byte %ld)\n", (0.001*prm->prf),(0.001*sdr.PRF), n, *byte_offset);
printf(" end: PRF changed from %lf to %lf at line %d \n", (0.001*prm->prf),(0.001*sdr.PRF), n);
printf(" *** PRF changed from %lf to %lf at line %d (byte %ld)\n", (0.001*prm->prf),(0.001*sdr.PRF), n-1, *byte_offset);
// printf(" end: PRF changed from %lf to %lf at line %d \n", (0.001*prm->prf),(0.001*sdr.PRF), n);
return(EXIT_SUCCESS);
}

View File

@ -47,7 +47,8 @@ SC_clock_start SC_clock_stop
#include "image_sio.h"
#include "lib_functions.h"
#define ZERO_VALUE (char)(63 + rand() % 2)
#define clip127(A) (((A) > 127) ? 127 : (((A) < 0) ? 0 : A))
#define znew (int) (z=36969*(z&65535)+(z>>16))
typedef unsigned long UL;
static UL z=362436069, t[256];
@ -61,21 +62,24 @@ void swap_ALOS_data_info(struct sardata_info *sdr);
long read_sardata_info(FILE *, struct PRM *, int *, int *);
void print_params(struct PRM *prm);
int assign_sardata_params(struct PRM *, int, int *, int *);
int check_shift(struct PRM *, int *, int *, int *, int);
int check_shift(struct PRM *, int *, int *, int *, int, int);
int set_file_position(FILE *, long *, int);
int reset_params(struct PRM *prm, long *, int *, int *);
int fill_shift_data(int, int, int, int, int, char *, char *, FILE *);
int handle_prf_change(struct PRM *, FILE *, long *, int);
void change_dynamic_range(char *data, long length);
static struct sardata_record r1;
static struct sardata_descriptor dfd;
static struct sardata_info sdr;
long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte_offset) {
long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte_offset, struct resamp_info *rspi, int nPRF) {
char *data, *shift_data;
int record_length0; /* length of record read at start of file */
int record_length1; /* length of record read in file */
int start_sdr_rec_len = 0; /* sdr record length for first record */
int slant_range_old = 0; /* slant range of previous record */
int line_suffix_size; /* number of bytes after data */
int data_length; /* bytes of data */
int n, m, ishift, shift, shift0, npatch_max;
@ -87,6 +91,13 @@ long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte
if (debug) fprintf(stderr,".... reading header \n");
//here we still get sdr from the first data line no matter whether prf changes.
//this sdr is used to initialize record_length0 in assign_sardata_params, which
//is used at line 152 to check if record_length changed.
//I think we should get sdr from first prf-change data line for the output of prf-change file.
//Cunren Liang. 02-DEC-2019
/* read header information */
read_sardata_info(imagefile, prm, &header_size, &line_prefix_size);
@ -111,7 +122,7 @@ long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte
shift0 = 0;
n = 1;
m = 0;
m = 2;//first line sequence_number
/* read the rest of the file */
while ( (fread((void *) &sdr,sizeof(struct sardata_info), 1, imagefile)) == 1 ) {
@ -120,10 +131,29 @@ long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte
/* checks for little endian/ big endian */
if (swap) swap_ALOS_data_info(&sdr);
/* if this is partway through the file due to prf change, reset sequence, PRF, and near_range */
if ((*byte_offset > 0) && (n == 2)) reset_params(prm, byte_offset, &n, &m);
if (sdr.sequence_number != n) printf(" missing line: n, seq# %d %d \n", n, sdr.sequence_number);
if (n == 2)
rspi->frame_counter_start[nPRF] = sdr.frame_counter;
/* if this is partway through the file due to prf change, reset sequence,
* PRF, and near_range */
if (n == 2)
start_sdr_rec_len = sdr.record_length;
if ((*byte_offset > 0) && (n == 2))
reset_params(prm, byte_offset, &n, &m);
if (sdr.record_length != start_sdr_rec_len) {
printf(" ***** warning sdr.record_length error %d \n", sdr.record_length);
sdr.record_length = start_sdr_rec_len;
sdr.PRF = prm->prf;
sdr.slant_range = slant_range_old;
}
if (sdr.sequence_number != n)
printf(" missing line: n, seq# %d %d \n", n, sdr.sequence_number);
/* check for changes in record_length and PRF */
record_length1 = sdr.record_length - line_prefix_size;
@ -132,11 +162,13 @@ long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte
/* if prf changes, close file and set byte_offset */
if ((sdr.PRF) != prm->prf) {
handle_prf_change(prm, imagefile, byte_offset, n);
n-=1;
break;
}
rspi->frame_counter_end[nPRF] = sdr.frame_counter;
/* check shift to see if it varies from beginning or from command line value */
check_shift(prm, &shift, &ishift, &shift0, record_length1);
check_shift(prm, &shift, &ishift, &shift0, record_length1, 0);
if ((verbose) && (n/2000.0 == n/2000)) {
fprintf(stderr," Working on line %d prf %f record length %d slant_range %d \n"
@ -147,13 +179,15 @@ long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte
if ( fread ((char *) data, record_length1, (size_t) 1, imagefile) != 1 ) break;
data_length = record_length1;
slant_range_old = sdr.slant_range;
/* write line header to output data */
/* PSA turning off headers
fwrite((void *) &sdr, line_prefix_size, 1, outfile); */
//header is not written to output
//fwrite((void *) &sdr, line_prefix_size, 1, outfile);
/* write data */
if (shift == 0) {
change_dynamic_range(data, data_length);
fwrite((char *) data, data_length, 1, outfile);
/* if data is shifted, fill in with data values of NULL_DATA at start or end*/
} else if (shift != 0) {
@ -161,13 +195,21 @@ long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte
}
}
//we are not writing out line prefix data, need to correct these parameters
//as they are used in doppler computation.
prm->first_sample = 0;
prm->bytes_per_line -= line_prefix_size;
prm->good_bytes -= line_prefix_size;
/* calculate end time and fix prf */
prm->prf = 0.001*prm->prf;
//this is the sdr of the first prf-change data line, should seek back to get last sdr to be used here.
prm->SC_clock_stop = get_clock(sdr, tbias);
/* m is non-zero only in the event of a prf change */
prm->num_lines = n - m - 1;
//not correct if PRF changes, so I updated it here.
prm->num_lines = n - m + 1;
/* calculate the maximum number of patches and use that if the default is set to 1000 */
npatch_max = (int)((1.0*n)/(1.0*prm->num_valid_az));
@ -175,6 +217,15 @@ long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte
if (prm->num_lines == 0) prm->num_lines = 1;
prm->xmi = 63.5;
prm->xmq = 63.5;
rspi->prf[nPRF] = prm->prf;
rspi->SC_clock_start[nPRF] = prm->SC_clock_start;
rspi->num_lines[nPRF] = prm->num_lines;
rspi->num_bins[nPRF] = prm->bytes_per_line/(2*sizeof(char));
if (verbose) print_params(prm);
free(data);
@ -293,7 +344,7 @@ double get_clock();
return(EXIT_SUCCESS);
}
/***************************************************************************/
int check_shift(struct PRM *prm, int *shift, int *ishift, int *shift0, int record_length1)
int check_shift(struct PRM *prm, int *shift, int *ishift, int *shift0, int record_length1, int ALOS_format)
{
*shift = 2*floor(0.5 + (sdr.slant_range - prm->near_range)/(0.5*SOL/prm->fs));
*ishift = abs(*shift);
@ -304,7 +355,13 @@ int check_shift(struct PRM *prm, int *shift, int *ishift, int *shift0, int recor
}
if(*shift != *shift0) {
printf(" near_range, shift = %d %d \n", sdr.slant_range, *shift);
if(ALOS_format==0)
printf(" near_range, shift = %d %d , at frame_counter: %d, line number: %d\n", sdr.slant_range, *shift, sdr.frame_counter, sdr.sequence_number-1);
if(ALOS_format==1)
printf(" near_range, shift = %d %d\n", sdr.slant_range, *shift);
*shift0 = *shift;
}
@ -324,21 +381,21 @@ int set_file_position(FILE *imagefile, long *byte_offset, int header_size)
return(EXIT_SUCCESS);
}
/***************************************************************************/
int reset_params(struct PRM *prm, long *byte_offset, int *n, int *m)
{
double get_clock();
int reset_params(struct PRM *prm, long *byte_offset, int *n, int *m) {
double get_clock();
prm->SC_clock_start = get_clock(sdr, tbias);
prm->prf = sdr.PRF;
prm->near_range = sdr.slant_range;
//comment out so that all data files with different prfs can be aligned at the same starting range
//prm->near_range = sdr.slant_range;
*n = sdr.sequence_number;
*m = *n;
*byte_offset = 0;
if (verbose) {
fprintf(stderr," new parameters: \n sequence number %d \n PRF %f\n near_range %lf\n",
*n, 0.001*prm->prf,prm->near_range);
}
return(EXIT_SUCCESS);
fprintf(stderr, " new parameters: \n sequence number %d \n PRF %f\n near_range %lf\n", *n, 0.001 * prm->prf,
prm->near_range);
}
return (EXIT_SUCCESS);
}
/***************************************************************************/
int fill_shift_data(int shift, int ishift, int data_length,
@ -359,6 +416,7 @@ int k;
}
/* write the shifted data out */
change_dynamic_range(shift_data, data_length);
fwrite((char *) shift_data, data_length, 1, outfile);
return(EXIT_SUCCESS);
@ -366,7 +424,7 @@ int k;
/***************************************************************************/
int handle_prf_change(struct PRM *prm, FILE *imagefile, long *byte_offset, int n)
{
prm->num_lines = n;
//prm->num_lines = n;
/* skip back to beginning of the line */
fseek(imagefile, -1*sizeof(struct sardata_info), SEEK_CUR);
@ -375,9 +433,34 @@ int handle_prf_change(struct PRM *prm, FILE *imagefile, long *byte_offset, int n
*byte_offset = ftell(imagefile);
/* tell the world */
printf(" *** PRF changed from %lf to %lf at line %d (byte %ld)\n", (0.001*prm->prf),(0.001*sdr.PRF), n, *byte_offset);
printf(" end: PRF changed from %lf to %lf at line %d \n", (0.001*prm->prf),(0.001*sdr.PRF), n);
printf(" *** PRF changed from %lf to %lf at line %d (byte %ld)\n", (0.001*prm->prf),(0.001*sdr.PRF), n-1, *byte_offset);
// printf(" end: PRF changed from %lf to %lf at line %d \n", (0.001*prm->prf),(0.001*sdr.PRF), n);
return(EXIT_SUCCESS);
}
/***************************************************************************/
//Stretch ALOS 5-bit samples (I/Q mean 15.5) by a factor of 2 and re-center them
//on a mean of ~63.5: ZERO_VALUE is 63 or 64 at random (see its #define), so the
//output mean stays 63.5 without biasing toward one integer; clip127 clamps the
//result to [0, 127].
void change_dynamic_range(char *data, long length){
long i;

for(i = 0; i < length; i++)
//THIS SHOULD NOT AFFECT DOPPLER COMPUTATION (SUCH AS IN calc_dop.c), BECAUSE
// 1. IQ BIAS IS REMOVED BEFORE COMPUTATION OF DOPPLER.
// 2. 2.0 WILL BE CANCELLED OUT IN atan2f().
// 3. actual computation results also verified this (even if there is a difference, it is about 0.* Hz)
//data[i] = (unsigned char)clip127(rintf(2. * (data[i] - 15.5) + 63.5));
data[i] = (unsigned char)clip127(rintf(2.0 * (data[i] - 15.5) + ZERO_VALUE));
}

View File

@ -0,0 +1,106 @@
//////////////////////////////////////
// Cunren Liang, NASA JPL/Caltech
// Copyright 2017
//////////////////////////////////////
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#define NR_END 1
#define FREE_ARG char*
#define PI 3.1415926535897932384626433832795028841971693993751058
//single-precision complex sample (re + j*im)
typedef struct {
float re;
float im;
} fcomplex;
//double-precision complex sample (re + j*im)
typedef struct {
double re;
double im;
} dcomplex;
//allocate arrays
signed char *vector_char(long nl, long nh);
void free_vector_char(signed char *v, long nl, long nh);
unsigned char *vector_unchar(long nl, long nh);
void free_vector_unchar(unsigned char *v, long nl, long nh);
int *vector_int(long nl, long nh);
void free_vector_int(int *v, long nl, long nh);
float *vector_float(long nl, long nh);
void free_vector_float(float *v, long nl, long nh);
double *vector_double(long nl, long nh);
void free_vector_double(double *v, long nl, long nh);
fcomplex *vector_fcomplex(long nl, long nh);
void free_vector_fcomplex(fcomplex *v, long nl, long nh);
signed char **matrix_char(long nrl, long nrh, long ncl, long nch);
void free_matrix_char(signed char **m, long nrl, long nrh, long ncl, long nch);
unsigned char **matrix_unchar(long nrl, long nrh, long ncl, long nch);
void free_matrix_unchar(unsigned char **m, long nrl, long nrh, long ncl, long nch);
float **matrix_float(long nrl, long nrh, long ncl, long nch);
void free_matrix_float(float **m, long nrl, long nrh, long ncl, long nch);
double **matrix_double(long nrl, long nrh, long ncl, long nch);
void free_matrix_double(double **m, long nrl, long nrh, long ncl, long nch);
//allocate C-style arrays
FILE **array1d_FILE(long nc);
void free_array1d_FILE(FILE **fv);
signed char *array1d_char(long nc);
void free_array1d_char(signed char *fv);
unsigned char *array1d_unchar(long nc);
void free_array1d_unchar(unsigned char *fv);
int *array1d_int(long nc);
void free_array1d_int(int *fv);
float *array1d_float(long nc);
void free_array1d_float(float *fv);
double *array1d_double(long nc);
void free_array1d_double(double *fv);
fcomplex *array1d_fcomplex(long nc);
void free_array1d_fcomplex(fcomplex *fcv);
dcomplex *array1d_dcomplex(long nc);
void free_array1d_dcomplex(dcomplex *fcv);
signed char **array2d_char(long nl, long nc);
void free_array2d_char(signed char **m);
unsigned char **array2d_unchar(long nl, long nc);
void free_array2d_unchar(unsigned char **m);
float **array2d_float(long nl, long nc);
void free_array2d_float(float **m);
double **array2d_double(long nl, long nc);
void free_array2d_double(double **m);
fcomplex **array2d_fcomplex(long nl, long nc);
void free_array2d_fcomplex(fcomplex **m);
//handling error
void nrerror(char error_text[]);
//complex operations
fcomplex cmul(fcomplex a, fcomplex b);
fcomplex cconj(fcomplex z);
fcomplex cadd(fcomplex a, fcomplex b);
float xcabs(fcomplex z);
float cphs(fcomplex z);
//functions
long next_pow2(long a);
void circ_shift(fcomplex *in, int na, int nc);
void left_shift(fcomplex *in, int na);
void right_shift(fcomplex *in, int na);
int roundfi(float a);
void sinc(int n, int m, float *coef);
void kaiser(int n, int m, float *coef, float beta);
void kaiser2(float beta, int n, float *coef);
void bandpass_filter(float bw, float bc, int n, int nfft, int ncshift, float beta, fcomplex *filter);
float bessi0(float x);
void four1(float data[], unsigned long nn, int isign);
//file operations
FILE *openfile(char *filename, char *pattern);
void readdata(void *data, size_t blocksize, FILE *fp);
void writedata(void *data, size_t blocksize, FILE *fp);
long file_length(FILE* fp, long width, long element_size);

View File

@ -0,0 +1,246 @@
//////////////////////////////////////
// Cunren Liang
// California Institute of Technology
// Copyright 2019
//////////////////////////////////////
//this program is tested against resamp.c, the outputs of the two are exactly the same.
#include "resamp.h"
//ALOS I or Q mean = 15.5, so get 15 or 16 randomly here
//#define ZERO_VALUE (char)(15 + rand() % 2)
//I changed the dynamic range when reading data
//ALOS I or Q mean = 63.5, so get 63 or 64 randomly here
#define ZERO_VALUE (char)(63 + rand() % 2)
typedef struct {
char re;
char im;
} char_complex;
char_complex *array1d_char_complex(long nc);
void free_array1d_char_complex(char_complex *fcv);
void normalize_kernel(float *kernel, long start_index, long end_index);
//Resample a complex SLC in the azimuth (along-track) direction with an
//m-times oversampled, kaiser-windowed sinc kernel. On the default path
//(interp_method == 0) the doppler carrier is removed before the kernel sum
//and re-applied at the fractional azimuth offset afterwards.
//
//  slc2:     input SLC file name (char complex, nrg samples per line, naz2 lines)
//  rslc2:    output resampled SLC file name (char complex, nrg samples per line, naz1 lines)
//  nrg:      number of range samples per line
//  naz1:     number of azimuth lines of the output
//  naz2:     number of azimuth lines of the input
//  prf:      pulse repetition frequency (Hz)
//  dopcoeff: doppler centroid (Hz) as a cubic polynomial of range sample index
//  azcoef:   azimuth offset polynomial: offset = azcoef[0] + line * azcoef[1]
//  n:        number of kernel taps; must be odd and >= 3
//  beta:     kaiser window parameter
//
//Returns 0 on success; exits on invalid n or via the openfile()/allocation
//helpers on error. The whole input file is held in memory (naz2 * nrg fcomplex).
int resamp_azimuth(char *slc2, char *rslc2, int nrg, int naz1, int naz2, double prf, double *dopcoeff, double *azcoef, int n, double beta){
int i;
int verbose = 0;

if(verbose){
printf("\n\ninput parameters:\n");
printf("slc2: %s\n", slc2);
printf("rslc2: %s\n", rslc2);
printf("nrg: %d\n", nrg);
printf("naz1: %d\n", naz1);
printf("naz2: %d\n\n", naz2);
printf("prf: %f\n\n", prf);
for(i = 0; i < 4; i++){
printf("dopcoeff[%d]: %e\n", i, dopcoeff[i]);
}
printf("\n");
for(i = 0; i < 2; i++){
printf("azcoef[%d]: %e\n", i, azcoef[i]);
}
printf("\n");
}

FILE *slc2fp;
FILE *rslc2fp;
int m;
int interp_method;
int edge_method;
float azpos;
float azoff;
float az2;
int azi2;
float azf;
int azfn;
int hnm;
int hn;
float *sincc;
float *kaiserc;
float *kernel;
float *azkernel;
fcomplex *azkernel_fc;
fcomplex *rgrs;
fcomplex *azca;
fcomplex *rgrsb;
fcomplex *azrs;
char_complex *inl;
char_complex *outl;
float *dop;
float dopx;
fcomplex **inb;
//scratch variables (k2, tmp1, zero_flag, ftmp2 and fctmp2 are currently unused)
int j, k, k1, k2;
int tmp1, tmp2;
int zero_flag;
float ftmp1, ftmp2;
fcomplex fctmp1, fctmp2;

//m: kernel oversampling factor; fractional shifts are quantized to 1/m sample
m = 10000;
//interp_method 0: demodulate doppler, sum with a real kernel, re-apply carrier
//interp_method else: fold the carrier into a complex kernel
interp_method = 0;
//edge_method 0: zero the output line unless all n taps fall inside the input
//edge_method 1: zero the output line unless the center sample falls inside
//edge_method 2: zero the output line only when no tap overlaps the input
edge_method = 2;

if((n % 2 == 0) || (n < 3)){
fprintf(stderr, "number of samples to be used in the resampling must be odd, and larger or equal to than 3\n");
exit(1);
}

slc2fp = openfile(slc2, "rb");
rslc2fp = openfile(rslc2, "wb");

//hn: half kernel length in samples; hnm: half length of the oversampled tables
hn = n / 2;
hnm = n * m / 2;

sincc = vector_float(-hnm, hnm);
kaiserc = vector_float(-hnm, hnm);
kernel = vector_float(-hnm, hnm);
azkernel = vector_float(-hn, hn);
azkernel_fc = vector_fcomplex(-hn, hn);
rgrs = vector_fcomplex(-hn, hn);
azca = vector_fcomplex(-hn, hn);
rgrsb = vector_fcomplex(-hn, hn);
azrs = array1d_fcomplex(nrg);
inl = array1d_char_complex(nrg);
outl = array1d_char_complex(nrg);
dop = array1d_float(nrg);
inb = array2d_fcomplex(naz2, nrg);

//windowed interpolation kernel = kaiser window * sinc, oversampled by m
sinc(n, m, sincc);
kaiser(n, m, kaiserc, beta);
for(i = -hnm; i <= hnm; i++)
kernel[i] = kaiserc[i] * sincc[i];

//doppler centroid (Hz) per range sample from the cubic polynomial
for(i = 0; i < nrg; i++){
dop[i] = dopcoeff[0] + dopcoeff[1] * i + dopcoeff[2] * i * i + dopcoeff[3] * i * i * i;
if(verbose){
if(i % 500 == 0)
printf("range sample: %5d, doppler centroid frequency: %8.2f Hz\n", i, dop[i]);
}
}

//read the whole input SLC into memory, promoting char complex to float complex
for(i = 0; i < naz2; i++){
readdata((char_complex *)inl, (size_t)nrg * sizeof(char_complex), slc2fp);
for(j =0; j < nrg; j++){
inb[i][j].re = inl[j].re;
inb[i][j].im = inl[j].im;
}
}

for(i = 0; i < naz1; i++){

if((i + 1) % 100 == 0)
fprintf(stderr,"processing line: %6d of %6d\r", i+1, naz1);

for(j = 0; j < nrg; j++){
azrs[j].re = 0.0;
azrs[j].im = 0.0;
}

//map output line i to input azimuth coordinate az2 = i + offset, split into
//integer part azi2, fractional part azf, and the oversampled-kernel shift azfn
azpos = i;
azoff = azcoef[0] + azpos * azcoef[1];
az2 = i + azoff;
azi2 = roundfi(az2);
azf = az2 - azi2;
azfn = roundfi(azf * m);

//write a ZERO_VALUE line when the kernel support falls outside the input,
//according to edge_method (see above)
if(edge_method == 0){
if(azi2 < hn || azi2 > naz2 - 1 - hn){
for(j = 0; j < nrg; j++){
outl[j].re = ZERO_VALUE;
outl[j].im = ZERO_VALUE;
}
writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp);
continue;
}
}
else if(edge_method == 1){
if(azi2 < 0 || azi2 > naz2 - 1){
for(j = 0; j < nrg; j++){
outl[j].re = ZERO_VALUE;
outl[j].im = ZERO_VALUE;
}
writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp);
continue;
}
}
else{
if(azi2 < -hn || azi2 > naz2 - 1 + hn){
for(j = 0; j < nrg; j++){
outl[j].re = ZERO_VALUE;
outl[j].im = ZERO_VALUE;
}
writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp);
continue;
}
}

//decimate the oversampled kernel at the fractional shift (indices clamped
//to the table ends)
for(k = -hn; k <= hn; k++){
tmp2 = k * m - azfn;
if(tmp2 > hnm) tmp2 = hnm;
if(tmp2 < -hnm) tmp2 = -hnm;
azkernel[k] = kernel[tmp2];
}
//make the decimated kernel unit-gain
normalize_kernel(azkernel, -hn, hn);

for(j = 0; j < nrg; j++){
//gather the n azimuth neighbors at range bin j, zero-filling (ZERO_VALUE)
//taps that fall outside the input
for(k1 = -hn; k1 <= hn; k1++){
if((azi2 + k1 >= 0)&&(azi2 + k1 <= naz2-1)){
rgrs[k1].re = inb[azi2 + k1][j].re;
rgrs[k1].im = inb[azi2 + k1][j].im;
}
else{
rgrs[k1].re = ZERO_VALUE;
rgrs[k1].im = ZERO_VALUE;
}
}
dopx = dop[j];
for(k = -hn; k <= hn; k++){
//azca[k]: doppler carrier exp(j*2*pi*fd*k/prf) at tap k
ftmp1 = 2.0 * PI * dopx * k / prf;
azca[k].re = cos(ftmp1);
azca[k].im = sin(ftmp1);
if(interp_method == 0){
//remove the carrier, then accumulate with the real kernel
rgrsb[k] = cmul(rgrs[k], cconj(azca[k]));
azrs[j].re += rgrsb[k].re * azkernel[k];
azrs[j].im += rgrsb[k].im * azkernel[k];
}
else{
//fold the carrier into a complex kernel and accumulate
azkernel_fc[k].re = azca[k].re * azkernel[k];
azkernel_fc[k].im = azca[k].im * azkernel[k];
azrs[j] = cadd(azrs[j], cmul(rgrs[k], azkernel_fc[k]));
}
}
if(interp_method == 0){
//re-apply the carrier phase at the fractional azimuth offset azf
ftmp1 = 2.0 * PI * dopx * azf / prf;
fctmp1.re = cos(ftmp1);
fctmp1.im = sin(ftmp1);
azrs[j] = cmul(azrs[j], fctmp1);
}
}

//round back to char complex (no additional clipping is applied here)
for(j = 0; j < nrg; j++){
outl[j].re = roundfi(azrs[j].re);
outl[j].im = roundfi(azrs[j].im);
}
writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp);
}
fprintf(stderr,"processing line: %6d of %6d\n", naz1, naz1);

free_vector_float(sincc, -hnm, hnm);
free_vector_float(kaiserc, -hnm, hnm);
free_vector_float(kernel, -hnm, hnm);
free_vector_float(azkernel, -hn, hn);
free_vector_fcomplex(azkernel_fc, -hn, hn);
free_vector_fcomplex(rgrs, -hn, hn);
free_vector_fcomplex(azca, -hn, hn);
free_vector_fcomplex(rgrsb, -hn, hn);
free_array1d_fcomplex(azrs);
free_array1d_char_complex(inl);
free_array1d_char_complex(outl);
free_array1d_float(dop);
free_array2d_fcomplex(inb);

fclose(slc2fp);
fclose(rslc2fp);

return 0;
}
/* Allocate a 1-D array of nc char_complex samples.
   Prints a message and exits on allocation failure, so the returned
   pointer is always valid. Caller frees with free_array1d_char_complex(). */
char_complex *array1d_char_complex(long nc){
char_complex *buf = (char_complex *) malloc(nc * sizeof(char_complex));
if(buf == NULL){
fprintf(stderr,"Error: cannot allocate 1-D char complex array\n");
exit(1);
}
return buf;
}
/* Release an array obtained from array1d_char_complex().
   (The NULL guard is defensive only: free(NULL) is already a no-op.) */
void free_array1d_char_complex(char_complex *fcv){
if(fcv != NULL)
free(fcv);
}
/* Scale kernel[start_index..end_index] (inclusive) so its taps sum to 1.
   If the taps sum to exactly zero, the kernel is left unchanged to avoid
   dividing by zero. The sum is accumulated in double precision. */
void normalize_kernel(float *kernel, long start_index, long end_index){
long k;
double total = 0.0;

for(k = start_index; k <= end_index; k++){
total += kernel[k];
}

if(total != 0){
for(k = start_index; k <= end_index; k++){
kernel[k] /= total;
}
}
}

View File

@ -2,47 +2,47 @@
#include "siocomplex.h"
#include <math.h>
fcomplex Cmul(fcomplex x, fcomplex y)
fcomplex_sio Cmul(fcomplex_sio x, fcomplex_sio y)
{
fcomplex z;
fcomplex_sio z;
z.r = x.r*y.r - x.i*y.i;
z.i = x.i*y.r + x.r*y.i;
return z;
}
fcomplex Cexp(float theta)
fcomplex_sio Cexp(float theta)
{
fcomplex z;
fcomplex_sio z;
z.r = cos(theta);
z.i = sin(theta);
return z;
}
fcomplex Conjg(fcomplex z)
fcomplex_sio Conjg(fcomplex_sio z)
{
fcomplex x;
fcomplex_sio x;
x.r = z.r;
x.i = -z.i;
return x;
}
fcomplex RCmul(float a, fcomplex z)
fcomplex_sio RCmul(float a, fcomplex_sio z)
{
fcomplex x;
fcomplex_sio x;
x.r = a*z.r;
x.i = a*z.i;
return x;
}
fcomplex Cadd(fcomplex x, fcomplex y)
fcomplex_sio Cadd(fcomplex_sio x, fcomplex_sio y)
{
fcomplex z;
fcomplex_sio z;
z.r = x.r + y.r;
z.i = x.i + y.i;
return z;
}
float Cabs(fcomplex z)
float Cabs(fcomplex_sio z)
{
return hypot(z.r, z.i);
}

View File

@ -1,11 +1,11 @@
#ifndef _COMPLEX_H
#define _COMPLEX_H
fcomplex Cmul(fcomplex x, fcomplex y);
fcomplex Cexp(float theta);
fcomplex Conjg(fcomplex z);
fcomplex RCmul(float a, fcomplex z);
fcomplex Cadd(fcomplex x, fcomplex y);
float Cabs(fcomplex z);
fcomplex_sio Cmul(fcomplex_sio x, fcomplex_sio y);
fcomplex_sio Cexp(float theta);
fcomplex_sio Conjg(fcomplex_sio z);
fcomplex_sio RCmul(float a, fcomplex_sio z);
fcomplex_sio Cadd(fcomplex_sio x, fcomplex_sio y);
float Cabs(fcomplex_sio z);
#endif /* _COMPLEX_H */

View File

@ -42,6 +42,7 @@ def setup(self):
#SECTION 1. PROCESSING CONTROL PARAMETERS
#1. suggested default values of the parameters
ionParam.doIon = False
ionParam.considerBurstProperties = False
ionParam.startStep = ionParam.allSteps[0]
ionParam.endStep = ionParam.allSteps[-1]
@ -77,6 +78,7 @@ def setup(self):
#2. accept the above parameters from topsApp.py
ionParam.doIon = self.ION_doIon
ionParam.considerBurstProperties = self.ION_considerBurstProperties
ionParam.startStep = self.ION_startStep
ionParam.endStep = self.ION_endStep
@ -199,13 +201,13 @@ def setup(self):
ionParam.calIonWithMerged = False
else:
ionParam.calIonWithMerged = True
#for cross Sentinel-1A/B interferogram, always not using merged interferogram
if reference.mission != secondary.mission:
ionParam.calIonWithMerged = False
#there is no need to process swath by swath when there is only one swath
#ionSwathBySwath only works when number of swaths >=2
if len(swathList) == 1:
ionParam.calIonWithMerged = True
#for cross Sentinel-1A/B interferogram, always not using merged interferogram
if reference.mission != secondary.mission:
ionParam.calIonWithMerged = False
#determine if remove an empirical ramp
if reference.mission == secondary.mission:
@ -2637,16 +2639,17 @@ def runIon(self):
if run_step('filt_gaussian', ionParam):
filt_gaussian(self, ionParam)
#only do the following steps when considering burst properties
#ionosphere shift
if run_step('ionosphere_shift', ionParam):
if run_step('ionosphere_shift', ionParam) and ionParam.considerBurstProperties:
ionosphere_shift(self, ionParam)
#resample from ionospheric layer to ground layer, get ionosphere for each burst
if run_step('ion2grd', ionParam):
if run_step('ion2grd', ionParam) and ionParam.considerBurstProperties:
ion2grd(self, ionParam)
#esd
if run_step('esd', ionParam):
if run_step('esd', ionParam) and ionParam.considerBurstProperties:
esd(self, ionParam)
#pure esd without applying ionospheric correction

View File

@ -20,6 +20,38 @@ import logging
from isceobj.Util.ImageUtil import ImageLib as IML
def interpolateDifferentNumberOfLooks(inputData, lengtho, widtho, nrli, nali, nrlo, nalo):
    '''
    Resample a 2-D array between grids with different multilook factors.

    inputData: input numpy 2-d array
    lengtho:   length (number of lines) of output array
    widtho:    width (number of samples) of output array
    nrli:      number of range looks input
    nali:      number of azimuth looks input
    nrlo:      number of range looks output
    nalo:      number of azimuth looks output

    Cubic 1-d interpolation (extrapolating at the edges) is applied in two
    separable passes: first along range for every line, then along azimuth
    for every output column. Output sample centers are mapped into input
    index space with index*looks_in/looks_out + (looks_in-looks_out)/(2*looks_out).
    '''
    import numpy as np
    from scipy.interpolate import interp1d

    lengthi, widthi = inputData.shape

    # pass 1 (range): resample every input line to the output width
    srcCols = np.linspace(0, widthi-1, num=widthi, endpoint=True)
    dstCols = np.linspace(0, widtho-1, num=widtho, endpoint=True) * nrli/nrlo + (nrli-nrlo)/(2.0*nrlo)
    rangeResampled = np.zeros((lengthi, widtho), dtype=inputData.dtype)
    for line in range(lengthi):
        rangeResampled[line, :] = interp1d(srcCols, inputData[line, :], kind='cubic', fill_value="extrapolate")(dstCols)

    # pass 2 (azimuth): resample every column of the intermediate to the output length
    srcRows = np.linspace(0, lengthi-1, num=lengthi, endpoint=True)
    dstRows = np.linspace(0, lengtho-1, num=lengtho, endpoint=True) * nali/nalo + (nali-nalo)/(2.0*nalo)
    outputData = np.zeros((lengtho, widtho), dtype=inputData.dtype)
    for col in range(widtho):
        outputData[:, col] = interp1d(srcRows, rangeResampled[:, col], kind='cubic', fill_value="extrapolate")(dstRows)

    return outputData
def mergeBox(frame):
'''
Merging using VRTs.
@ -666,14 +698,16 @@ def runMergeBursts(self, adjust=1):
#totalLooksThreshold = 9
totalLooksThreshold = 99999999999999
#if doing ionospheric correction
ionCorrection = self.ION_doIon
doIon = self.ION_doIon
applyIon = self.ION_applyIon
considerBurstProperties = self.ION_considerBurstProperties
ionDirname = 'ion/ion_burst'
mergedIonname = 'topophase.ion'
originalIfgname = 'topophase_ori.flat'
#########################################
# backing out the tighter constraints for ionosphere as they could introduce gaps between along-track products produced separately
if not ionCorrection:
if not (doIon and considerBurstProperties):
adjust=0
#########################################
@ -712,7 +746,7 @@ def runMergeBursts(self, adjust=1):
#restore
frames = frames_bak
else:
validOnly==True
validOnly=True
#########################################
@ -738,7 +772,7 @@ def runMergeBursts(self, adjust=1):
mergeBursts2(frames, os.path.join(self._insar.fineIfgDirname, 'IW%d', 'burst_%02d.int'), burstIndex, box, os.path.join(mergedir, self._insar.mergedIfgname+suffix), virtual=virtual, validOnly=True)
if self.numberAzimuthLooks * self.numberRangeLooks < totalLooksThreshold:
mergeBursts2(frames, os.path.join(self._insar.fineIfgDirname, 'IW%d', 'burst_%02d.cor'), burstIndex, box, os.path.join(mergedir, self._insar.correlationFilename+suffix), virtual=virtual, validOnly=True)
if ionCorrection == True:
if doIon and considerBurstProperties:
mergeBursts2(frames, os.path.join(ionDirname, 'IW%d', 'burst_%02d.ion'), burstIndex, box, os.path.join(mergedir, mergedIonname+suffix), virtual=virtual, validOnly=True)
@ -782,13 +816,61 @@ def runMergeBursts(self, adjust=1):
os.remove(os.path.join(mergedir, pwrfile+'.xml'))
os.remove(os.path.join(mergedir, pwrfile+'.vrt'))
if ionCorrection:
multilook(os.path.join(mergedir, mergedIonname+suffix),
outname = os.path.join(mergedir, mergedIonname),
alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks)
if doIon:
if considerBurstProperties:
multilook(os.path.join(mergedir, mergedIonname+suffix),
outname = os.path.join(mergedir, mergedIonname),
alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks)
else:
ionFilt = 'ion/ion_cal/filt.ion'
img = isceobj.createImage()
img.load(ionFilt+'.xml')
ionFiltImage = (np.fromfile(ionFilt, dtype=np.float32).reshape(img.length*2, img.width))[1:img.length*2:2, :]
img = isceobj.createImage()
img.load(os.path.join(mergedir, self._insar.mergedIfgname)+'.xml')
#interpolate original
ionFiltImageOut = interpolateDifferentNumberOfLooks(ionFiltImage, img.length, img.width, self.numberRangeLooks, self.numberAzimuthLooks, self.ION_numberRangeLooks, self.ION_numberAzimuthLooks)
ionFiltOut = os.path.join(mergedir, mergedIonname)
ionFiltImageOut.astype(np.float32).tofile(ionFiltOut)
image = isceobj.createImage()
image.setDataType('FLOAT')
image.setFilename(ionFiltOut)
image.extraFilename = ionFiltOut + '.vrt'
image.setWidth(img.width)
image.setLength(img.length)
#image.setAccessMode('read')
#image.createImage()
image.renderHdr()
#image.finalizeImage()
else:
print('Skipping multi-looking ....')
if self.doInSAR and doIon and (not considerBurstProperties):
ionFilt = 'ion/ion_cal/filt.ion'
img = isceobj.createImage()
img.load(ionFilt+'.xml')
ionFiltImage = (np.fromfile(ionFilt, dtype=np.float32).reshape(img.length*2, img.width))[1:img.length*2:2, :]
img = isceobj.createImage()
img.load(os.path.join(mergedir, self._insar.mergedIfgname+suffix)+'.xml')
#interpolate original
ionFiltImageOut = interpolateDifferentNumberOfLooks(ionFiltImage, img.length, img.width, self.numberRangeLooks, self.numberAzimuthLooks, self.ION_numberRangeLooks, self.ION_numberAzimuthLooks)
ionFiltOut = os.path.join(mergedir, mergedIonname)
ionFiltImageOut.astype(np.float32).tofile(ionFiltOut)
image = isceobj.createImage()
image.setDataType('FLOAT')
image.setFilename(ionFiltOut)
image.extraFilename = ionFiltOut + '.vrt'
image.setWidth(img.width)
image.setLength(img.length)
#image.setAccessMode('read')
#image.createImage()
image.renderHdr()
#image.finalizeImage()
#########################################
# STEP 4. APPLY CORRECTIONS
@ -796,8 +878,8 @@ def runMergeBursts(self, adjust=1):
#do ionospheric and other corrections here
#should also consider suffix, but usually we use multiple looks, so I ignore it for now.
if self.doInSAR:
if ionCorrection:
print('user choose to do ionospheric correction')
if doIon and applyIon:
print('user choose to apply ionospheric correction')
#define file names
interferogramFilename = os.path.join(mergedir, self._insar.mergedIfgname)

View File

@ -25,9 +25,6 @@
// Author: Giangi Sacco
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#include <Python.h>
#include "image_sio.h"
#include "siocomplex.h"
@ -219,8 +216,8 @@ PyObject * ALOS_fbd2fbs_C(PyObject* self, PyObject* args)
i = j + r.first_sample;
/* increase dynamic range by 2 and set the mean value to 63.5 */
rtest = rintf(2.*cout[j].r+63.5);
itest = rintf(2.*cout[j].i+63.5);
rtest = rintf(cout[j].r+r.xmi);
itest = rintf(cout[j].i+r.xmq);
/* sometimes the range can exceed 0-127 so
clip the numbers to be in the correct range */

View File

@ -25,9 +25,6 @@
// Author: Giangi Sacco
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#include <Python.h>
#include "image_sio.h"
#include "siocomplex.h"
@ -197,10 +194,10 @@ PyObject * ALOS_fbs2fbd_C(PyObject* self, PyObject* args)
n4 = nffti/4;
for(i=0; i<n4;i++)
{
cout[i].r = cin[i].r;
cout[i].i = cin[i].i;
cout[i+n4].r = cin[i+3*n4].r;
cout[i+n4].i = cin[i+3*n4].i;
cout[i].r = 0.5*cin[i].r;
cout[i].i = 0.5*cin[i].i;
cout[i+n4].r = 0.5*cin[i+3*n4].r;
cout[i+n4].i = 0.5*cin[i+3*n4].i;
}
/*****Inverse FFT*****/
@ -219,8 +216,8 @@ PyObject * ALOS_fbs2fbd_C(PyObject* self, PyObject* args)
i = j + r.first_sample;
/* increase dynamic range by 2 and set the mean value to 63.5 */
rtest = rintf(2.*cout[j].r+63.5);
itest = rintf(2.*cout[j].i+63.5);
rtest = rintf(cout[j].r+r.xmi);
itest = rintf(cout[j].i+r.xmq);
/* sometimes the range can exceed 0-127 so
clip the numbers to be in the correct range */

View File

@ -213,7 +213,9 @@
cycle
endif
r_dop = evalPoly2d_f(dopplerPoly, r_at, r_rt)
!r_dop = evalPoly2d_f(dopplerPoly, r_at, r_rt)
! doppler should be computed using secondary's coordinate. Cunren Liang, 12-AUG-2020
r_dop = evalPoly2d_f(dopplerPoly, r_at+r_ao, r_rt+r_ro)
!!!!!!Data chip without the carriers
do jj=1,sincone

View File

@ -55,6 +55,7 @@
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <time.h>
#include <sys/time.h>
#include <sys/resource.h>
#include <assert.h>
@ -1755,6 +1756,8 @@ void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol,
double cost, c_max;
short *cap; /* cap changed to short by CWC */
long row_index, col_index; /* report out-of-bounds index by Cunren, 18-aug-2020 */
short **rowcost, **colcost;
short **rowflow, **colflow;
@ -1808,19 +1811,10 @@ void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol,
exit(ABNORMAL_EXIT);
}
if(from==(to+1)){
num=from+(int )((from-1)/nNrow);
colflow[(num-1) % (nNrow+1)][(int )(num-1)/(nNrow+1)]-=flow;
}else if(from==(to-1)){
num=from+(int )((from-1)/nNrow)+1;
colflow[(num-1) % (nNrow+1)][(int )(num-1)/(nNrow+1)]+=flow;
}else if(from==(to-nNrow)){
num=from+nNrow;
rowflow[(num-1) % nNrow][(int )((num-1)/nNrow)]+=flow;
}else if(from==(to+nNrow)){
num=from;
rowflow[(num-1) % nNrow][(int )((num-1)/nNrow)]-=flow;
}else if((from==ground) || (to==ground)){
/* node indices are indexed from 1, not 0 */
/* node indices are in column major order, not row major */
/* handle flow to/from ground first */
if((from==ground) || (to==ground)){
if(to==ground){
num=to;
to=from;
@ -1828,17 +1822,69 @@ void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol,
flow=-flow;
}
if(!((to-1) % nNrow)){
colflow[0][(int )((to-1)/nNrow)]+=flow;
row_index = 0;
col_index = (int )((to-1)/nNrow);
if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2)
colflow[row_index][col_index]+=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else if(to<=nNrow){
rowflow[to-1][0]+=flow;
row_index = to-1;
col_index = 0;
if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1)
rowflow[row_index][col_index]+=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else if(to>=(ground-nNrow-1)){
rowflow[(to-1) % nNrow][nNcol]-=flow;
row_index = (to-1) % nNrow;
col_index = nNcol;
if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1)
rowflow[row_index][col_index]-=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else if(!(to % nNrow)){
colflow[nNrow][(int )((to/nNrow)-1)]-=flow;
row_index = nNrow;
col_index = (int )((to/nNrow)-1);
if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2)
colflow[row_index][col_index]-=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else{
fprintf(sp0,"Unassigned ground arc parsing cs2 solution\nAbort\n");
exit(ABNORMAL_EXIT);
}
}else if(from==(to+1)){
num=from+(int )((from-1)/nNrow);
row_index = (num-1) % (nNrow+1);
col_index = (int )(num-1)/(nNrow+1);
if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2)
colflow[row_index][col_index]-=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else if(from==(to-1)){
num=from+(int )((from-1)/nNrow)+1;
row_index = (num-1) % (nNrow+1);
col_index = (int )(num-1)/(nNrow+1);
if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2)
colflow[row_index][col_index]+=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else if(from==(to-nNrow)){
num=from+nNrow;
row_index = (num-1) % nNrow;
col_index = (int )((num-1)/nNrow);
if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1)
rowflow[row_index][col_index]+=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else if(from==(to+nNrow)){
num=from;
row_index = (num-1) % nNrow;
col_index = (int )((num-1)/nNrow);
if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1)
rowflow[row_index][col_index]-=flow;
else
fprintf(sp0,"Warning: out-of-bounds index in computing flow\n");
}else{
fprintf(sp0,"Non-grid arc parsing cs2 solution\nAbort\n");
exit(ABNORMAL_EXIT);

View File

@ -4,16 +4,18 @@ Read the document for each stack processor for details.
+ [stripmapStack](./stripmapStack/README.md)
+ [topsStack](./topsStack/README.md)
+ [alosStack](./alosStack/alosStack_tutorial.txt)
### Installation
To use the TOPS or Stripmap stack processors you need to:
To use a stack processor you need to:
1. Install ISCE as usual
2. Depending on which stack processor you need to try, add the path of the folder containing the python scripts to your `$PATH` environment variable as follows:
- add the full path of your **contrib/stack/topsStack** to `$PATH` to use the topsStack for processing a stack of Sentinel-1 TOPS data
- add the full path of your **contrib/stack/stripmapStack** to `$PATH` to use the stripmapStack for processing a stack of StripMap data
- set environment variable `$PATH_ALOSSTACK` by doing: export PATH_ALOSSTACK=CODE_DIR/contrib/stack/alosStack to use the alosStack for processing a stack of ALOS-2 data
Note: The stack processors do not show up in the install directory of your isce software. They can be found in the isce source directory.
@ -32,3 +34,4 @@ For StripMap stack processor and ionospheric phase estimation:
For TOPS stack processing:
+ H. Fattahi, P. Agram, and M. Simons, “A network-based enhanced spectral diversity approach for TOPS time-series analysis,” IEEE Trans. Geosci. Remote Sens., vol. 55, no. 2, pp. 777–786, Feb. 2017. (https://ieeexplore.ieee.org/abstract/document/7637021/)

View File

@ -0,0 +1,426 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import isce
import isceobj
import iscesys
from iscesys.Component.Application import Application
DATA_DIR = Application.Parameter('dataDir',
public_name='data directory',
default=None,
type=str,
mandatory=False,
doc="directory of data, where data of each date are in an individual directory")
FRAMES = Application.Parameter('frames',
public_name = 'frames',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'frames to process')
POLARIZATION = Application.Parameter('polarization',
public_name='polarization',
default='HH',
type=str,
mandatory=False,
doc="polarization to process")
STARTING_SWATH = Application.Parameter('startingSwath',
public_name='starting swath',
default=None,
type=int,
mandatory=False,
doc="starting swath to process")
ENDING_SWATH = Application.Parameter('endingSwath',
public_name='ending swath',
default=None,
type=int,
mandatory=False,
doc="ending swath to process")
DEM = Application.Parameter('dem',
public_name='dem for coregistration',
default=None,
type=str,
mandatory=False,
doc='dem for coregistration file')
DEM_GEO = Application.Parameter('demGeo',
public_name='dem for geocoding',
default=None,
type=str,
mandatory=False,
doc='dem for geocoding file')
WBD = Application.Parameter('wbd',
public_name='water body',
default=None,
type=str,
mandatory=False,
doc='water body file')
DATE_REFERENCE_STACK = Application.Parameter('dateReferenceStack',
public_name='reference date of the stack',
default=None,
type=str,
mandatory=False,
doc="reference date of the stack")
GRID_FRAME = Application.Parameter('gridFrame',
public_name='grid frame',
default=None,
type=str,
mandatory=False,
doc="resample all frames/swaths to the grid size of this frame")
GRID_SWATH = Application.Parameter('gridSwath',
public_name='grid swath',
default=None,
type=int,
mandatory=False,
doc="resample all frames/swaths to the grid size of this swath")
NUMBER_OF_SUBSEQUENT_DATES = Application.Parameter('numberOfSubsequentDates',
public_name='number of subsequent dates',
default=4,
type=int,
mandatory=False,
doc="number of subsequent dates used to form pairs")
PAIR_TIME_SPAN_MINIMUM = Application.Parameter('pairTimeSpanMinimum',
public_name = 'pair time span minimum in years',
default = None,
type=float,
mandatory=False,
doc = 'pair time span minimum in years')
PAIR_TIME_SPAN_MAXIMUM = Application.Parameter('pairTimeSpanMaximum',
public_name = 'pair time span maximum in years',
default = None,
type=float,
mandatory=False,
doc = 'pair time span maximum in years')
DATES_INCLUDED = Application.Parameter('datesIncluded',
public_name = 'dates to be included',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'dates to be included')
#MUST BE FIRST DATE - SECOND DATE!!!
PAIRS_INCLUDED = Application.Parameter('pairsIncluded',
public_name = 'pairs to be included',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'pairs to be included')
DATES_EXCLUDED = Application.Parameter('datesExcluded',
public_name = 'dates to be excluded',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'dates to be excluded')
#MUST BE FIRST DATE - SECOND DATE!!!
PAIRS_EXCLUDED = Application.Parameter('pairsExcluded',
public_name = 'pairs to be excluded',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'pairs to be excluded')
DATE_REFERENCE_STACK_ION = Application.Parameter('dateReferenceStackIon',
public_name='reference date of the stack for estimating ionosphere',
default=None,
type=str,
mandatory=False,
doc="reference date of the stack in estimating ionosphere")
NUMBER_OF_SUBSEQUENT_DATES_ION = Application.Parameter('numberOfSubsequentDatesIon',
public_name='number of subsequent dates for estimating ionosphere',
default=4,
type=int,
mandatory=False,
doc="number of subsequent dates used to form pairs for estimating ionosphere")
PAIR_TIME_SPAN_MINIMUM_ION = Application.Parameter('pairTimeSpanMinimumIon',
public_name = 'pair time span minimum in years for estimating ionosphere',
default = None,
type=float,
mandatory=False,
doc = 'pair time span minimum in years for estimating ionosphere')
PAIR_TIME_SPAN_MAXIMUM_ION = Application.Parameter('pairTimeSpanMaximumIon',
public_name = 'pair time span maximum in years for estimating ionosphere',
default = None,
type=float,
mandatory=False,
doc = 'pair time span maximum in years for estimating ionosphere')
DATES_INCLUDED_ION = Application.Parameter('datesIncludedIon',
public_name = 'dates to be included for estimating ionosphere',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'dates to be included for estimating ionosphere')
#MUST BE FIRST DATE - SECOND DATE!!!
PAIRS_INCLUDED_ION = Application.Parameter('pairsIncludedIon',
public_name = 'pairs to be included for estimating ionosphere',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'pairs to be included for estimating ionosphere')
DATES_EXCLUDED_ION = Application.Parameter('datesExcludedIon',
public_name = 'dates to be excluded for estimating ionosphere',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'dates to be excluded for estimating ionosphere')
#MUST BE FIRST DATE - SECOND DATE!!!
PAIRS_EXCLUDED_ION = Application.Parameter('pairsExcludedIon',
public_name = 'pairs to be excluded for estimating ionosphere',
default = None,
type=str,
container=list,
mandatory=False,
doc = 'pairs to be excluded for estimating ionosphere')
DATES_REPROCESS = Application.Parameter('datesReprocess',
public_name = 'reprocess already processed dates',
default=False,
type=bool,
mandatory=False,
doc = 'reprocess already processed dates')
PAIRS_REPROCESS = Application.Parameter('pairsReprocess',
public_name = 'reprocess already processed pairs',
default=False,
type=bool,
mandatory=False,
doc = 'reprocess already processed pairs')
PAIRS_REPROCESS_ION = Application.Parameter('pairsReprocessIon',
public_name = 'reprocess already processed pairs for estimating ionosphere',
default=False,
type=bool,
mandatory=False,
doc = 'reprocess already processed pairs for estimating ionosphere')
DATES_PROCESSING_DIR = Application.Parameter('datesProcessingDir',
public_name='dates processing directory',
default='dates',
type=str,
mandatory=False,
doc="directory for processing all dates")
DATES_RESAMPLED_DIR = Application.Parameter('datesResampledDir',
public_name='dates resampled directory',
default='dates_resampled',
type=str,
mandatory=False,
doc="directory for all dates resampled")
PAIRS_PROCESSING_DIR = Application.Parameter('pairsProcessingDir',
public_name='pairs processing directory',
default='pairs',
type=str,
mandatory=False,
doc="directory for processing all pairs")
BASELINE_DIR = Application.Parameter('baselineDir',
public_name='baseline directory',
default='baseline',
type=str,
mandatory=False,
doc="directory for baselines")
DATES_DIR_ION = Application.Parameter('datesDirIon',
public_name='dates directory for ionosphere',
default='dates_ion',
type=str,
mandatory=False,
doc="dates directory for ionosphere")
PAIRS_PROCESSING_DIR_ION = Application.Parameter('pairsProcessingDirIon',
public_name='pairs processing directory for estimating ionosphere',
default='pairs_ion',
type=str,
mandatory=False,
doc="directory for processing all pairs for estimating ionosphere")
#import insar processing parameters from alos2App.py
#from alos2App import REFERENCE_DIR
#from alos2App import SECONDARY_DIR
#from alos2App import REFERENCE_FRAMES
#from alos2App import SECONDARY_FRAMES
#from alos2App import REFERENCE_POLARIZATION
#from alos2App import SECONDARY_POLARIZATION
#from alos2App import STARTING_SWATH
#from alos2App import ENDING_SWATH
#from alos2App import DEM
#from alos2App import DEM_GEO
#from alos2App import WBD
from alos2App import USE_VIRTUAL_FILE
from alos2App import USE_GPU
#from alos2App import BURST_SYNCHRONIZATION_THRESHOLD
#from alos2App import CROP_SLC
from alos2App import USE_WBD_FOR_NUMBER_OFFSETS
from alos2App import NUMBER_RANGE_OFFSETS
from alos2App import NUMBER_AZIMUTH_OFFSETS
from alos2App import NUMBER_RANGE_LOOKS1
from alos2App import NUMBER_AZIMUTH_LOOKS1
from alos2App import NUMBER_RANGE_LOOKS2
from alos2App import NUMBER_AZIMUTH_LOOKS2
from alos2App import NUMBER_RANGE_LOOKS_SIM
from alos2App import NUMBER_AZIMUTH_LOOKS_SIM
from alos2App import SWATH_OFFSET_MATCHING
from alos2App import FRAME_OFFSET_MATCHING
from alos2App import FILTER_STRENGTH
from alos2App import FILTER_WINSIZE
from alos2App import FILTER_STEPSIZE
from alos2App import REMOVE_MAGNITUDE_BEFORE_FILTERING
from alos2App import WATERBODY_MASK_STARTING_STEP
#from alos2App import GEOCODE_LIST
from alos2App import GEOCODE_BOUNDING_BOX
from alos2App import GEOCODE_INTERP_METHOD
#ionospheric correction parameters
from alos2App import DO_ION
from alos2App import APPLY_ION
from alos2App import NUMBER_RANGE_LOOKS_ION
from alos2App import NUMBER_AZIMUTH_LOOKS_ION
from alos2App import MASKED_AREAS_ION
from alos2App import SWATH_PHASE_DIFF_SNAP_ION
from alos2App import SWATH_PHASE_DIFF_LOWER_ION
from alos2App import SWATH_PHASE_DIFF_UPPER_ION
from alos2App import FIT_ION
from alos2App import FILT_ION
from alos2App import FIT_ADAPTIVE_ION
from alos2App import FILT_SECONDARY_ION
from alos2App import FILTERING_WINSIZE_MAX_ION
from alos2App import FILTERING_WINSIZE_MIN_ION
from alos2App import FILTERING_WINSIZE_SECONDARY_ION
from alos2App import FILTER_STD_ION
from alos2App import FILTER_SUBBAND_INT
from alos2App import FILTER_STRENGTH_SUBBAND_INT
from alos2App import FILTER_WINSIZE_SUBBAND_INT
from alos2App import FILTER_STEPSIZE_SUBBAND_INT
from alos2App import REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT
## Common interface for all insar applications.
class Stack(Application):
    """Configuration container for the alosStack stack processor.

    Collects every user-settable parameter (stack layout, pair selection,
    regular InSAR processing options and ionospheric-correction options)
    declared above or imported from alos2App. Instances are used purely to
    parse and hold configuration; the alosStack step scripts read their
    settings from an instance of this class.
    """

    family = 'stackinsar'

    parameter_list = (
        # stack layout / input data
        DATA_DIR,
        FRAMES,
        POLARIZATION,
        STARTING_SWATH,
        ENDING_SWATH,
        DEM,
        DEM_GEO,
        WBD,
        DATE_REFERENCE_STACK,
        GRID_FRAME,
        GRID_SWATH,
        # pair selection
        NUMBER_OF_SUBSEQUENT_DATES,
        PAIR_TIME_SPAN_MINIMUM,
        PAIR_TIME_SPAN_MAXIMUM,
        DATES_INCLUDED,
        PAIRS_INCLUDED,
        DATES_EXCLUDED,
        PAIRS_EXCLUDED,
        # pair selection for ionosphere estimation
        DATE_REFERENCE_STACK_ION,
        NUMBER_OF_SUBSEQUENT_DATES_ION,
        PAIR_TIME_SPAN_MINIMUM_ION,
        PAIR_TIME_SPAN_MAXIMUM_ION,
        DATES_INCLUDED_ION,
        PAIRS_INCLUDED_ION,
        DATES_EXCLUDED_ION,
        PAIRS_EXCLUDED_ION,
        # reprocessing switches
        DATES_REPROCESS,
        PAIRS_REPROCESS,
        PAIRS_REPROCESS_ION,
        # output directory layout
        DATES_PROCESSING_DIR,
        DATES_RESAMPLED_DIR,
        PAIRS_PROCESSING_DIR,
        BASELINE_DIR,
        DATES_DIR_ION,
        PAIRS_PROCESSING_DIR_ION,
        #insar processing parameters, same as those in alos2App.py
        USE_VIRTUAL_FILE,
        USE_GPU,
        USE_WBD_FOR_NUMBER_OFFSETS,
        NUMBER_RANGE_OFFSETS,
        NUMBER_AZIMUTH_OFFSETS,
        NUMBER_RANGE_LOOKS1,
        NUMBER_AZIMUTH_LOOKS1,
        NUMBER_RANGE_LOOKS2,
        NUMBER_AZIMUTH_LOOKS2,
        NUMBER_RANGE_LOOKS_SIM,
        NUMBER_AZIMUTH_LOOKS_SIM,
        SWATH_OFFSET_MATCHING,
        FRAME_OFFSET_MATCHING,
        FILTER_STRENGTH,
        FILTER_WINSIZE,
        FILTER_STEPSIZE,
        REMOVE_MAGNITUDE_BEFORE_FILTERING,
        WATERBODY_MASK_STARTING_STEP,
        GEOCODE_BOUNDING_BOX,
        GEOCODE_INTERP_METHOD,
        #ionospheric correction parameters
        DO_ION,
        APPLY_ION,
        NUMBER_RANGE_LOOKS_ION,
        NUMBER_AZIMUTH_LOOKS_ION,
        MASKED_AREAS_ION,
        SWATH_PHASE_DIFF_SNAP_ION,
        SWATH_PHASE_DIFF_LOWER_ION,
        SWATH_PHASE_DIFF_UPPER_ION,
        FIT_ION,
        FILT_ION,
        FIT_ADAPTIVE_ION,
        FILT_SECONDARY_ION,
        FILTERING_WINSIZE_MAX_ION,
        FILTERING_WINSIZE_MIN_ION,
        FILTERING_WINSIZE_SECONDARY_ION,
        FILTER_STD_ION,
        FILTER_SUBBAND_INT,
        FILTER_STRENGTH_SUBBAND_INT,
        FILTER_WINSIZE_SUBBAND_INT,
        FILTER_STEPSIZE_SUBBAND_INT,
        REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT)

    facility_list = ()

    def __init__(self, family='', name='', cmdline=None):
        # Application resolves parameters from the class attributes above;
        # only family/name/command line need forwarding here.
        import isceobj
        super().__init__(
            family=family if family else self.__class__.family, name=name,
            cmdline=cmdline)
        return None

View File

@ -0,0 +1,325 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
def loadInsarUserParameters(filename):
    """Read an alos2App user configuration file into an Alos2InSAR object.

    filename: path to the configuration file; a trailing '.xml' extension
              is stripped because Application objects cannot handle it.
    """
    import os
    from isce.applications.alos2App import Alos2InSAR

    # the application object cannot recognize the extension
    if filename.endswith('.xml'):
        filename = os.path.splitext(filename)[0]

    # Alos2InSAR is used purely for reading and storing parameters;
    # none of its other attributes or functions are used.
    app = Alos2InSAR(name=filename)
    app.configure()
    return app
def loadStackUserParameters(filename):
    """Read an alosStack user configuration file into a Stack object.

    filename: path to the configuration file; a trailing '.xml' extension
              is stripped because Application objects cannot handle it.
    """
    import os
    from Stack import Stack

    # the application object cannot recognize the extension
    if filename.endswith('.xml'):
        filename = os.path.splitext(filename)[0]

    app = Stack(name=filename)
    app.configure()
    return app
def loadInsarProcessingParameters(name):
    """Load previously dumped InSAR processing parameters.

    name: base path of the dump; '<name>.xml' holds the Alos2Proc state and
          the pickle file '<name>' holds the procDoc attribute
          (see dumpInsarProcessingParameters).

    Returns the restored Alos2Proc object.

    Raises OSError (IOError) when either file cannot be read. The original
    code only printed a message and then returned a variable that could be
    half-initialized — or unbound, raising a confusing NameError.
    """
    import pickle
    from isceobj.Alos2Proc import Alos2Proc

    toLoad = Alos2Proc()
    try:
        toLoad.load(name + '.xml')
        # procDoc was dumped separately as a pickle
        with open(name, 'rb') as f:
            setattr(toLoad, 'procDoc', pickle.load(f))
    except IOError:
        print("Cannot open %s" % (name))
        raise
    return toLoad
def dumpInsarProcessingParameters(obj, name):
    """Dump InSAR processing parameters for later reloading.

    obj:  Alos2Proc-like object providing .dump() and a .procDoc attribute
    name: base path of the dump; writes '<name>.xml' (object state) and
          '<name>' (pickled procDoc)
    """
    import os
    import pickle

    ##############################
    #do this to output important paramters to xml (alos2Proc.xml) after each step.
    #self.renderProcDoc()
    ##############################

    # os.makedirs('') raises FileNotFoundError, so only create a directory
    # when the dump name actually contains one (bug in the original).
    dirname = os.path.dirname(name)
    if dirname:
        os.makedirs(dirname, exist_ok=True)

    try:
        obj.dump(name + '.xml')
        #dump the procDoc separately
        with open(name, 'wb') as f:
            pickle.dump(getattr(obj, 'procDoc'), f,
                        protocol=pickle.HIGHEST_PROTOCOL)
    except IOError:
        print("Cannot dump %s" % (name))

    return None
def loadProduct(xmlname):
    '''
    Load a product from an XML file using the Product Manager.
    '''
    from iscesys.Component.ProductManager import ProductManager as PM

    manager = PM()
    manager.configure()
    return manager.loadProduct(xmlname)
def saveProduct(obj, xmlname):
    '''
    Save a product object to an XML file using the Product Manager.
    '''
    from iscesys.Component.ProductManager import ProductManager as PM

    manager = PM()
    manager.configure()
    manager.dumpProduct(obj, xmlname)
    return None
def loadTrack(trackDir, date):
    '''
    Load a track together with all of its frames using the Product Manager.

    trackDir: directory where <date>.track.xml is located
    date:     date string in format YYMMDD
    '''
    import os
    import glob

    frameXmls = sorted(glob.glob(os.path.join(trackDir, 'f*_*/{}.frame.xml'.format(date))))

    track = loadProduct(os.path.join(trackDir, '{}.track.xml'.format(date)))
    track.frames = [loadProduct(x) for x in frameXmls]
    return track
def saveTrack(track, date):
    '''
    Save the track and its frames to XML files using the Product Manager.

    track: track object; each frame is written inside the matching
           f<i>_<frameNumber> folder under the current directory
    date:  date string in format YYMMDD
    '''
    import os
    import glob

    #dump the track object itself
    saveProduct(track, date+'.track.xml')

    for i, frame in enumerate(track.frames):
        #locate the folder of frame i+1
        frameDirs = sorted(glob.glob('f{}_*'.format(i+1)))
        if frameDirs == []:
            frameDir = 'f{}_{}'.format(i+1, frame.frameNumber)
            # NOTE(review): the folder is only named here, never created;
            # os.chdir below will fail if it is missing — confirm callers
            # pre-create the frame folders.
            print('no existing frame folder found at frame {}, create a frame folder {}'.format(i+1, frameDir))
        else:
            frameDir = frameDirs[0]

        #warn when the frame object disagrees with the folder name
        if frame.frameNumber != frameDir[-4:]:
            print('frame number in track object {} is different from that in frame folder name: {} at frame {}'.format(
                frame.frameNumber, frameDir[-4:], i+1))
            print('dumping it to {}'.format(frameDir))

        #dump the frame object inside its folder
        os.chdir(frameDir)
        saveProduct(frame, date+'.frame.xml')
        os.chdir('../')

    return None
def datesFromPairs(pairs):
    """Return the sorted unique dates appearing in a list of pair names.

    pairs: list of 'referenceDate-secondaryDate' strings.
    """
    unique = []
    for pair in pairs:
        # a pair name is 'ref-sec'; collect both sides, first occurrence wins
        for date in pair.split('-')[:2]:
            if date not in unique:
                unique.append(date)
    return sorted(unique)
def stackDateStatistics(idir, dateReference):
    '''
    Scan the stack data directory and report its composition.

    idir:          input directory where data of each date is located;
                   only sub-directories are recognized as dates
    dateReference: reference date, str type, format: 'YYMMDD'

    Returns (dateDirs, dates, frames, swaths, dateIndexReference):
    str list, str list, str list, int list, and the index of the
    reference date within dates.

    Raises Exception when dateReference is not among the date folders.
    (Also fixes the 'pleasae' typo in that error message.)
    '''
    import os
    import glob

    #get date folders
    dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*')))
    dateDirs = [x for x in dateDirs if os.path.isdir(x)]

    #find index of reference date:
    dates = []
    dateIndexReference = None
    for i in range(len(dateDirs)):
        date = os.path.basename(dateDirs[i])
        dates.append(date)
        if date == dateReference:
            dateIndexReference = i
    if dateIndexReference is None:
        raise Exception('cannot get reference date {} from the data list, please check your input'.format(dateReference))
    else:
        print('reference date index {}'.format(dateIndexReference))

    #use one date to find frames and swaths. any date should work, here we use dateIndexReference
    #frame folders look like f<i>_<frameNumber>; last 4 chars are the frame number
    frames = sorted([x[-4:] for x in glob.glob(os.path.join(dateDirs[dateIndexReference], 'f*_*'))])
    #swath folders look like s<n> under the first frame
    swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join(dateDirs[dateIndexReference], 'f1_*', 's*'))])

    ndate = len(dates)
    nframe = len(frames)
    nswath = len(swaths)

    #print result
    print('\nlist of dates:')
    print(' index      date       frames')
    print('=======================================================')
    for i in range(ndate):
        if dates[i] == dateReference:
            print('  %03d       %s'%(i, dates[i])+'    {}'.format(frames)+'    reference')
        else:
            print('  %03d       %s'%(i, dates[i])+'    {}'.format(frames))
    print('\n')

    #         str list, str list, str list, int list, int
    return (dateDirs, dates, frames, swaths, dateIndexReference)
def acquisitionModesAlos2():
    '''
    Return the ALOS-2 acquisition mode codes, grouped by imaging mode.

    Returns (spotlightModes, stripmapModes, scansarNominalModes,
    scansarWideModes, scansarModes), each a list of 3-letter mode codes.
    '''
    spotlightModes = ['SBS']
    stripmapModes = ['UBS', 'UBD', 'HBS', 'HBD', 'HBQ', 'FBS', 'FBD', 'FBQ']
    scansarNominalModes = ['WBS', 'WBD', 'WWS', 'WWD']
    scansarWideModes = ['VBS', 'VBD']
    # the full ScanSAR list is nominal followed by wide
    scansarModes = scansarNominalModes + scansarWideModes

    return (spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes)
def hasGPU():
    '''
    Determine if the GPU modules are available.

    Returns True when both zerodop GPU extension modules import cleanly,
    False otherwise.
    '''
    # Narrowed the original bare 'except' so that only a failed import means
    # "no GPU"; unrelated errors (KeyboardInterrupt, SystemExit, bugs inside
    # the extension modules) are no longer silently swallowed.
    try:
        from zerodop.GPUtopozero.GPUtopozero import PyTopozero
        from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr
    except ImportError:
        return False
    return True
class createObject(object):
    """Minimal empty class used as a mutable ad-hoc attribute container."""
    pass
def subbandParameters(track):
    '''
    Compute range subband parameters for split-spectrum processing.

    track: object providing radarWavelength and, on its first frame's first
           swath, rangeBandwidth and rangeSamplingRate.

    Returns (subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter,
    subbandPrefix), each a two-element list ordered [lower, upper]; bandwidths
    and frequency centers are normalized by the range sampling rate.
    '''
    #speed of light from: components/isceobj/Planet/AstronomicalHandbook.py
    SPEED_OF_LIGHT = 299792458.0

    #using 1/3, 1/3, 1/3 band split
    wavelength = track.radarWavelength
    bandwidth = track.frames[0].swaths[0].rangeBandwidth
    samplingRate = track.frames[0].swaths[0].rangeSamplingRate

    centerFrequency = SPEED_OF_LIGHT / wavelength
    offset = bandwidth / 3.0

    subbandRadarWavelength = [SPEED_OF_LIGHT / (centerFrequency - offset),
                              SPEED_OF_LIGHT / (centerFrequency + offset)]
    subbandBandWidth = [offset / samplingRate] * 2
    subbandFrequencyCenter = [-offset / samplingRate, offset / samplingRate]
    subbandPrefix = ['lower', 'upper']

    return (subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix)
def formInterferogram(slcReference, slcSecondary, interferogram, amplitude, numberRangeLooks, numberAzimuthLooks):
    '''
    Form a multi-looked interferogram and amplitude image from two
    coregistered SLCs, streaming line blocks to bound memory use.

    slcReference/slcSecondary: paths to complex64 SLC files with matching
                               <path>.xml descriptors
    interferogram/amplitude:   output paths (complex64), multi-looked by
                               numberRangeLooks x numberAzimuthLooks
    '''
    import numpy as np
    import isce, isceobj
    from isceobj.Alos2Proc.Alos2ProcPublic import multilook
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml

    img = isceobj.createImage()
    img.load(slcReference+'.xml')
    width = img.width
    length = img.length
    width2 = int(width / numberRangeLooks)
    length2 = int(length / numberAzimuthLooks)

    # open all streams with context managers so they are closed even when
    # reading/writing fails part way through (the original leaked handles
    # on error and never closed the outputs explicitly)
    with open(slcReference, 'rb') as fpRef, \
         open(slcSecondary, 'rb') as fpSec, \
         open(interferogram, 'wb') as fpInf, \
         open(amplitude, 'wb') as fpAmp:
        for k in range(length2):
            if (((k+1)%200) == 0):
                print("processing line %6d of %6d" % (k+1, length2), end='\r', flush=True)
            # read one multi-look block of azimuth lines from each SLC
            ref = np.fromfile(fpRef, dtype=np.complex64, count=numberAzimuthLooks * width).reshape(numberAzimuthLooks, width)
            sec = np.fromfile(fpSec, dtype=np.complex64, count=numberAzimuthLooks * width).reshape(numberAzimuthLooks, width)
            inf = multilook(ref*np.conjugate(sec), numberAzimuthLooks, numberRangeLooks, mean=False)
            # amplitude image packs |ref| in the real part and |sec| in the imaginary part
            amp = np.sqrt(multilook(ref.real*ref.real+ref.imag*ref.imag, numberAzimuthLooks, numberRangeLooks, mean=False)) + 1j * \
                  np.sqrt(multilook(sec.real*sec.real+sec.imag*sec.imag, numberAzimuthLooks, numberRangeLooks, mean=False))
            # zero out samples where either image has no data
            index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) )
            amp[index]=0
            inf.tofile(fpInf)
            amp.tofile(fpAmp)
    print("processing line %6d of %6d" % (length2, length2))

    create_xml(interferogram, width2, length2, 'int')
    create_xml(amplitude, width2, length2, 'amp')

View File

@ -0,0 +1,86 @@
#!/usr/bin/env python3
#Cunren Liang, 05-MAR-2020
import os
import sys
import glob
import zipfile
import argparse
import datetime
import numpy as np
import xml.etree.ElementTree as ET
def cmdLineParse():
    '''
    Command line parser.

    Prints usage and exits with status 1 when invoked without arguments;
    otherwise returns the parsed argparse namespace.
    Fixes typos in the user-facing help text ('acquistion', 'threshhold').
    '''
    parser = argparse.ArgumentParser(description='prepare alos2App.py OR alos2burstApp.py input files')
    parser.add_argument('-dir', dest='dir', type=str, required=True,
            help = 'directory containing the alos-2 data directories [data dir format: YYMMDD]')
    parser.add_argument('-xml', dest='xml', type=str, required=True,
            help = 'example alos2App.py input file')
    parser.add_argument('-num', dest='num', type=int, default=3,
            help = 'number of pairs for each acquisition. default: 3')
    parser.add_argument('-yr', dest='yr', type=float, default=1.0,
            help = 'time span threshold. default: 1.0 year')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    # date folder names double as date labels (YYMMDD)
    dates = sorted(os.path.basename(x)
                   for x in sorted(glob.glob(os.path.join(inps.dir, '*'))))
    #for x in dates:
    #    print(x)

    #read standard configurations
    tree = ET.parse(inps.xml)
    root = tree.getroot()

    ndate = len(dates)
    datefmt = "%y%m%d"
    pairs_created = []
    pairs_not_created = []
    for i, mdate in enumerate(dates):
        mtime = datetime.datetime.strptime(mdate, datefmt)
        # pair each date with up to inps.num subsequent dates
        for j in range(inps.num):
            if i + j + 1 > ndate - 1:
                continue
            sdate = dates[i+j+1]
            stime = datetime.datetime.strptime(sdate, datefmt)
            pair = mdate + '-' + sdate
            if np.absolute((stime - mtime).total_seconds()) < inps.yr * 365.0 * 24.0 * 3600:
                pairs_created.append(pair)
                print('creating pair: {}'.format(pair))
                #create pair dir
                if not os.path.exists(pair):
                    os.makedirs(pair)
                #write the per-pair xml with master/slave directories filled in
                safe = root.find("component/property[@name='master directory']")
                safe.text = '{}'.format(os.path.join(inps.dir, mdate))
                safe = root.find("component/property[@name='slave directory']")
                safe.text = '{}'.format(os.path.join(inps.dir, sdate))
                tree.write(os.path.join(pair, 'alos2App.xml'))
            else:
                pairs_not_created.append(pair)

    print('total number of pairs created: {}'.format(len(pairs_created)))
    if pairs_not_created != []:
        print('\nthe following pairs are not created because their time spans >= {} years'.format(inps.yr))
        for x in pairs_not_created:
            print(x)
        print('total number of pairs not created: {}'.format(len(pairs_not_created)))
    else:
        print('\nall possible pairs are created')

View File

@ -0,0 +1,379 @@
<?xml version="1.0" encoding="UTF-8"?>
<stack>
<component name="stackinsar">
<!--=========================================================================================
Set the following mandatory parameters to process data
==========================================================================================-->
<property name="data directory">../data/saf_d169</property>
<property name="dem for coregistration">../data/saf_d169_dem/dem_1_arcsec/demLat_N35_N44_Lon_W126_W118.dem.wgs84</property>
<property name="dem for geocoding">../data/saf_d169_dem/dem_3_arcsec/demLat_N35_N44_Lon_W126_W118.dem.wgs84</property>
<property name="water body">../data/saf_d169_dem/wbd_1_arcsec/swbdLat_N35_N44_Lon_W126_W118.wbd</property>
<property name="reference date of the stack">150408</property>
<!--=========================================================================================
See also comments of parameters "number of range looks ion" and "number of azimuth looks ion"
below to set a smaller number of looks to avoid phase aliasing in some areas (such as edges of
Tibetan Plateau, where there might be strong tropospheric variations due to large height
differences).
==========================================================================================-->
<!--=====================================================================================================
instructions for ALOS-2 stack processor
This is the input file of ALOS-2 stack processor. Below are all parameters users can set.
Instructions on how to set these parameters are also provided. Parameter default values are shown in the
brackets. Remove the first four characters and the last three characters in a parameter line to set a
parameter value.
For the techniques and algorithms implemented in the software, refer to:
1. ScanSAR or multi-mode InSAR processing
C. Liang and E. J. Fielding, "Interferometry with ALOS-2 full-aperture ScanSAR data,"
IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2739-2750, May 2017.
2. Ionospheric correction, burst-by-burst ScanSAR processing, and burst-mode spectral diversity (SD) or
multi-aperture InSAR (MAI) processing
C. Liang and E. J. Fielding, "Measuring azimuth deformation with L-band ALOS-2 ScanSAR interferometry,"
IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2725-2738, May 2017.
3. Ionospheric correction
C. Liang, Z. Liu, E. J. Fielding, and R. Bürgmann, "InSAR time series analysis of L-band wide-swath SAR
data acquired by ALOS-2,"
IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-4506, Aug. 2018.
======================================================================================================-->
<!--=========================================================================================
Directory of unpacked ALOS-2 data containing data of all dates. Data of each date is in an
individual folder named YYMMDD, which is the acquisition date of the data and can be found in
ALOS-2 image or leader files (e.g. LED-ALOS2041062800-150225-WBDR1.1__D, 150225 is YYMMDD)
==========================================================================================-->
<!--<property name="data directory">None</property>-->
<!--=========================================================================================
This is a list of frames, e.g., ['0680', '0690']. Here is how you can find frame number. Below
is a JAXA SLC product
0000168233_001001_ALOS2183010690-171012.zip
After you unpack the JAXA SLC product, you will find an image file like:
IMG-HH-ALOS2183010685-171012-FBDR1.1__A
^^^^
The number 0685 (indicated by ^) is the frame number. DON'T use the frame number in the zip
file name, as it may be incorrect (like the above example).
If all dates have equal number of frames and the frames meet the following one-to-one
correspondence, there is no need to set frames.
Date1 Folder Date2 Folder Date3 Folder
Frame **** ... Frame **** ... Frame ****
Frame **** ... Frame **** ... Frame ****
Frame **** ... Frame **** ... Frame ****
Frame **** ... Frame **** ... Frame ****
==========================================================================================-->
<!--<property name="frames">None</property>-->
<!--<property name="polarization">HH</property>-->
<!--<property name="starting swath">None</property>-->
<!--<property name="ending swath">None</property>-->
<!--=========================================================================================
Here is how you can download a DEM and water body.
#3 arcsec for geocoding
mkdir dem_3_arcsec
cd dem_3_arcsec
dem.py -a stitch -b 29 37 125 133 -k -s 3 -c -f -u http://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11
fixImageXml.py -i demLat_*_*_Lon_*_*.dem.wgs84 -f
rm *.hgt* *.log demLat_*_*_Lon_*_*.dem demLat_*_*_Lon_*_*.dem.vrt demLat_*_*_Lon_*_*.dem.xml
cd ../
#1 arcsec for creating differential interferogram
mkdir dem_1_arcsec
cd dem_1_arcsec
dem.py -a stitch -b 29 37 125 133 -k -s 1 -c -f -u http://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11
fixImageXml.py -i demLat_*_*_Lon_*_*.dem.wgs84 -f
rm *.hgt* *.log demLat_*_*_Lon_*_*.dem demLat_*_*_Lon_*_*.dem.vrt demLat_*_*_Lon_*_*.dem.xml
cd ../
#water body
#do correct missing water body tiles problem here!!! check usage of wbd.py for more details,
#or simply follow the commands below
mkdir wbd_1_arcsec
cd wbd_1_arcsec
wbd.py 29 37 125 133
fixImageXml.py -i swbdLat_*_*_Lon_*_*.wbd -f
cd ../
==========================================================================================-->
<!--<property name="dem for coregistration">None</property>-->
<!--<property name="dem for geocoding">None</property>-->
<!--<property name="water body">None</property>-->
<!--=========================================================================================
It must be set, and should be the same throughout all processings! Format must be YYMMDD.
==========================================================================================-->
<!--<property name="reference date of the stack">None</property>-->
<!--=========================================================================================
In the processing, all swaths of all frames will be resampled to the same sampling size of a
particular swath, whose swath and frame numbers can be set here. If not set, first swath of
first frame is used.
==========================================================================================-->
<!--<property name="grid frame">None</property>-->
<!--<property name="grid swath">None</property>-->
<!--=========================================================================================
Number of subsequent dates to pair up with a date.
==========================================================================================-->
<!--<property name="number of subsequent dates">4</property>-->
<!--<property name="pair time span minimum in years">None</property>-->
<!--<property name="pair time span maximum in years">None</property>-->
<!--=========================================================================================
The following parameters are lists. Date format must be YYMMDD. Pair format must be
FIRST_DATE(YYMMDD)-SECOND_DATE(YYMMDD). An example input of pairs to be included or excluded:
['150225-150408', '150225-150520']
==========================================================================================-->
<!--<property name="dates to be included">None</property>-->
<!--<property name="pairs to be included">None</property>-->
<!--<property name="dates to be excluded">None</property>-->
<!--<property name="pairs to be excluded">None</property>-->
<!--=========================================================================================
Date in least squares estimation of ionospheric phase whose ionospheric phase is assumed to
be zero. Format must be YYMMDD. By default, first date of dates involved in estimating
ionosphere is used.
==========================================================================================-->
<!--<property name="reference date of the stack for estimating ionosphere">None</property>-->
<!--=========================================================================================
The following parameters are the same as those above, but are for pairs for ionospheric
estimation. Formats are also same.
==========================================================================================-->
<!--<property name="number of subsequent dates for estimating ionosphere">4</property>-->
<!--<property name="pair time span minimum in years for estimating ionosphere">None</property>-->
<!--<property name="pair time span maximum in years for estimating ionosphere">None</property>-->
<!--<property name="dates to be included for estimating ionosphere">None</property>-->
<!--<property name="pairs to be included for estimating ionosphere">None</property>-->
<!--<property name="dates to be excluded for estimating ionosphere">None</property>-->
<!--<property name="pairs to be excluded for estimating ionosphere">None</property>-->
<!--=========================================================================================
Whether reprocess already processed dates or pairs.
==========================================================================================-->
<!--<property name="reprocess already processed dates">False</property>-->
<!--<property name="reprocess already processed pairs">False</property>-->
<!--<property name="reprocess already processed pairs for estimating ionosphere">False</property>-->
<!--=========================================================================================
Data processing directories.
==========================================================================================-->
<!--<property name="dates processing directory">dates</property>-->
<!--<property name="dates resampled directory">dates_resampled</property>-->
<!--<property name="pairs processing directory">pairs</property>-->
<!--<property name="baseline directory">baseline</property>-->
<!--<property name="dates directory for ionosphere">dates_ion</property>-->
<!--<property name="pairs processing directory for estimating ionosphere">pairs_ion</property>-->
<!--=========================================================================================
The following InSAR processing parameters are exactly the same as those in alos2App.py.
==========================================================================================-->
<!--<property name="use virtual file">True</property>-->
<!--<property name="use GPU">False</property>-->
<!--=========================================================================================
This is for determining the number of offsets to be estimated between reference and secondary SLCs.
For areas where no water body data is available, turn this off, otherwise the program will use
geometrical offset, which is not accurate enough. If it still does not work, set
"number of range offsets for slc matching" and "number of azimuth offsets for slc matching"
==========================================================================================-->
<!--<property name="use water body to dertermine number of matching offsets">True</property>-->
<!--=========================================================================================
These are 2-D lists, with frame as the first dimension and swath as the second dimension.
For example, if you want to process two frames and three swaths, you can specify one of
these parameters as:
[[20, 30, 20],[15, 20, 20]]
==========================================================================================-->
<!--<property name="number of range offsets for slc matching">None</property>-->
<!--<property name="number of azimuth offsets for slc matching">None</property>-->
<!--============================================================================================================================================
Instructions on number of looks used by the software
The software first takes number of range/azimuth looks 1, and then take any other number of range/azimuth looks (2, sim and ion).
Here are the purposes of these number of looks. Usually there is no need to set number of range/azimuth looks sim, so it is not explained here.
number of range/azimuth looks 1: save space, remove speckle noise, equalize sample size, match original resolution (full-aperture)
number of range/azimuth looks 2: make interferogram not too small or large
number of range/azimuth looks ion: make interferogram for ionosphere estimation not too small or large, facilitate ionosphere filtering
total number of looks of InSAR processing is: number of range/azimuth looks 1 * number of range/azimuth looks 2
total number of looks in ionosphere estimation is: number of range/azimuth looks 1 * number of range/azimuth looks ion
total number of looks in radar/DEM matching is: number of range/azimuth looks 1 * number of range/azimuth looks sim
Below is the default number of looks used by the software. REMEMBER, NORMALLY YOU ONLY NEED TO CHANGE number of range/azimuth looks 2!!!
============================================================================================================================================
Operation Mode | Mode (AUIG2) | Mode (in file name) | look1 (r*a) | look2 (r*a) | total insar (r*a) | look_ion (r*a) | total ion (r*a)
============================================================================================================================================
spotlight | SPT | SBS | 2*4 | 4*4 | 8*16 | 16*16 | 32*64
============================================================================================================================================
stripmap | SM1 | UBS, UBD | 2*3 | 4*4 | 8*12 | 32*32 | 64*96
| SM2 | HBS, HBD, HBQ | 2*4 | 4*4 | 8*16 | 16*16 | 32*64
| SM3 | FBS, FBD, FBQ | 2*4 | 4*4 | 8*16 | 16*16 | 32*64
============================================================================================================================================
ScanSAR | WD1 | WBS, WBD | 1*14 | 5*2 | 5*28 | 80*32 | 80*448
ScanSAR | WD1 | WWS, WWD | 2*14 | 5*2 | 10*28 | 80*32 | 160*448
| WD2 | VBS, VBD | 1*14 | 5*2 | 5*28 | 80*32 | 80*448
============================================================================================================================================
To find the acquisition mode code, check the unpacked ALOS-2 product. For example, in the following
file name
IMG-HH-ALOS2183010685-171012-FBDR1.1__A
^^^
FBD (indicated by ^) is the acquisition mode code.
=============================================================================================================================================-->
<!--=========================================================================================
These are the numbers of looks to be taken when forming the interferogram
==========================================================================================-->
<!--<property name="number of range looks 1">None</property>-->
<!--<property name="number of azimuth looks 1">None</property>-->
<!--=========================================================================================
These are the numbers of looks to be taken after taking the numbers of range/azimuth looks 1
==========================================================================================-->
<!--<property name="number of range looks 2">None</property>-->
<!--<property name="number of azimuth looks 2">None</property>-->
<!--=========================================================================================
These are the numbers of looks to be taken after taking the numbers of range/azimuth looks 1.
This is for matching the radar image and DEM
==========================================================================================-->
<!--<property name="number of range looks sim">None</property>-->
<!--<property name="number of azimuth looks sim">None</property>-->
<!--<property name="do matching when computing adjacent swath offset">True</property>-->
<!--<property name="do matching when computing adjacent frame offset">True</property>-->
<!--=========================================================================================
These are interferogram filtering parameters
==========================================================================================-->
<!--<property name="interferogram filter strength">0.3</property>-->
<!--<property name="interferogram filter window size">32</property>-->
<!--<property name="interferogram filter step size">4</property>-->
<!--<property name="remove magnitude before filtering">True</property>-->
<!--=========================================================================================
water body mask starting step: None, filt, unwrap
==========================================================================================-->
<!--<property name="water body mask starting step">unwrap</property>-->
<!--=========================================================================================
This is a four-element list [s, n, w, e], e.g. [26.24, 30.04, 33.45, 37.79].
==========================================================================================-->
<!--<property name="geocode bounding box">None</property>-->
<!--=========================================================================================
geocode interpolation method: sinc, bilinear, bicubic, nearest
==========================================================================================-->
<!--<property name="geocode interpolation method">None</property>-->
<!--=========================================================================================
These parameters are for ionospheric corrections
==========================================================================================-->
<!--<property name="do ionospheric phase estimation">True</property>-->
<!--<property name="apply ionospheric phase correction">True</property>-->
<!--=========================================================================================
These are the numbers of looks to be taken after taking the numbers of range/azimuth looks 1.
This is for ionospheric correction.
Use a larger number of looks results in smaller image size, which saves time in filtering in
ionosphere estimation. However, a larger number of looks may also lead to phase aliasing in
the resulting interferograms and therefore lead to phase unwrapping errors, which causes
significant errors in ionosphere estimation.
If the area has strong troposphere or phase variations (normally in areas with large height
differences such as edges of Tibetan Plateau), a smaller number of looks should be used to
avoid phase aliasing after taking looks. E.g. 1/2 of the default number of range/azimuth looks
ion that can be found in the annotation of parameter 'number of range looks 1'.
==========================================================================================-->
<!--<property name="number of range looks ion">None</property>-->
<!--<property name="number of azimuth looks ion">None</property>-->
<!--=========================================================================================
separated islands or areas usually affect ionosphere estimation and it's better to mask them
out. check ion/ion_cal/lower_40rlks_224alks.int (here number of looks 40 and 224 depends on
your particular case) for areas to be masked out.
The parameter is a 2-D list. Each element in the 2-D list is a four-element list: [firstLine,
lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the
four elements is specified as -1, the program will use firstLine/lastLine/firstColumn/
lastColumn instead. For example, if you want to mask the following two areas out, you can
specify a 2-D list like:
[[100, 200, 100, 200],[1000, 1200, 500, 600]]
==========================================================================================-->
<!--<property name="areas masked out in ionospheric phase estimation">None</property>-->
<!--=========================================================================================
a 2-D list. e.g. if you are processing two ScanSAR frames, each with five swaths, and you do
not want phase difference of swath 1 and 2 in frame 2 snap to fixed values, the parameter can be specified
as:
[[True, True, True, True], [False, True, True, True]]
==========================================================================================-->
<!--<property name="swath phase difference snap to fixed values">None</property>-->
<!--=========================================================================================
a 2-D list. e.g. if you are processing two ScanSAR frames, each with five swaths, and you want
to use a phase difference value 0.21 (rad) for swath 1 and 2 in frame 2, the parameter can be
specified as:
[[None, None, None, None], [0.21, None, None, None]]
This parameter has highest priority in determining phase difference between swaths.
==========================================================================================-->
<!--<property name="swath phase difference of lower band">None</property>-->
<!--<property name="swath phase difference of upper band">None</property>-->
<!--<property name="apply polynomial fit before filtering ionosphere phase">True</property>-->
<!--<property name="whether filtering ionosphere phase">True</property>-->
<!--<property name="apply polynomial fit in adaptive filtering window">True</property>-->
<!--<property name="whether do secondary filtering of ionosphere phase">True</property>-->
<!--<property name="maximum window size for filtering ionosphere phase">301</property>-->
<!--<property name="minimum window size for filtering ionosphere phase">11</property>-->
<!--<property name="window size of secondary filtering of ionosphere phase">5</property>-->
<!--=========================================================================================
Normally no need to set this parameter, it will be automatically determined.
==========================================================================================-->
<!--<property name="standard deviation of ionosphere phase after filtering">None</property>-->
<!--=========================================================================================
parameters for filtering subband interferograms used for ionospheric phase estimation
==========================================================================================-->
<!--<property name="filter subband interferogram">False</property>-->
<!--<property name="subband interferogram filter strength">0.3</property>-->
<!--<property name="subband interferogram filter window size">32</property>-->
<!--<property name="subband interferogram filter step size">4</property>-->
<!--<property name="remove magnitude before filtering subband interferogram">True</property>-->
</component>
</stack>

View File

@ -0,0 +1,265 @@
######################################################################################
# Tutorial for alosStack
# Cunren Liang, October 2020
######################################################################################
This is the tutorial of alosStack processor.
###########################################
# 0. SET ENVIRONMENT VARIABLE
###########################################
Set environment variable 'PATH_ALOSSTACK'
export PATH_ALOSSTACK=CODE_DIR/contrib/stack/alosStack
where CODE_DIR is the directory of your isce code. Note that alosStack is not installed when you install
the software, so CODE_DIR is your code directory rather than installation directory.
###########################################
# 1. PREPARE DATA
###########################################
1. ALOS-2 data
Currently the processor only supports the processing of a stack of data acquired in the same mode.
To find the acquisition mode code, check the unpacked ALOS-2 product. For example, in the following
file name
IMG-HH-ALOS2183010685-171012-FBDR1.1__A
^^^
FBD (indicated by ^) is the acquisition mode code. Here is the list of acquisition modes:
Operation Mode | Mode (AUIG2) | Mode (in file name)
--------------------------------------------------------------
spotlight | SPT | SBS
--------------------------------------------------------------
stripmap | SM1 | UBS, UBD
| SM2 | HBS, HBD, HBQ
| SM3 | FBS, FBD, FBQ
--------------------------------------------------------------
ScanSAR | WD1 | WBS, WBD, WWS, WWD
| WD2 | VBS, VBD
Create a folder such as 'saf_d169', and in this folder, unpack all frames of each date in an individual folder
named YYMMDD. YYMMDD is the acquisition date, and it must be in this format. Now the data directory should look
like
saf_d169_data-------150225-------IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F1
|__150408 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F2
|__150520 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F3
|__150701 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F4
|__... |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F5
|__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F1
|__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F2
|__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F3
|__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F4
|__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F5
|__LED-ALOS2041062800-150225-WBDR1.1__D
|__LED-ALOS2041062850-150225-WBDR1.1__D
2. DEM and water body
You MUST FIRST have an account to download DEM and water body. See
https://github.com/isce-framework/isce2#notes-on-digital-elevation-models
or
https://github.com/isce-framework/isce2
for more details.
See input xml file alosStack.xml in this folder on how to download DEM and water body.
###########################################
# 2. PROCESS DATA
###########################################
1. Create and enter a folder for processing data, e.g.
mkdir saf_d169_proc
cd saf_d169_proc
2. Input xml file alosStack.xml can be found in code directory. Copy it to current folder and simply set
the parameters.
cp ${PATH_ALOSSTACK}/alosStack.xml ./
3. Create command files for processing data. Run
${PATH_ALOSSTACK}/create_cmds.py -stack_par alosStack.xml
4. Do most of the single date processing. Run
./cmd_1.sh
In cmd_1.sh and other command files, note that you can split the 'for loop' in each step into a number
of parallel runs. See command file for details.
Highly recommended parallel processing steps in each command file.
cmd_1.sh:
estimate SLC offsets
resample to a common grid (WD1 SLC size may be up to 7.2 G, so each run requires this much memory!)
cmd_2.sh
form interferograms (does not require a lot of computation, more parallel runs recommended)
mosaic interferograms (does not require a lot of computation, more parallel runs recommended)
cmd_3.sh
subband interferograms (does not require a lot of computation, more parallel runs recommended)
cmd_4.sh
all steps
5. InSAR processing before ionosphere correction. Run
./cmd_2.sh
6. Ionosphere correction (if do ionospheric phase estimation, by default True). If the following parameter of
the input xml file is True (default)
<!--<property name="do ionospheric phase estimation">True</property>-->
Run
./cmd_3.sh
After it finishes, check the images in folder 'fig_ion' to see if ionosphere estimation is OK for each
pair. The anomalies include dense fringes or slight phase difference between adjacent swaths in ScanSAR
interferograms after removing ionosphere. There might also be dense fringes elsewhere. These are all anomalies
and the associated ionosphere estimation results should not be used in the next steps.
At the end of this command file, there is a step called 'estimate ionospheric phase for each date'. If you found
some pairs with ionosphere estimation anomalies, specify them by adding argument '-exc_pair' to the command ion_ls.py.
Make sure all dates are still connected after excluding these pairs, and then run ion_ls.py.
You can plot baselines to see if the pairs are fully connected, e.g.
${PATH_ALOSSTACK}/plot_baseline.py -baseline baseline/baseline_center.txt -pairs_dir pairs_ion -pairs_exc 150520-150701 -output baselines.pdf
If the following parameters of the input xml file are True (default)
<!--<property name="do ionospheric phase estimation">True</property>-->
<!--<property name="apply ionospheric phase correction">True</property>-->
there is a final step called 'correct ionosphere' in cmd_3.sh, uncomment the code marked by '#uncomment to run this command'
and then run the entire step.
7. InSAR processing after ionosphere correction. Run
./cmd_4.sh
If everything is OK, you may consider removing the huge slc files in folder dates_resampled. When you need them in
the future, you can re-run the commands in the '#resample to a common grid' step in cmd_1.sh.
Furthermore, you may consider removing the huge original data files you unpacked previously.
###########################################
# 3. ADDING MORE DATES
###########################################
Sometimes we want to add new acquisitions to the already processed stack. To do this,
1. Unpack the new acquisitions in data directory following #1. PREPARE DATA.
2. Repeat the processing in #2. PROCESS DATA.
We recommend saving previous command files in a folder before new processing. Note that even the previously processed
pairs will be reprocessed again by cmd_4.sh if the following parameters of the input xml file are True (default)
<!--<property name="do ionospheric phase estimation">True</property>-->
<!--<property name="apply ionospheric phase correction">True</property>-->
because ionospheric phase will be estimated by ion_ls.py at the end of cmd_3.sh for each date with new pairs included,
and therefore all steps after ion_ls.py should be reprocessed.
###########################################
# 4. CHECK RESULTS
###########################################
baseline baseline files
burst_synchronization.txt burst synchronization
dates original data of each date
dates_ion ionospheric phase of each date
dates_resampled resampled data of each date. Data of all other dates are coregistered to reference date.
The parameter xml files including *.track.xml and f*_*/*.frame.xml are in reference date
folder. These should be the files you should use in most cases, such as looking for data
parameters, preparing for time series analysis etc.
fig_ion figures for checking ionosphere estimation results
pairs pairs of InSAR processing
pairs_ion pairs for ionosphere estimation
If you want to know more details about the files in each folder, read
CODE_DIR/examples/input_files/alos2/alos2_tutorial.txt
File name conventions and directory structures are mostly the same.
###########################################
# 5. KNOWN ISSUES
###########################################
1. Issues with Ionospheric Correction
According to our experience, ionospheric correction works for most of the interferograms. Because it
relies on coherence and phase unwrapping, it does not work in some cases. These include:
(1) data have low coherence
(2) the majority of the imaged area is low coherence area like lake, ocean...
(3) the imaged area is completely divided into several isolated areas by low coherence areas, such as
islands.
In addition to the above issues, there are also data-mode-related issues.
(1) ScanSAR-ScanSAR interferometry. While you can process one single subswath, it's better to process
more than one subswath if the additional subswath has good coherence. This is good for ionospheric
correction.
(2) Range distortions in JAXA product. This mostly happens in stripmap-stripmap interferometry using
data not covering Japan. If you see very dense fringes in the corrected interferogram, probably it is
caused by this problem. This has been reported to JAXA and JAXA is working on debugging the focusing
program.
UPDATE: On November 20, 2018 (JST), JAXA updated the software for PALSAR-2 standard products. Therefore,
if your product is ordered after this time, you don't have this problem.
2. How do I improve ionospheric correction?
First of all, we recommend reading through cmd_3.sh before manually improving ionosphere estimation results.
Isolated areas lead to relative phase unwrapping errors, and therefore leads to significant errors in ionosphere
estimation result, usually shown as dense fringes in the corrected interferograms. If your scene covers an area
with two or more isolated areas and you are interested in one of the areas, you can mask out the other areas by
setting "areas masked out in ionospheric phase estimation".
Or if you have processed the data, you can also specify the argument -masked_areas in ion_filt.py in cmd_3.sh.
Then check the updated results following step '#check ionosphere estimation results' in cmd_3.sh
For ScanSAR, the software uses some accurate values for removing phase difference between adjacent swaths.
This, however, does not work well sometimes as a result of the inconsistencies between different JAXA products,
especially products processed by different versions of JAXA software. As a result of this, you may see dense
fringes in the ionospheric correction result. In this case, you can try not to use aforementioned accurate
values by setting -snap in ion_subband.py in cmd_3.sh, and run this command and the remaining commands to see
if ionosphere estimation results have improvement.
Note that each time you updated ionosphere estimation results, you need to re-run the steps after
'#estimate ionospheric phase for each date' (including this step) in cmd_3.sh, as well as cmd_4.sh
4. ScanSAR burst synchronization
For ScanSAR data acquired before February 8, 2015, chances of having enough burst synchronization for
interferometry are very low. Don't include data acquired before this date in your stack processing.
###########################################
# 6. REFERENCES
###########################################
The methods and algorithms implemented can be found in the following papers.
1. ScanSAR or multi-mode InSAR processing
C. Liang and E. J. Fielding, "Interferometry with ALOS-2 full-aperture ScanSAR data,"
IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2739-2750, May 2017.
2. Ionospheric correction, burst-by-burst ScanSAR processing, and burst-mode spectral diversity (SD) or
multi-aperture InSAR (MAI) processing
C. Liang and E. J. Fielding, "Measuring azimuth deformation with L-band ALOS-2 ScanSAR interferometry,"
IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2725-2738, May 2017.
3. Ionospheric correction
C. Liang, Z. Liu, E. J. Fielding, and R. Bürgmann, "InSAR time series analysis of L-band wide-swath SAR
data acquired by ALOS-2,"
IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-4506, Aug. 2018.

View File

@ -0,0 +1,186 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr
from StackPulic import loadTrack
from StackPulic import stackDateStatistics
def computeBaseline(trackReference, trackSecondary, azimuthTime, rangeDistance):
    '''Compute the interferometric baseline at one radar coordinate.

    trackReference/trackSecondary: track objects carrying an orbit
    azimuthTime: azimuth time on the reference track (datetime)
    rangeDistance: slant range on the reference track [m]
    returns: (Bpar, Bperp) parallel and perpendicular baselines [m]
    '''
    import numpy as np
    from isceobj.Planet.Planet import Planet

    #adapted from Piyush's baseline computation code
    ellipsoid = Planet(pname='Earth').ellipsoid

    #reference state vector and the ground target it observes
    refSV = trackReference.orbit.interpolate(azimuthTime, method='hermite')
    llh = trackReference.orbit.rdr2geo(azimuthTime, rangeDistance)

    #locate the same ground target in the secondary geometry
    secTime, secRange = trackSecondary.orbit.geo2rdr(llh)
    secSV = trackSecondary.orbit.interpolateOrbit(secTime, method='hermite')

    targetXyz = np.array(ellipsoid.LLH(llh[0], llh[1], llh[2]).ecef().tolist())
    refPos = np.array(refSV.getPosition())
    refVel = np.array(refSV.getVelocity())
    secPos = np.array(secSV.getPosition())

    #remove the along-track component of the secondary position,
    #to fix abrupt change near zero in baseline grid. JUN-05-2020
    velUnit = refVel / np.linalg.norm(refVel)
    secPos = secPos - np.dot(secPos - refPos, velUnit) * velUnit

    #solve the (rangeDistance, baseline, secRange) triangle for the
    #parallel/perpendicular baseline components via the law of cosines
    baselineLength = np.linalg.norm(secPos - refPos)
    cosTheta = (rangeDistance * rangeDistance + baselineLength * baselineLength - secRange * secRange) / (2. * rangeDistance * baselineLength)
    Bpar = baselineLength * cosTheta

    perp = baselineLength * np.sqrt(1 - cosTheta * cosTheta)
    #sign of Bperp from the orientation of the baseline w.r.t. the look vector
    direction = np.sign(np.dot(np.cross(targetXyz - refPos, secPos - refPos), refVel))
    Bperp = direction * perp

    return (Bpar, Bperp)
def cmdLineParse():
    '''
    command line parser.

    Returns the parsed argparse namespace; prints help and exits with
    status 1 when invoked without any arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='compute baselines for a number of dates')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-odir', dest='odir', type=str, required=True,
            help = 'output directory where baseline of each date is output')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date. format: YYMMDD')
    #fixed typo: "seperated" -> "separated"
    parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[],
            help = 'a number of secondary dates separated by blanks. format: YYMMDD YYMMDD YYMMDD. If provided, only compute baseline grids of these dates')
    parser.add_argument('-baseline_center', dest='baseline_center', type=str, default=None,
            help = 'output baseline file at image center for all dates. If not provided, it will not be computed')
    parser.add_argument('-baseline_grid', dest='baseline_grid', action='store_true', default=False,
            help='compute baseline grid for each date')
    parser.add_argument('-baseline_grid_width', dest='baseline_grid_width', type=int, default=10,
            help = 'baseline grid width if compute baseline grid, default: 10')
    parser.add_argument('-baseline_grid_length', dest='baseline_grid_length', type=int, default=10,
            help = 'baseline grid length if compute baseline grid, default: 10')

    #print help and exit when no arguments are given
    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()
    #get user parameters from input
    idir = inps.idir
    odir = inps.odir
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    baselineCenterFile = inps.baseline_center
    baselineGrid = inps.baseline_grid
    widthBaseline = inps.baseline_grid_width
    lengthBaseline = inps.baseline_grid_length
    #######################################################
    #get date statistics
    dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference)
    ndate = len(dates)
    nframe = len(frames)
    nswath = len(swaths)
    #create output directory if it does not already exist
    if not os.path.isdir(odir):
        print('output directory {} does not exist, create'.format(odir))
        os.makedirs(odir, exist_ok=True)
    os.chdir(odir)
    #compute baseline
    trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference])
    bboxRdr = getBboxRdr(trackReference)
    #at four corners
    #radar bounding box of the reference track: [rangeMin, rangeMax, azimuthTimeMin, azimuthTimeMax]
    rangeMin = bboxRdr[0]
    rangeMax = bboxRdr[1]
    azimuthTimeMin = bboxRdr[2]
    azimuthTimeMax = bboxRdr[3]
    #at image center
    azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0)
    rangeMid = (rangeMin + rangeMax) / 2.0
    #grid size
    #sample spacing so that the grid spans the full bounding box inclusively
    rangeDelta = (rangeMax - rangeMin) / (widthBaseline - 1.0)
    azimuthDelta = (azimuthTimeMax-azimuthTimeMin).total_seconds() / (lengthBaseline - 1.0)
    #baseline at image center
    if baselineCenterFile is not None:
        baselineCenter = ' reference date secondary date parallel baseline [m] perpendicular baseline [m]\n'
        baselineCenter += '===========================================================================================\n'
    #baseline grid: two-band BIL image, band 1: parallel baseline, band 2: perpendicular baseline
    baseline = np.zeros((lengthBaseline*2, widthBaseline), dtype=np.float32)
    #compute baseline
    for i in range(ndate):
        if i == dateIndexReference:
            continue
        trackSecondary = loadTrack(dateDirs[i], dates[i])
        #compute baseline at image center
        #NOTE: center baselines are computed for ALL dates (before the -sec_date
        #filter below), matching the -baseline_center help text
        if baselineCenterFile is not None:
            (Bpar, Bperp) = computeBaseline(trackReference, trackSecondary, azimuthTimeMid, rangeMid)
            baselineCenter += ' %s %s %9.3f %9.3f\n'%(dates[dateIndexReference], dates[i], Bpar, Bperp)
        #baseline grids only for the user-selected secondary dates (if any given)
        if dateSecondary != []:
            if dates[i] not in dateSecondary:
                continue
        #compute baseline grid
        if baselineGrid:
            baselineFile = '{}-{}.rmg'.format(dates[dateIndexReference], dates[i])
            if os.path.isfile(baselineFile):
                print('baseline grid file {} already exists, do not create'.format(baselineFile))
            else:
                for j in range(lengthBaseline):
                    for k in range(widthBaseline):
                        (baseline[j*2, k], baseline[j*2+1, k]) = computeBaseline(trackReference, trackSecondary,
                            azimuthTimeMin+datetime.timedelta(seconds=azimuthDelta*j),
                            rangeMin+rangeDelta*k)
                baseline.astype(np.float32).tofile(baselineFile)
                create_xml(baselineFile, widthBaseline, lengthBaseline, 'rmg')
    #dump baseline at image center
    if baselineCenterFile is not None:
        print('\nbaselines at image centers')
        print(baselineCenter)
        with open(baselineCenterFile, 'w') as f:
            f.write(baselineCenter)

View File

@ -0,0 +1,207 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from StackPulic import loadTrack
from StackPulic import stackDateStatistics
def computeBurstSynchronization(trackReference, trackSecondary):
    '''compute burst synchronization

    Returns (unsynTimeAll, synPercentageAll): per-frame lists of per-swath
    unsynchronized time [s] and synchronization percentage [%].
    '''
    import datetime
    import numpy as np
    frames = [frame.frameNumber for frame in trackReference.frames]
    swaths = [swath.swathNumber for swath in trackReference.frames[0].swaths]
    startingSwath = swaths[0]
    endingSwath = swaths[-1]
    #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights
    #in one frame, real unsynchronized time is the same for all swaths
    unsynTime = 0
    #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime))
    #synTime = 0
    synPercentage = 0
    numberOfFrames = len(frames)
    numberOfSwaths = endingSwath - startingSwath + 1
    unsynTimeAll = []
    synPercentageAll = []
    for i, frameNumber in enumerate(frames):
        unsynTimeAll0 = []
        synPercentageAll0 = []
        for j, swathNumber in enumerate(range(startingSwath, endingSwath + 1)):
            referenceSwath = trackReference.frames[i].swaths[j]
            secondarySwath = trackSecondary.frames[i].swaths[j]
            #using Piyush's code for computing range and azimuth offsets
            #offsets are evaluated at the reference swath mid-range/mid-azimuth pixel
            midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5
            midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf)
            llh = trackReference.orbit.rdr2geo(midSensingStart, midRange)
            slvaz, slvrng = trackSecondary.orbit.geo2rdr(llh)
            ###Translate to offsets
            #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same
            #range pixel size and prf
            rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5
            azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5
            #compute burst synchronization
            #burst parameters for ScanSAR wide mode not estimated yet
            #if self._insar.modeCombination == 21:
            #reference burst start line mapped into the secondary line coordinates
            scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
            #secondary burst start times corresponding to reference burst start times (100% synchronization)
            scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \
                                          scburstStartLine + 100000*referenceSwath.burstCycleLength, \
                                          referenceSwath.burstCycleLength)
            dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines)
            #find the difference with minimum absolute value
            unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))]
            #no overlap at all: clamp unsynLines to +/- burstLength, 0% synchronization
            if np.absolute(unsynLines) >= secondarySwath.burstLength:
                synLines = 0
                if unsynLines > 0:
                    unsynLines = secondarySwath.burstLength
                else:
                    unsynLines = -secondarySwath.burstLength
            else:
                synLines = secondarySwath.burstLength - np.absolute(unsynLines)
            #running totals (averaged below but not returned; kept from the original
            #Alos2Proc implementation)
            unsynTime += unsynLines / referenceSwath.prf
            synPercentage += synLines / referenceSwath.burstLength * 100.0
            unsynTimeAll0.append(unsynLines / referenceSwath.prf)
            synPercentageAll0.append(synLines / referenceSwath.burstLength * 100.0)
        unsynTimeAll.append(unsynTimeAll0)
        synPercentageAll.append(synPercentageAll0)
    ############################################################################################
    #illustration of the sign of the number of unsynchronized lines (unsynLines)
    #The convention is the same as ampcor offset, that is,
    #   secondaryLineNumber = referenceLineNumber + unsynLines
    #
    # |-----------------------|     ------------
    # |                       |        ^
    # |                       |        |
    # |                       |        |   unsynLines < 0
    # |                       |        |
    # |                       |       \ /
    # |                       |    |-----------------------|
    # |                       |    |                       |
    # |                       |    |                       |
    # |-----------------------|    |                       |
    #        Reference Burst       |                       |
    #                              |                       |
    #                              |                       |
    #                              |                       |
    #                              |                       |
    #                              |-----------------------|
    #                                     Secondary Burst
    #
    #
    ############################################################################################
    #getting average
    #if self._insar.modeCombination == 21:
    unsynTime /= numberOfFrames*numberOfSwaths
    synPercentage /= numberOfFrames*numberOfSwaths
    #only the per-frame/per-swath lists are returned; the averages above are unused here
    return (unsynTimeAll, synPercentageAll)
def cmdLineParse():
    '''
    command line parser.

    Returns the parsed argparse namespace; prints help and exits with
    status 1 when invoked without any arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='compute burst synchronization for a number of dates')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-burst_sync_file', dest='burst_sync_file', type=str, required=True,
            help = 'output burst synchronization file')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date. format: YYMMDD')
    #fixed typo: "seperated" -> "separated"
    parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[],
            help = 'a number of secondary dates separated by blanks. format: YYMMDD YYMMDD YYMMDD. If provided, only compute burst synchronization of these dates')

    #print help and exit when no arguments are given
    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()
    #get user parameters from input
    idir = inps.idir
    burstSyncFile = inps.burst_sync_file
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    #######################################################
    #get date statistics
    dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference)
    ndate = len(dates)
    nframe = len(frames)
    nswath = len(swaths)
    #compute burst synchronization
    trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference])
    #re-read frame/swath numbers from the loaded reference track for report labels
    frames = [frame.frameNumber for frame in trackReference.frames]
    swaths = [swath.swathNumber for swath in trackReference.frames[0].swaths]
    startingSwath = swaths[0]
    endingSwath = swaths[-1]
    #text report accumulated below and written to burstSyncFile at the end
    burstSync = ' reference date secondary date frame swath burst UNsync time [ms] burst sync [%]\n'
    burstSync += '==================================================================================================\n'
    #compute burst synchronization
    for i in range(ndate):
        if i == dateIndexReference:
            continue
        #if the user listed secondary dates, only process those
        if dateSecondary != []:
            if dates[i] not in dateSecondary:
                continue
        trackSecondary = loadTrack(dateDirs[i], dates[i])
        unsynTimeAll, synPercentageAll = computeBurstSynchronization(trackReference, trackSecondary)
        #one report line per frame/swath; dates printed only on the first line of a pair
        for j in range(nframe):
            for k in range(nswath):
                if (j == 0) and (k == 0):
                    burstSync += ' %s %s %s %d %8.2f %6.2f\n'%\
                    (dates[dateIndexReference], dates[i], frames[j], swaths[k], unsynTimeAll[j][k]*1000.0, synPercentageAll[j][k])
                else:
                    burstSync += ' %s %d %8.2f %6.2f\n'%\
                    (frames[j], swaths[k], unsynTimeAll[j][k]*1000.0, synPercentageAll[j][k])
        #pair-level means over all frames and swaths (seconds converted to ms)
        burstSync += ' %8.2f (mean) %6.2f (mean)\n\n'%(np.mean(np.array(unsynTimeAll), dtype=np.float64)*1000.0, np.mean(np.array(synPercentageAll), dtype=np.float64))
    #dump burstSync
    print('\nburst synchronization')
    print(burstSync)
    with open(burstSyncFile, 'w') as f:
        f.write(burstSync)

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,97 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
from StackPulic import loadProduct
from StackPulic import stackDateStatistics
def cmdLineParse():
    '''
    command line parser.

    Returns the parsed argparse namespace; prints help and exits with
    status 1 when invoked without any arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='form interferogram')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed copy-paste error: help previously said "reference date of this pair"
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')

    #print help and exit when no arguments are given
    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()
    #get user parameters from input
    idir = inps.idir
    dateReferenceStack = inps.ref_date_stack
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    #######################################################
    #naming conventions: pair "REF-SEC", multilook suffix "_{nrlks}rlks_{nalks}alks"
    pair = '{}-{}'.format(dateReference, dateSecondary)
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)
    dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack)
    #stack-reference track parameters supply pixel size and wavelength
    trackParameter = os.path.join(dateDirs[dateIndexReference], dates[dateIndexReference]+'.track.xml')
    trackReferenceStack = loadProduct(trackParameter)
    rangePixelSize = numberRangeLooks1 * trackReferenceStack.rangePixelSize
    radarWavelength = trackReferenceStack.radarWavelength
    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)
    interferogram = pair + ml1 + '.int'
    differentialInterferogram = 'diff_' + pair + ml1 + '.int'
    #remove the geometric (topographic) phase using the rectified range offsets;
    #phase term is 4*pi*offset*rangePixelSize/wavelength, sign depends on which
    #date of the pair is the stack reference; (b!=0)/(c!=0) masks invalid offsets
    if dateReference == dateReferenceStack:
        rectRangeOffset = os.path.join('../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off')
        cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset, differentialInterferogram)
    elif dateSecondary == dateReferenceStack:
        rectRangeOffset = os.path.join('../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off')
        cmd = "imageMath.py -e='a*exp(1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset, differentialInterferogram)
    else:
        #neither date is the stack reference: use the difference of both offsets
        rectRangeOffset1 = os.path.join('../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off')
        rectRangeOffset2 = os.path.join('../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off')
        cmd = "imageMath.py -e='a*exp(1.0*J*(b-c)*4.0*{}*{}/{})*(b!=0)*(c!=0)' --a={} --b={} --c={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset1, rectRangeOffset2, differentialInterferogram)
    runCmd(cmd)
    os.chdir('../')

View File

@ -0,0 +1,83 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import isce, isceobj
from isceobj.Alos2Proc.runFrameOffset import frameOffset
from StackPulic import loadTrack
from StackPulic import acquisitionModesAlos2
def cmdLineParse():
    '''
    command line parser.

    Returns the parsed argparse namespace; prints help and exits with
    status 1 when invoked without any arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='estimate frame offset')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'data directory')
    parser.add_argument('-date', dest='date', type=str, required=True,
            help = 'data acquisition date. format: YYMMDD')
    parser.add_argument('-output', dest='output', type=str, required=True,
            help = 'output file')
    #parser.add_argument('-match', dest='match', type=int, default=1,
    #                    help = 'do matching when computing adjacent frame offset. 0: no. 1: yes (default)')
    #fixed help text: this script estimates frame offsets, not swath offsets
    parser.add_argument('-match', dest='match', action='store_true', default=False,
            help='do matching when computing adjacent frame offset')

    #print help and exit when no arguments are given
    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()
    #get user parameters from input
    idir = inps.idir
    date = inps.date
    outputFile = inps.output
    match = inps.match
    #######################################################
    spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()
    track = loadTrack(idir, date)
    #save current dir
    #NOTE(review): dirOriginal is saved but never restored within this block
    dirOriginal = os.getcwd()
    os.chdir(idir)
    #frame offset is only meaningful when the track has more than one frame
    if len(track.frames) > 1:
        #matchingMode 0 is used for ScanSAR (full-aperture), 1 otherwise
        if track.operationMode in scansarModes:
            matchingMode=0
        else:
            matchingMode=1
        mosaicDir = 'insar'
        os.makedirs(mosaicDir, exist_ok=True)
        os.chdir(mosaicDir)
        #compute frame offset; cross correlation only when -match was given
        offsetReference = frameOffset(track, date+'.slc', 'frame_offset.txt',
                                      crossCorrelation=match, matchingMode=matchingMode)
        os.chdir('../')
    else:
        print('there is only one frame, no need to estimate frame offset')

View File

@ -0,0 +1,425 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
import mroipac
from mroipac.ampcor.Ampcor import Ampcor
from isceobj.Alos2Proc.Alos2ProcPublic import topo
from isceobj.Alos2Proc.Alos2ProcPublic import geo2rdr
from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar
from isceobj.Alos2Proc.Alos2ProcPublic import reformatGeometricalOffset
from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset
from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets
from isceobj.Alos2Proc.Alos2ProcPublic import computeOffsetFromOrbit
from StackPulic import loadTrack
from StackPulic import stackDateStatistics
from StackPulic import acquisitionModesAlos2
def cmdLineParse():
    '''
    command line parser.

    Returns the parsed argparse namespace; prints help and exits with
    status 1 when invoked without any arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='estimate offset between a pair of SLCs for a number of dates')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date. format: YYMMDD')
    #fixed typo: "seperated" -> "separated"
    parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[],
            help = 'a number of secondary dates separated by blanks. format: YYMMDD YYMMDD YYMMDD. If provided, only estimate offsets of these dates')
    parser.add_argument('-wbd', dest='wbd', type=str, default=None,
            help = 'water body used to determine number of offsets in range and azimuth')
    parser.add_argument('-dem', dest='dem', type=str, default=None,
            help = 'if water body is provided, dem file must also be provided')
    #fixed typo: "dertermine" -> "determine"
    parser.add_argument('-use_wbd_offset', dest='use_wbd_offset', action='store_true', default=False,
            help='use water body to determine number of matching offsets')
    #action='append' with nargs='+' gives one list of numbers per frame
    parser.add_argument('-num_rg_offset', dest='num_rg_offset', type=int, nargs='+', action='append', default=[],
            help = 'number of offsets in range. format (e.g. 2 frames, 3 swaths): -num_rg_offset 11 12 13 -num_rg_offset 14 15 16')
    parser.add_argument('-num_az_offset', dest='num_az_offset', type=int, nargs='+', action='append', default=[],
            help = 'number of offsets in azimuth. format (e.g. 2 frames, 3 swaths): -num_az_offset 11 12 13 -num_az_offset 14 15 16')

    #print help and exit when no arguments are given
    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
inps = cmdLineParse()
#get user parameters from input
idir = inps.idir
dateReference = inps.ref_date
dateSecondary = inps.sec_date
wbd = inps.wbd
dem = inps.dem
useWbdForNumberOffsets = inps.use_wbd_offset
numberOfOffsetsRangeInput = inps.num_rg_offset
numberOfOffsetsAzimuthInput = inps.num_az_offset
if wbd is not None:
wbdFile = os.path.abspath(wbd)
else:
wbdFile = None
if dem is not None:
demFile = os.path.abspath(dem)
else:
demFile = None
#######################################################
spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()
warningMessage = ''
#get date statistics
dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference)
ndate = len(dates)
nframe = len(frames)
nswath = len(swaths)
#load reference track
referenceTrack = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference])
dateSecondaryFirst = None
for idate in range(ndate):
if idate == dateIndexReference:
continue
if dateSecondary != []:
if dates[idate] not in dateSecondary:
continue
dateSecondaryFirst = dates[idate]
break
if dateSecondaryFirst is None:
raise Exception('no secondary date is to be processed\n')
#set number of matching points
numberOfOffsetsRangeUsed = [[None for j in range(nswath)] for i in range(nframe)]
numberOfOffsetsAzimuthUsed = [[None for j in range(nswath)] for i in range(nframe)]
for i, frameNumber in enumerate(frames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
swathDir = 's{}'.format(swathNumber)
print('determine number of range/azimuth offsets frame {}, swath {}'.format(frameNumber, swathNumber))
referenceSwath = referenceTrack.frames[i].swaths[j]
#1. set initinial numbers
#in case there are long time span pairs that have bad coherence
ratio = np.sqrt(1.5)
if referenceTrack.operationMode in scansarModes:
numberOfOffsetsRange = int(10*ratio+0.5)
numberOfOffsetsAzimuth = int(40*ratio+0.5)
else:
numberOfOffsetsRange = int(20*ratio+0.5)
numberOfOffsetsAzimuth = int(20*ratio+0.5)
#2. change the initial numbers using water body
if useWbdForNumberOffsets and (wbdFile is not None) and (demFile is not None):
numberRangeLooks=100
numberAzimuthLooks=100
#compute land ratio using topo module
# latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
latFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lat.rdr')
lonFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lon.rdr')
hgtFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'hgt.rdr')
losFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'los.rdr')
wbdRadarFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'wbd.rdr')
topo(referenceSwath, referenceTrack, demFile, latFile, lonFile, hgtFile, losFile=losFile,
incFile=None, mskFile=None,
numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False)
waterBodyRadar(latFile, lonFile, wbdFile, wbdRadarFile)
wbdImg = isceobj.createImage()
wbdImg.load(wbdRadarFile+'.xml')
width = wbdImg.width
length = wbdImg.length
wbd = np.fromfile(wbdRadarFile, dtype=np.byte).reshape(length, width)
landRatio = np.sum(wbd==0) / (length*width)
if (landRatio <= 0.00125):
print('\n\nWARNING: land too small for estimating slc offsets at frame {}, swath {}'.format(frameNumber, swathNumber))
print('proceed to use geometric offsets for forming interferogram')
print('but please consider not using this swath\n\n')
warningMessage += 'land too small for estimating slc offsets at frame {}, swath {}, use geometric offsets\n'.format(frameNumber, swathNumber)
numberOfOffsetsRange = 0
numberOfOffsetsAzimuth = 0
else:
#put the results on a grid with a specified interval
interval = 0.2
axisRatio = int(np.sqrt(landRatio)/interval)*interval + interval
if axisRatio > 1:
axisRatio = 1
numberOfOffsetsRange = int(numberOfOffsetsRange/axisRatio)
numberOfOffsetsAzimuth = int(numberOfOffsetsAzimuth/axisRatio)
else:
warningMessage += 'no water mask used to determine number of matching points. frame {} swath {}\n'.format(frameNumber, swathNumber)
#3. user's settings
if numberOfOffsetsRangeInput != []:
numberOfOffsetsRange = numberOfOffsetsRangeInput[i][j]
if numberOfOffsetsAzimuthInput != []:
numberOfOffsetsAzimuth = numberOfOffsetsAzimuthInput[i][j]
#4. save final results
numberOfOffsetsRangeUsed[i][j] = numberOfOffsetsRange
numberOfOffsetsAzimuthUsed[i][j] = numberOfOffsetsAzimuth
#estimate offsets
for idate in range(ndate):
if idate == dateIndexReference:
continue
if dateSecondary != []:
if dates[idate] not in dateSecondary:
continue
secondaryTrack = loadTrack(dateDirs[idate], dates[idate])
for i, frameNumber in enumerate(frames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
swathDir = 's{}'.format(swathNumber)
print('estimating offset frame {}, swath {}'.format(frameNumber, swathNumber))
referenceDir = os.path.join(dateDirs[dateIndexReference], frameDir, swathDir)
secondaryDir = os.path.join(dateDirs[idate], frameDir, swathDir)
referenceSwath = referenceTrack.frames[i].swaths[j]
secondarySwath = secondaryTrack.frames[i].swaths[j]
#compute geometrical offsets
if (wbdFile is not None) and (demFile is not None) and (numberOfOffsetsRangeUsed[i][j] == 0) and (numberOfOffsetsAzimuthUsed[i][j] == 0):
#compute geomtricla offsets
# latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# rgOffsetFile = 'rg_offset_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# azOffsetFile = 'az_offset_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
latFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lat.rdr')
lonFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lon.rdr')
hgtFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'hgt.rdr')
losFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'los.rdr')
#put them in current date directory
rgOffsetFile = os.path.join(idir, dates[idate], frameDir, swathDir, 'rg_offset.rdr')
azOffsetFile = os.path.join(idir, dates[idate], frameDir, swathDir, 'az_offset.rdr')
wbdRadarFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'wbd.rdr')
geo2rdr(secondarySwath, secondaryTrack, latFile, lonFile, hgtFile, rgOffsetFile, azOffsetFile, numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False)
reformatGeometricalOffset(rgOffsetFile, azOffsetFile, os.path.join(secondaryDir, 'cull.off'), rangeStep=numberRangeLooks, azimuthStep=numberAzimuthLooks, maximumNumberOfOffsets=2000)
os.remove(rgOffsetFile)
os.remove(rgOffsetFile+'.vrt')
os.remove(rgOffsetFile+'.xml')
os.remove(azOffsetFile)
os.remove(azOffsetFile+'.vrt')
os.remove(azOffsetFile+'.xml')
#estimate offsets using ampcor
else:
ampcor = Ampcor(name='insarapp_slcs_ampcor')
ampcor.configure()
mSLC = isceobj.createSlcImage()
mSLC.load(os.path.join(referenceDir, dates[dateIndexReference]+'.slc.xml'))
mSLC.filename = os.path.join(referenceDir, dates[dateIndexReference]+'.slc')
mSLC.extraFilename = os.path.join(referenceDir, dates[dateIndexReference]+'.slc.vrt')
mSLC.setAccessMode('read')
mSLC.createImage()
sSLC = isceobj.createSlcImage()
sSLC.load(os.path.join(secondaryDir, dates[idate]+'.slc.xml'))
sSLC.filename = os.path.join(secondaryDir, dates[idate]+'.slc')
sSLC.extraFilename = os.path.join(secondaryDir, dates[idate]+'.slc.vrt')
sSLC.setAccessMode('read')
sSLC.createImage()
ampcor.setImageDataType1('complex')
ampcor.setImageDataType2('complex')
ampcor.setReferenceSlcImage(mSLC)
ampcor.setSecondarySlcImage(sSLC)
#MATCH REGION
#compute an offset at image center to use
rgoff, azoff = computeOffsetFromOrbit(referenceSwath, referenceTrack, secondarySwath, secondaryTrack,
referenceSwath.numberOfSamples * 0.5,
referenceSwath.numberOfLines * 0.5)
#it seems that we cannot use 0, haven't look into the problem
if rgoff == 0:
rgoff = 1
if azoff == 0:
azoff = 1
firstSample = 1
if rgoff < 0:
firstSample = int(35 - rgoff)
firstLine = 1
if azoff < 0:
firstLine = int(35 - azoff)
ampcor.setAcrossGrossOffset(rgoff)
ampcor.setDownGrossOffset(azoff)
ampcor.setFirstSampleAcross(firstSample)
ampcor.setLastSampleAcross(mSLC.width)
ampcor.setNumberLocationAcross(numberOfOffsetsRangeUsed[i][j])
ampcor.setFirstSampleDown(firstLine)
ampcor.setLastSampleDown(mSLC.length)
ampcor.setNumberLocationDown(numberOfOffsetsAzimuthUsed[i][j])
#MATCH PARAMETERS
#full-aperture mode
if referenceTrack.operationMode in scansarModes:
ampcor.setWindowSizeWidth(64)
ampcor.setWindowSizeHeight(512)
#note this is the half width/length of search area, number of resulting correlation samples: 32*2+1
ampcor.setSearchWindowSizeWidth(32)
ampcor.setSearchWindowSizeHeight(32)
#triggering full-aperture mode matching
ampcor.setWinsizeFilt(8)
ampcor.setOversamplingFactorFilt(64)
#regular mode
else:
ampcor.setWindowSizeWidth(64)
ampcor.setWindowSizeHeight(64)
ampcor.setSearchWindowSizeWidth(32)
ampcor.setSearchWindowSizeHeight(32)
#REST OF THE STUFF
ampcor.setAcrossLooks(1)
ampcor.setDownLooks(1)
ampcor.setOversamplingFactor(64)
ampcor.setZoomWindowSize(16)
#1. The following not set
#Matching Scale for Sample/Line Directions (-) = 1. 1.
#should add the following in Ampcor.py?
#if not set, in this case, Ampcor.py'value is also 1. 1.
#ampcor.setScaleFactorX(1.)
#ampcor.setScaleFactorY(1.)
#MATCH THRESHOLDS AND DEBUG DATA
#2. The following not set
#in roi_pac the value is set to 0 1
#in isce the value is set to 0.001 1000.0
#SNR and Covariance Thresholds (-) = {s1} {s2}
#should add the following in Ampcor?
#THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC
#ampcor.setThresholdSNR(0)
#ampcor.setThresholdCov(1)
ampcor.setDebugFlag(False)
ampcor.setDisplayFlag(False)
#in summary, only two things not set which are indicated by 'The following not set' above.
#run ampcor
ampcor.ampcor()
offsets = ampcor.getOffsetField()
ampcorOffsetFile = os.path.join(secondaryDir, 'ampcor.off')
writeOffset(offsets, ampcorOffsetFile)
#finalize image, and re-create it
#otherwise the file pointer is still at the end of the image
mSLC.finalizeImage()
sSLC.finalizeImage()
##########################################
#3. cull offsets
##########################################
refinedOffsets = cullOffsets(offsets)
if refinedOffsets == None:
print('******************************************************************')
print('WARNING: There are not enough offsets left, so we are forced to')
print(' use offset without culling. frame {}, swath {}'.format(frameNumber, swathNumber))
print('******************************************************************')
warningMessage += 'not enough offsets left, use offset without culling. frame {} swath {}'.format(frameNumber, swathNumber)
refinedOffsets = offsets
cullOffsetFile = os.path.join(secondaryDir, 'cull.off')
writeOffset(refinedOffsets, cullOffsetFile)
#os.chdir('../')
#os.chdir('../')
#delete geometry files
for i, frameNumber in enumerate(frames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
swathDir = 's{}'.format(swathNumber)
if (wbdFile is not None) and (demFile is not None):
# latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
# wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber)
latFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lat.rdr')
lonFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lon.rdr')
hgtFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'hgt.rdr')
losFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'los.rdr')
wbdRadarFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'wbd.rdr')
os.remove(latFile)
os.remove(latFile+'.vrt')
os.remove(latFile+'.xml')
os.remove(lonFile)
os.remove(lonFile+'.vrt')
os.remove(lonFile+'.xml')
os.remove(hgtFile)
os.remove(hgtFile+'.vrt')
os.remove(hgtFile+'.xml')
os.remove(losFile)
os.remove(losFile+'.vrt')
os.remove(losFile+'.xml')
os.remove(wbdRadarFile)
os.remove(wbdRadarFile+'.vrt')
os.remove(wbdRadarFile+'.xml')
numberOfOffsetsUsedTxt = '\nnumber of offsets in cross correlation:\n'
numberOfOffsetsUsedTxt += ' frame swath range azimuth\n'
numberOfOffsetsUsedTxt += '============================================\n'
for i, frameNumber in enumerate(frames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
swathDir = 's{}'.format(swathNumber)
numberOfOffsetsUsedTxt += ' {} {} {} {}\n'.format(frameNumber, swathNumber, numberOfOffsetsRangeUsed[i][j], numberOfOffsetsAzimuthUsed[i][j])
print(numberOfOffsetsUsedTxt)
if warningMessage != '':
print('\n'+warningMessage+'\n')

View File

@ -0,0 +1,86 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from isceobj.Alos2Proc.runSwathOffset import swathOffset
from StackPulic import loadTrack
from StackPulic import acquisitionModesAlos2
def cmdLineParse():
    '''
    Parse command line options for swath offset estimation.

    Prints the usage message and exits with status 1 when the script is
    invoked without any arguments; otherwise returns the parsed namespace.
    '''
    import sys
    import argparse

    p = argparse.ArgumentParser(description='estimate swath offset')
    p.add_argument('-idir', dest='idir', type=str, required=True,
            help='data directory')
    p.add_argument('-date', dest='date', type=str, required=True,
            help='data acquisition date. format: YYMMDD')
    p.add_argument('-output', dest='output', type=str, required=True,
            help='output file')
    p.add_argument('-match', dest='match', action='store_true', default=False,
            help='do matching when computing adjacent swath offset')

    if len(sys.argv) <= 1:
        # invoked with no arguments: show help and abort
        print('')
        p.print_help()
        sys.exit(1)
    return p.parse_args()
if __name__ == '__main__':

    inps = cmdLineParse()

    #get user parameters from input
    idir = inps.idir            # data directory containing the f*_* frame folders
    date = inps.date            # acquisition date (YYMMDD) whose SLCs are used
    outputFile = inps.output    # file receiving the estimated swath offsets
    match = inps.match          # refine offsets by cross correlation if True
    #######################################################

    spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()

    #frame number is the last four characters of each 'f*_*' folder name
    frames = sorted([x[-4:] for x in glob.glob(os.path.join(idir, 'f*_*'))])

    track = loadTrack(idir, date)

    #save current dir
    #NOTE(review): dirOriginal is never used to chdir back; the script ends inside idir
    dirOriginal = os.getcwd()
    os.chdir(idir)

    #swath offsets only make sense for multi-swath (ScanSAR) acquisitions
    if (track.operationMode in scansarModes) and (len(track.frames[0].swaths) >= 2):
        for i, frameNumber in enumerate(frames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            os.chdir(frameDir)

            #results are computed inside a per-frame 'mosaic' working directory
            mosaicDir = 'mosaic'
            os.makedirs(mosaicDir, exist_ok=True)
            os.chdir(mosaicDir)

            #compute swath offset
            offsetReference = swathOffset(track.frames[i], date+'.slc', outputFile,
                                          crossCorrelation=match, numberOfAzimuthLooks=10)

            os.chdir('../../')
    else:
        print('there is only one swath, no need to estimate swath offset')

108
contrib/stack/alosStack/filt.py Executable file
View File

@ -0,0 +1,108 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.runFilt import filt
from StackPulic import createObject
def cmdLineParse():
    '''
    Command line parser for interferogram filtering.

    Prints usage and exits with status 1 when invoked without arguments;
    otherwise returns the parsed argparse namespace.
    '''
    import sys
    import argparse

    #fixed description: it previously read 'take more looks and compute coherence',
    #a copy-paste from another script; this script filters the interferogram
    parser = argparse.ArgumentParser(description='filter interferogram')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed help text: this option is the secondary date, not the reference date
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1,
            help = 'number of range looks 2. default: 1')
    parser.add_argument('-nalks2', dest='nalks2', type=int, default=1,
            help = 'number of azimuth looks 2. default: 1')
    parser.add_argument('-alpha', dest='alpha', type=float, default=0.3,
            help='filtering strength. default: 0.3')
    parser.add_argument('-win', dest='win', type=int, default=32,
            help = 'filter window size. default: 32')
    parser.add_argument('-step', dest='step', type=int, default=4,
            help = 'filter step size. default: 4')
    parser.add_argument('-keep_mag', dest='keep_mag', action='store_true', default=False,
            help='keep magnitude before filtering interferogram')
    parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true', default=False,
            help='mask filtered interferogram with water body')

    if len(sys.argv) <= 1:
        #no arguments given: show usage and abort
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    idir = inps.idir
    dateReferenceStack = inps.ref_date_stack
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    numberRangeLooks2 = inps.nrlks2
    numberAzimuthLooks2 = inps.nalks2
    filterStrength = inps.alpha
    filterWinsize = inps.win
    filterStepsize = inps.step
    removeMagnitudeBeforeFiltering = not inps.keep_mag
    waterBodyMaskStartingStep = inps.wbd_msk    # boolean flag here; mapped to step name below
    #######################################################

    pair = '{}-{}'.format(dateReference, dateSecondary)
    ms = pair
    #suffix encoding the total (stage-1 x stage-2) number of looks, e.g. '_4rlks_8alks'
    ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2)

    #build a minimal stand-in for the Alos2 application object so that the
    #standard filt() step can be reused outside the full application
    self = createObject()
    self._insar = createObject()
    self.filterStrength = filterStrength
    self.filterWinsize = filterWinsize
    self.filterStepsize = filterStepsize
    self.removeMagnitudeBeforeFiltering = removeMagnitudeBeforeFiltering
    self._insar.multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int'
    self._insar.filteredInterferogram = 'filt_' + ms + ml2 + '.int'
    self._insar.multilookAmplitude = ms + ml2 + '.amp'
    self._insar.multilookPhsig = ms + ml2 + '.phsig'
    self._insar.multilookWbdOut = os.path.join(idir, dateReferenceStack, 'insar', dateReferenceStack + ml2 + '.wbd')
    #filt() expects either the step name from which water-body masking starts, or None
    if waterBodyMaskStartingStep:
        self.waterBodyMaskStartingStep='filt'
    else:
        self.waterBodyMaskStartingStep=None

    filt(self)

View File

@ -0,0 +1,92 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import multilook
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from StackPulic import stackDateStatistics
from StackPulic import acquisitionModesAlos2
from StackPulic import formInterferogram
def cmdLineParse():
    '''
    Command line parser for interferogram formation.

    Prints usage and exits with status 1 when invoked without arguments;
    otherwise returns the parsed argparse namespace.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='form interferogram')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed help text: this option is the secondary date, not the reference date
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')

    if len(sys.argv) <= 1:
        #no arguments given: show usage and abort
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    #######################################################

    pair = '{}-{}'.format(dateReference, dateSecondary)
    #suffix encoding the stage-1 number of looks, e.g. '_1rlks_14alks'
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)

    #use one date to find frames and swaths. any date should work, here we use dateIndexReference
    #frame number is the last four characters of each 'f*_*' folder name;
    #swath number is the last character of each 's*' folder name under f1_*
    frames = sorted([x[-4:] for x in glob.glob(os.path.join('./', 'f*_*'))])
    swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join('./', 'f1_*', 's*'))])

    nframe = len(frames)
    nswath = len(swaths)

    #walk into each frame/swath directory and form its interferogram in place
    for i, frameNumber in enumerate(frames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)
        for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
            swathDir = 's{}'.format(swathNumber)
            os.chdir(swathDir)

            print('processing swath {}, frame {}'.format(swathNumber, frameNumber))

            slcReference = dateReference+'.slc'
            slcSecondary = dateSecondary+'.slc'
            interferogram = pair + ml1 + '.int'
            amplitude = pair + ml1 + '.amp'
            formInterferogram(slcReference, slcSecondary, interferogram, amplitude, numberRangeLooks1, numberAzimuthLooks1)

            os.chdir('../')
        os.chdir('../')

View File

@ -0,0 +1,132 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrCPU
from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrGPU
from StackPulic import loadTrack
from StackPulic import hasGPU
def cmdLineParse():
    '''
    Command line parser for the geo2rdr (range/azimuth offset) computation.

    Prints usage and exits with status 1 when invoked without arguments;
    otherwise returns the parsed argparse namespace.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='compute range and azimuth offsets')
    parser.add_argument('-date', dest='date', type=str, required=True,
            help = 'date. format: YYMMDD')
    parser.add_argument('-date_par_dir', dest='date_par_dir', type=str, default='./',
            help = 'date parameter directory. default: ./')
    parser.add_argument('-lat', dest='lat', type=str, required=True,
            help = 'latitude file')
    #fixed typo in help text: 'longtitude' -> 'longitude'
    parser.add_argument('-lon', dest='lon', type=str, required=True,
            help = 'longitude file')
    parser.add_argument('-hgt', dest='hgt', type=str, required=True,
            help = 'height file')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-gpu', dest='gpu', action='store_true', default=False,
            help='use GPU when available')

    if len(sys.argv) <= 1:
        #no arguments given: show usage and abort
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    date = inps.date
    #all input paths are made relative to the parent directory, because the
    #script chdirs into the 'insar' subdirectory below
    dateParDir = os.path.join('../', inps.date_par_dir)
    latitude = os.path.join('../', inps.lat)
    longitude = os.path.join('../', inps.lon)
    height = os.path.join('../', inps.hgt)
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    useGPU = inps.gpu
    #######################################################

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    #suffix encoding the stage-1 number of looks, e.g. '_1rlks_14alks'
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)

    rangeOffset = date + ml1 + '_rg.off'
    azimuthOffset = date + ml1 + '_az.off'

    #temporarily symlink the geometry files (data + .vrt/.xml metadata) into the
    #working directory when they are not already present; the *Link flags record
    #which links we created so only those are removed afterwards
    if not os.path.isfile(os.path.basename(latitude)):
        latitudeLink = True
        os.symlink(latitude, os.path.basename(latitude))
        os.symlink(latitude+'.vrt', os.path.basename(latitude)+'.vrt')
        os.symlink(latitude+'.xml', os.path.basename(latitude)+'.xml')
    else:
        latitudeLink = False

    if not os.path.isfile(os.path.basename(longitude)):
        longitudeLink = True
        os.symlink(longitude, os.path.basename(longitude))
        os.symlink(longitude+'.vrt', os.path.basename(longitude)+'.vrt')
        os.symlink(longitude+'.xml', os.path.basename(longitude)+'.xml')
    else:
        longitudeLink = False

    if not os.path.isfile(os.path.basename(height)):
        heightLink = True
        os.symlink(height, os.path.basename(height))
        os.symlink(height+'.vrt', os.path.basename(height)+'.vrt')
        os.symlink(height+'.xml', os.path.basename(height)+'.xml')
    else:
        heightLink = False

    track = loadTrack(dateParDir, date)
    #prefer the GPU implementation when requested and available
    if useGPU and hasGPU():
        geo2RdrGPU(track, numberRangeLooks1, numberAzimuthLooks1,
            latitude, longitude, height, rangeOffset, azimuthOffset)
    else:
        geo2RdrCPU(track, numberRangeLooks1, numberAzimuthLooks1,
            latitude, longitude, height, rangeOffset, azimuthOffset)

    #clean up only the symlinks created above
    if latitudeLink == True:
        os.remove(os.path.basename(latitude))
        os.remove(os.path.basename(latitude)+'.vrt')
        os.remove(os.path.basename(latitude)+'.xml')
    if longitudeLink == True:
        os.remove(os.path.basename(longitude))
        os.remove(os.path.basename(longitude)+'.vrt')
        os.remove(os.path.basename(longitude)+'.xml')
    if heightLink == True:
        os.remove(os.path.basename(height))
        os.remove(os.path.basename(height)+'.vrt')
        os.remove(os.path.basename(height)+'.xml')

View File

@ -0,0 +1,87 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.runGeocode import geocode
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
from StackPulic import loadProduct
def cmdLineParse():
    '''
    Parse command line options for geocoding.

    Prints the usage message and exits with status 1 when the script is
    invoked without any arguments; otherwise returns the parsed namespace.
    '''
    import sys
    import argparse

    p = argparse.ArgumentParser(description='geocode')
    p.add_argument('-ref_date_stack_track', dest='ref_date_stack_track', type=str, required=True,
            help='track parameter of reference date of stack. format: YYMMDD.track.xml')
    p.add_argument('-dem', dest='dem', type=str, required=True,
            help='dem file used for geocoding')
    p.add_argument('-input', dest='input', type=str, required=True,
            help='input file to be geocoded')
    p.add_argument('-bbox', dest='bbox', type=str, default=None,
            help='user input bounding box, format: s/n/w/e. default: bbox of ref_date_stack_track')
    p.add_argument('-interp_method', dest='interp_method', type=str, default='nearest',
            help='interpolation method: sinc, bilinear, bicubic, nearest. default: nearest')
    p.add_argument('-nrlks', dest='nrlks', type=int, default=1,
            help='total number of range looks = number of range looks 1 * number of range looks 2. default: 1')
    p.add_argument('-nalks', dest='nalks', type=int, default=1,
            help='total number of azimuth looks = number of azimuth looks 1 * number of azimuth looks 2. default: 1')

    if len(sys.argv) <= 1:
        # invoked with no arguments: show help and abort
        print('')
        p.print_help()
        sys.exit(1)
    return p.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    ref_date_stack_track = inps.ref_date_stack_track
    demGeo = inps.dem
    inputFile = inps.input
    bbox = inps.bbox
    geocodeInterpMethod = inps.interp_method
    numberRangeLooks = inps.nrlks
    numberAzimuthLooks = inps.nalks
    #######################################################

    demFile = os.path.abspath(demGeo)
    trackReferenceStack = loadProduct(ref_date_stack_track)

    #compute bounding box for geocoding
    if bbox is not None:
        #user-supplied bbox 's/n/w/e' -> [s, n, w, e] as floats
        bbox = [float(x) for x in bbox.split('/')]
        if len(bbox)!=4:
            raise Exception('user input bbox must have four elements')
    else:
        #no user bbox: derive it from the track and the input image dimensions
        img = isceobj.createImage()
        img.load(inputFile+'.xml')
        bbox = getBboxGeo(trackReferenceStack, useTrackOnly=True, numberOfSamples=img.width, numberOfLines=img.length, numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks)
    print('=====================================================================================================')
    print('geocode bounding box: {}'.format(bbox))
    print('=====================================================================================================')

    interpMethod = geocodeInterpMethod
    geocode(trackReferenceStack, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, 0, 0)

View File

@ -0,0 +1,134 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
def cmdLineParse():
    '''
    Command line parser for checking ionospheric correction results.

    Prints usage and exits with status 1 when invoked without arguments;
    otherwise returns the parsed argparse namespace.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='check ionospheric correction results')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where each pair (YYMMDD-YYMMDD) is located. only folders are recognized')
    parser.add_argument('-odir', dest='odir', type=str, required=True,
            help = 'output directory for estimated ionospheric phase of each date')
    #fixed typo in help text: 'seperated' -> 'separated'
    parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None,
            help = 'a number of pairs separated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD YYMMDD-YYMMDD... This argument has highest priority. When provided, only process these pairs')
    parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true',
            help='apply water body mask in the output image')

    if len(sys.argv) <= 1:
        #no arguments given: show usage and abort
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    idir = inps.idir
    odir = inps.odir
    pairsUser = inps.pairs
    wbdMsk = inps.wbd_msk
    #######################################################

    #montage (ImageMagick) is shelled out to below to assemble the comparison figure
    if shutil.which('montage') is None:
        raise Exception('this command requires montage in ImageMagick\n')

    #get date folders
    dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*')))
    dateDirs = [os.path.basename(x) for x in dateDirs if os.path.isdir(x)]
    #user-specified pairs take priority; otherwise process every pair folder found
    if pairsUser is not None:
        pairs = pairsUser
    else:
        pairs = dateDirs

    os.makedirs(odir, exist_ok=True)

    #read image dimensions from the first pair's filtered ionosphere product
    img = isceobj.createImage()
    img.load(glob.glob(os.path.join(idir, pairs[0], 'ion', 'ion_cal', 'filt_ion_*rlks_*alks.ion'))[0] + '.xml')
    width = img.width
    length = img.length

    #shrink output figures wider than widthMax pixels
    widthMax = 600
    if width >= widthMax:
        ratio = widthMax / width
        resize = ' -resize {}%'.format(ratio*100.0)
    else:
        ratio = 1.0
        resize = ''

    for ipair in pairs:
        #uncorrected interferogram, estimated ionosphere, and corrected interferogram
        diffOriginal = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'diff_{}_*rlks_*alks_ori.int'.format(ipair)))[0]
        ion = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'filt_ion_*rlks_*alks.ion'))[0]
        diff = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'diff_{}_*rlks_*alks.int'.format(ipair)))[0]

        if wbdMsk:
            wbd = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'wbd_*rlks_*alks.wbd'))[0]
            wbdArguments = ' {} -s {} -i1 -cmap grey -percent 100'.format(wbd, width)
        else:
            wbdArguments = ''

        #render each product to a ppm with mdx (phase wrapped to [-pi, pi))
        runCmd('mdx {} -s {} -c8pha -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793{} -P -workdir {}'.format(diffOriginal, width, wbdArguments, odir))
        runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out1.ppm')))
        runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793{} -P -workdir {}'.format(ion, width, wbdArguments, odir))
        runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out2.ppm')))
        runCmd('mdx {} -s {} -c8pha -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793{} -P -workdir {}'.format(diff, width, wbdArguments, odir))
        runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out3.ppm')))
        #combine the three panels into one labeled tif; pointsize/geometry are
        #scaled from the (possibly resized) image width
        runCmd("montage -pointsize {} -label 'original' {} -label 'ionosphere' {} -label 'corrected' {} -geometry +{} -compress LZW{} {}.tif".format(
            int((ratio*width)/111*18+0.5),
            os.path.join(odir, 'out1.ppm'),
            os.path.join(odir, 'out2.ppm'),
            os.path.join(odir, 'out3.ppm'),
            int((ratio*width)/111*5+0.5),
            resize,
            os.path.join(odir, ipair)))
        #remove the intermediate panels
        runCmd('rm {} {} {}'.format(
            os.path.join(odir, 'out1.ppm'),
            os.path.join(odir, 'out2.ppm'),
            os.path.join(odir, 'out3.ppm')))

    #create colorbar
    #a horizontal ramp from -pi to pi rendered with the same color map as the figures
    width_colorbar = 100
    length_colorbar = 20
    colorbar = np.ones((length_colorbar, width_colorbar), dtype=np.float32) * \
               (np.linspace(-np.pi, np.pi, num=width_colorbar,endpoint=True,dtype=np.float32))[None,:]
    colorbar.astype(np.float32).tofile(os.path.join(odir, 'colorbar'))
    runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(os.path.join(odir, 'colorbar'), width_colorbar, odir))
    runCmd('convert {} -compress LZW -resize 100% {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'colorbar_-pi_pi.tiff')))
    runCmd('rm {} {}'.format(
        os.path.join(odir, 'colorbar'),
        os.path.join(odir, 'out.ppm')))

View File

@ -0,0 +1,99 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import renameFile
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
def cmdLineParse():
    '''
    Command line parser for ionospheric correction of a pair.

    Prints usage and exits with status 1 when invoked without arguments;
    otherwise returns the parsed argparse namespace.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='ionospheric correction')
    parser.add_argument('-ion_dir', dest='ion_dir', type=str, required=True,
            help = 'directory of ionospheric phase for each date')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed help text: this option is the secondary date, not the reference date
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1,
            help = 'number of range looks 2. default: 1')
    parser.add_argument('-nalks2', dest='nalks2', type=int, default=1,
            help = 'number of azimuth looks 2. default: 1')

    if len(sys.argv) <= 1:
        #no arguments given: show usage and abort
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    ion_dir = inps.ion_dir
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    numberRangeLooks2 = inps.nrlks2
    numberAzimuthLooks2 = inps.nalks2
    #######################################################

    pair = '{}-{}'.format(dateReference, dateSecondary)
    ms = pair
    #suffix encoding the total (stage-1 x stage-2) number of looks
    ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2)

    multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int'
    #the uncorrected interferogram is preserved under this '_ori' name
    multilookDifferentialInterferogramOriginal = 'diff_' + ms + ml2 + '_ori.int'

    #per-date ionospheric phase files; paths are relative to the 'insar'
    #subdirectory entered below, hence the leading '../'
    ionosphereReference = os.path.join('../', ion_dir, 'filt_ion_'+dateReference+ml2+'.ion')
    ionosphereSecondary = os.path.join('../', ion_dir, 'filt_ion_'+dateSecondary+ml2+'.ion')

    insarDir = 'insar'
    #os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    if not os.path.isfile(ionosphereReference):
        raise Exception('ionospheric phase file: {} of reference date does not exist in {}.\n'.format(os.path.basename(ionosphereReference), ion_dir))
    if not os.path.isfile(ionosphereSecondary):
        raise Exception('ionospheric phase file: {} of secondary date does not exist in {}.\n'.format(os.path.basename(ionosphereSecondary), ion_dir))

    #correct interferogram
    #keep the original interferogram; re-running the script reuses the existing
    #'_ori' file instead of renaming again (so correction is not applied twice
    #to an already-corrected product)
    if os.path.isfile(multilookDifferentialInterferogramOriginal):
        print('original interferogram: {} is already here, do not rename: {}'.format(multilookDifferentialInterferogramOriginal, multilookDifferentialInterferogram))
    else:
        print('renaming {} to {}'.format(multilookDifferentialInterferogram, multilookDifferentialInterferogramOriginal))
        renameFile(multilookDifferentialInterferogram, multilookDifferentialInterferogramOriginal)

    #subtract the differential ionospheric phase (reference - secondary) from
    #the original interferogram
    cmd = "imageMath.py -e='a*exp(-1.0*J*(b-c))' --a={} --b={} --c={} -s BIP -t cfloat -o {}".format(
        multilookDifferentialInterferogramOriginal,
        ionosphereReference,
        ionosphereSecondary,
        multilookDifferentialInterferogram)
    runCmd(cmd)

    os.chdir('../')

View File

@ -0,0 +1,499 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.runIonFilt import computeIonosphere
from isceobj.Alos2Proc.runIonFilt import gaussian
#from isceobj.Alos2Proc.runIonFilt import least_sqares
from isceobj.Alos2Proc.runIonFilt import polyfit_2d
from isceobj.Alos2Proc.runIonFilt import adaptive_gaussian
from isceobj.Alos2Proc.runIonFilt import reformatMaskedAreas
from StackPulic import loadTrack
from StackPulic import createObject
from StackPulic import stackDateStatistics
from StackPulic import acquisitionModesAlos2
from StackPulic import subbandParameters
from compute_burst_sync import computeBurstSynchronization
def ionFilt(self, referenceTrack, catalog=None):
from isceobj.Alos2Proc.runIonSubband import defineIonDir
ionDir = defineIonDir()
subbandPrefix = ['lower', 'upper']
ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal'])
os.makedirs(ionCalDir, exist_ok=True)
os.chdir(ionCalDir)
log = ''
############################################################
# STEP 1. compute ionospheric phase
############################################################
from isceobj.Constants import SPEED_OF_LIGHT
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
###################################
#SET PARAMETERS HERE
#THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self)
corThresholdAdj = 0.97
corOrderAdj = 20
###################################
print('\ncomputing ionosphere')
#get files
ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon)
lowerUnwfile = subbandPrefix[0]+ml2+'.unw'
upperUnwfile = subbandPrefix[1]+ml2+'.unw'
corfile = 'diff'+ml2+'.cor'
#use image size from lower unwrapped interferogram
img = isceobj.createImage()
img.load(lowerUnwfile + '.xml')
width = img.width
length = img.length
lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
#amp = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
#masked out user-specified areas
if self.maskedAreasIon != None:
maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width)
for area in maskedAreas:
lowerUnw[area[0]:area[1], area[2]:area[3]] = 0
upperUnw[area[0]:area[1], area[2]:area[3]] = 0
cor[area[0]:area[1], area[2]:area[3]] = 0
#remove possible wired values in coherence
cor[np.nonzero(cor<0)] = 0.0
cor[np.nonzero(cor>1)] = 0.0
#remove water body
wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width)
cor[np.nonzero(wbd==-1)] = 0.0
#remove small values
cor[np.nonzero(cor<corThresholdAdj)] = 0.0
#compute ionosphere
fl = SPEED_OF_LIGHT / self._insar.subbandRadarWavelength[0]
fu = SPEED_OF_LIGHT / self._insar.subbandRadarWavelength[1]
adjFlag = 1
ionos = computeIonosphere(lowerUnw, upperUnw, cor**corOrderAdj, fl, fu, adjFlag, 0)
#dump ionosphere
ionfile = 'ion'+ml2+'.ion'
# ion = np.zeros((length*2, width), dtype=np.float32)
# ion[0:length*2:2, :] = amp
# ion[1:length*2:2, :] = ionos
# ion.astype(np.float32).tofile(ionfile)
# img.filename = ionfile
# img.extraFilename = ionfile + '.vrt'
# img.renderHdr()
ionos.astype(np.float32).tofile(ionfile)
create_xml(ionfile, width, length, 'float')
############################################################
# STEP 2. filter ionospheric phase
############################################################
import scipy.signal as ss
#################################################
#SET PARAMETERS HERE
#fit and filter ionosphere
fit = self.fitIon
filt = self.filtIon
fitAdaptive = self.fitAdaptiveIon
filtSecondary = self.filtSecondaryIon
if (fit == False) and (filt == False):
raise Exception('either fit ionosphere or filt ionosphere should be True when doing ionospheric correction\n')
#filtering window size
size_max = self.filteringWinsizeMaxIon
size_min = self.filteringWinsizeMinIon
size_secondary = self.filteringWinsizeSecondaryIon
if size_min > size_max:
print('\n\nWARNING: minimum window size for filtering ionosphere phase {} > maximum window size {}'.format(size_min, size_max))
print(' re-setting maximum window size to {}\n\n'.format(size_min))
size_max = size_min
if size_secondary % 2 != 1:
size_secondary += 1
print('window size of secondary filtering of ionosphere phase should be odd, window size changed to {}'.format(size_secondary))
#coherence threshold for fitting a polynomial
corThresholdFit = 0.25
#ionospheric phase standard deviation after filtering
std_out0 = self.filterStdIon
#std_out0 = 0.1
#################################################
print('\nfiltering ionosphere')
#input files
ionfile = 'ion'+ml2+'.ion'
#corfile = 'diff'+ml2+'.cor'
corLowerfile = subbandPrefix[0]+ml2+'.cor'
corUpperfile = subbandPrefix[1]+ml2+'.cor'
#output files
ionfiltfile = 'filt_ion'+ml2+'.ion'
stdfiltfile = 'filt_ion'+ml2+'.std'
windowsizefiltfile = 'filt_ion'+ml2+'.win'
#read data
img = isceobj.createImage()
img.load(ionfile + '.xml')
width = img.width
length = img.length
ion = np.fromfile(ionfile, dtype=np.float32).reshape(length, width)
corLower = (np.fromfile(corLowerfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
corUpper = (np.fromfile(corUpperfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
cor = (corLower + corUpper) / 2.0
index = np.nonzero(np.logical_or(corLower==0, corUpper==0))
cor[index] = 0
del corLower, corUpper
#masked out user-specified areas
if self.maskedAreasIon != None:
maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width)
for area in maskedAreas:
ion[area[0]:area[1], area[2]:area[3]] = 0
cor[area[0]:area[1], area[2]:area[3]] = 0
#remove possible wired values in coherence
cor[np.nonzero(cor<0)] = 0.0
cor[np.nonzero(cor>1)] = 0.0
#remove water body. Not helpful, just leave it here
wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width)
cor[np.nonzero(wbd==-1)] = 0.0
# #applying water body mask here
# waterBodyFile = 'wbd'+ml2+'.wbd'
# if os.path.isfile(waterBodyFile):
# print('applying water body mask to coherence used to compute ionospheric phase')
# wbd = np.fromfile(waterBodyFile, dtype=np.int8).reshape(length, width)
# cor[np.nonzero(wbd!=0)] = 0.00001
#minimize the effect of low coherence pixels
#cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001
#filt = adaptive_gaussian(ion, cor, size_max, size_min)
#cor**14 should be a good weight to use. 22-APR-2018
#filt = adaptive_gaussian_v0(ion, cor**corOrderFilt, size_max, size_min)
#1. compute number of looks
azimuthBandwidth = 0
for i, frameNumber in enumerate(self._insar.referenceFrames):
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
#azimuthBandwidth += 2270.575 * 0.85
azimuthBandwidth += referenceTrack.frames[i].swaths[j].azimuthBandwidth
azimuthBandwidth = azimuthBandwidth / (len(self._insar.referenceFrames)*(self._insar.endingSwath-self._insar.startingSwath+1))
#azimuth number of looks should also apply to burst mode
#assume range bandwidth of subband image is 1/3 of orginal range bandwidth, as in runIonSubband.py!!!
numberOfLooks = referenceTrack.azimuthLineInterval * self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon / (1.0/azimuthBandwidth) *\
referenceTrack.frames[0].swaths[0].rangeBandwidth / 3.0 / referenceTrack.rangeSamplingRate * self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon
#consider also burst characteristics. In ScanSAR-stripmap interferometry, azimuthBandwidth is from referenceTrack (ScanSAR)
if self._insar.modeCombination in [21, 31]:
numberOfLooks /= 5.0
if self._insar.modeCombination in [22, 32]:
numberOfLooks /= 7.0
if self._insar.modeCombination in [21]:
numberOfLooks *= (self._insar.burstSynchronization/100.0)
#numberOfLooks checked
print('number of looks to be used for computing subband interferogram standard deviation: {}'.format(numberOfLooks))
if catalog is not None:
catalog.addItem('number of looks of subband interferograms', numberOfLooks, 'runIonFilt')
log += 'number of looks of subband interferograms: {}\n'.format(numberOfLooks)
#2. compute standard deviation of the raw ionospheric phase
#f0 same as in runIonSubband.py!!!
def ion_std(fl, fu, numberOfLooks, cor):
    '''
    compute standard deviation of ionospheric phase
    fl: lower band center frequency
    fu: upper band center frequency
    cor: coherence, must be numpy array
    '''
    # multilook interferometric phase variance; the (cor == 0) term keeps the
    # denominator non-zero where coherence is exactly zero
    centerFrequency = 0.5 * (fl + fu)
    phaseVar = (1.0 - cor * cor) / (2.0 * numberOfLooks * cor * cor + (cor == 0))
    # propagate the lower/upper subband phase variances into the ionospheric
    # phase estimate (split-spectrum combination)
    scaleFactor = fl * fu / centerFrequency / (fu * fu - fl * fl)
    std = scaleFactor * np.sqrt(fu * fu * phaseVar + fl * fl * phaseVar)
    # zero-coherence samples carry no information: report zero deviation
    std[np.nonzero(cor == 0)] = 0
    return std
std = ion_std(fl, fu, numberOfLooks, cor)
#3. compute minimum filter window size for given coherence and standard deviation of filtered ionospheric phase
cor2 = np.linspace(0.1, 0.9, num=9, endpoint=True)
std2 = ion_std(fl, fu, numberOfLooks, cor2)
std_out2 = np.zeros(cor2.size)
win2 = np.zeros(cor2.size, dtype=np.int32)
for i in range(cor2.size):
for size in range(9, 10001, 2):
#this window must be the same as those used in adaptive_gaussian!!!
gw = gaussian(size, size/2.0, scale=1.0)
scale = 1.0 / np.sum(gw / std2[i]**2)
std_out2[i] = scale * np.sqrt(np.sum(gw**2 / std2[i]**2))
win2[i] = size
if std_out2[i] <= std_out0:
break
print('if ionospheric phase standard deviation <= {} rad, minimum filtering window size required:'.format(std_out0))
print('coherence window size')
print('************************')
for x, y in zip(cor2, win2):
print(' %5.2f %5d'%(x, y))
print()
if catalog is not None:
catalog.addItem('coherence value', cor2, 'runIonFilt')
catalog.addItem('minimum filter window size', win2, 'runIonFilt')
log += 'coherence value: {}\n'.format(cor2)
log += 'minimum filter window size: {}\n'.format(win2)
#4. filter interferogram
#fit ionosphere
if fit:
#prepare weight
wgt = std**2
wgt[np.nonzero(cor<corThresholdFit)] = 0
index = np.nonzero(wgt!=0)
wgt[index] = 1.0/(wgt[index])
#fit
ion_fit, coeff = polyfit_2d(ion, wgt, 2)
ion -= ion_fit * (ion!=0)
#filter the rest of the ionosphere
if filt:
(ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive)
if filtSecondary:
g2d = gaussian(size_secondary, size_secondary/2.0, scale=1.0)
scale = ss.fftconvolve((ion_filt!=0), g2d, mode='same')
ion_filt = (ion_filt!=0) * ss.fftconvolve(ion_filt, g2d, mode='same') / (scale + (scale==0))
if catalog is not None:
catalog.addItem('standard deviation of filtered ionospheric phase', std_out0, 'runIonFilt')
log += 'standard deviation of filtered ionospheric phase: {}\n'.format(std_out0)
#get final results
if (fit == True) and (filt == True):
ion_final = ion_filt + ion_fit * (ion_filt!=0)
elif (fit == True) and (filt == False):
ion_final = ion_fit
elif (fit == False) and (filt == True):
ion_final = ion_filt
else:
ion_final = ion
#output results
ion_final.astype(np.float32).tofile(ionfiltfile)
create_xml(ionfiltfile, width, length, 'float')
if filt == True:
std_out.astype(np.float32).tofile(stdfiltfile)
create_xml(stdfiltfile, width, length, 'float')
window_size_out.astype(np.float32).tofile(windowsizefiltfile)
create_xml(windowsizefiltfile, width, length, 'float')
os.chdir('../../')
return log
def cmdLineParse():
    '''
    command line parser.

    returns the parsed argparse namespace; prints help and exits with
    status 1 when no arguments are given.
    '''
    import sys
    import argparse

    # description fixed: this script filters the ionospheric phase (original
    # text was copy-pasted from the unwrapping script)
    parser = argparse.ArgumentParser(description='filter ionospheric phase')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-idir2', dest='idir2', type=str, required=True,
            help = 'input directory where original data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    # help fixed: this is the secondary (not reference) date of the pair
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1,
            help = 'number of range looks 2. default: 1')
    parser.add_argument('-nalks2', dest='nalks2', type=int, default=1,
            help = 'number of azimuth looks 2. default: 1')
    parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1,
            help = 'number of range looks ion. default: 1')
    parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1,
            help = 'number of azimuth looks ion. default: 1')
    parser.add_argument('-fit', dest='fit', action='store_true', default=False,
            help='apply polynomial fit before filtering ionosphere phase')
    parser.add_argument('-filt', dest='filt', action='store_true', default=False,
            help='filtering ionosphere phase')
    parser.add_argument('-fit_adaptive', dest='fit_adaptive', action='store_true', default=False,
            help='apply polynomial fit in adaptive filtering window')
    parser.add_argument('-filt_secondary', dest='filt_secondary', action='store_true', default=False,
            help='secondary filtering of ionosphere phase')
    parser.add_argument('-win_min', dest='win_min', type=int, default=11,
            help = 'minimum filtering window size. default: 11')
    parser.add_argument('-win_max', dest='win_max', type=int, default=301,
            help = 'maximum filtering window size. default: 301')
    parser.add_argument('-win_secondary', dest='win_secondary', type=int, default=5,
            help = 'secondary filtering window size. default: 5')
    parser.add_argument('-filter_std_ion', dest='filter_std_ion', type=float, default=None,
            help = 'standard deviation after ionosphere filtering. default: None, automatically set by the program')
    parser.add_argument('-masked_areas', dest='masked_areas', type=int, nargs='+', action='append', default=None,
            help='This is a 2-d list. Each element in the 2-D list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified with -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. two areas masked out: -masked_areas 10 20 10 20 -masked_areas 110 120 110 120')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
inps = cmdLineParse()
#get user parameters from input
idir = inps.idir
idir2 = inps.idir2
dateReferenceStack = inps.ref_date_stack
dateReference = inps.ref_date
dateSecondary = inps.sec_date
numberRangeLooks1 = inps.nrlks1
numberAzimuthLooks1 = inps.nalks1
numberRangeLooks2 = inps.nrlks2
numberAzimuthLooks2 = inps.nalks2
numberRangeLooksIon = inps.nrlks_ion
numberAzimuthLooksIon = inps.nalks_ion
fitIon = inps.fit
filtIon = inps.filt
fitAdaptiveIon = inps.fit_adaptive
filtSecondaryIon = inps.filt_secondary
filteringWinsizeMinIon = inps.win_min
filteringWinsizeMaxIon = inps.win_max
filteringWinsizeSecondaryIon = inps.win_secondary
filterStdIon = inps.filter_std_ion
maskedAreasIon = inps.masked_areas
#######################################################

#pair name (reference-secondary) and multilook suffixes used to build file names
pair = '{}-{}'.format(dateReference, dateSecondary)
ms = pair
ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)
ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2)

#collect stack statistics and load track metadata of the dates involved
dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack)
dateDirs2, dates2, frames2, swaths2, dateIndexReference2 = stackDateStatistics(idir2, dateReferenceStack)
spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()
trackReferenceStack = loadTrack(os.path.join(idir, dateReferenceStack), dateReferenceStack)
trackReference = loadTrack(os.path.join(idir2, dateReference), dateReference)
trackSecondary = loadTrack(os.path.join(idir2, dateSecondary), dateSecondary)
subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix = subbandParameters(trackReferenceStack)

#build a minimal application-like object so ionFilt() (written for the
#Alos2 application) can be reused by this stack script
self = createObject()
self._insar = createObject()
self._insar.numberRangeLooks1 = numberRangeLooks1
self._insar.numberAzimuthLooks1 = numberAzimuthLooks1
self._insar.numberRangeLooks2 = numberRangeLooks2
self._insar.numberAzimuthLooks2 = numberAzimuthLooks2
self._insar.numberRangeLooksIon = numberRangeLooksIon
self._insar.numberAzimuthLooksIon = numberAzimuthLooksIon
self.fitIon = fitIon
self.filtIon = filtIon
self.fitAdaptiveIon = fitAdaptiveIon
self.filtSecondaryIon = filtSecondaryIon
self.filteringWinsizeMaxIon = filteringWinsizeMaxIon
self.filteringWinsizeMinIon = filteringWinsizeMinIon
self.filteringWinsizeSecondaryIon = filteringWinsizeSecondaryIon
self.maskedAreasIon = maskedAreasIon
self.applyIon = False

#ionospheric phase standard deviation after filtering
if filterStdIon is not None:
    #user-specified value has priority
    self.filterStdIon = filterStdIon
else:
    if trackReference.operationMode == trackSecondary.operationMode:
        #same acquisition mode: use the mode's tabulated value
        from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict
        self.filterStdIon = modeProcParDict['ALOS-2'][trackReference.operationMode]['filterStdIon']
    else:
        #mixed modes: evaluate the polynomial model at the range bandwidth (MHz)
        from isceobj.Alos2Proc.Alos2ProcPublic import filterStdPolyIon
        self.filterStdIon = np.polyval(filterStdPolyIon, trackReference.frames[0].swaths[0].rangeBandwidth/(1e6))

self._insar.referenceFrames = frames
self._insar.startingSwath = swaths[0]
self._insar.endingSwath = swaths[-1]
self._insar.subbandRadarWavelength = subbandRadarWavelength
self._insar.multilookIon = ms + ml2 + '.ion'
self._insar.multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int'
self._insar.multilookDifferentialInterferogramOriginal = 'diff_' + ms + ml2 + '_ori.int'

#usable combinations
referenceMode = trackReference.operationMode
secondaryMode = trackSecondary.operationMode
if (referenceMode in spotlightModes) and (secondaryMode in spotlightModes):
    self._insar.modeCombination = 0
elif (referenceMode in stripmapModes) and (secondaryMode in stripmapModes):
    self._insar.modeCombination = 1
elif (referenceMode in scansarNominalModes) and (secondaryMode in scansarNominalModes):
    self._insar.modeCombination = 21
elif (referenceMode in scansarWideModes) and (secondaryMode in scansarWideModes):
    self._insar.modeCombination = 22
elif (referenceMode in scansarNominalModes) and (secondaryMode in stripmapModes):
    self._insar.modeCombination = 31
elif (referenceMode in scansarWideModes) and (secondaryMode in stripmapModes):
    self._insar.modeCombination = 32
else:
    print('\n\nthis mode combination is not possible')
    print('note that for ScanSAR-stripmap, ScanSAR must be reference\n\n')
    raise Exception('mode combination not supported')

#burst synchronization only matters for ScanSAR nominal - ScanSAR nominal (21)
if self._insar.modeCombination in [21]:
    unsynTimeAll, synPercentageAll = computeBurstSynchronization(trackReference, trackSecondary)
    self._insar.burstSynchronization = np.mean(np.array(synPercentageAll), dtype=np.float64)
else:
    self._insar.burstSynchronization = 100.0

#log output info
log = '{} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now())
log += '================================================================================================\n'
log += ionFilt(self, trackReferenceStack)
log += '\n'

logFile = 'process.log'
with open(logFile, 'a') as f:
    f.write(log)

339
contrib/stack/alosStack/ion_ls.py Executable file
View File

@ -0,0 +1,339 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from StackPulic import loadProduct
from StackPulic import datesFromPairs
def least_sqares(H, S, W=None):
    '''
    least-squares solution of the linear system H theta = S
    (can use multiple threads; set environment variable OMP_NUM_THREADS)

    H: observation matrix, numpy array of shape (nobs, npar)
    S: observation vector, numpy array of length nobs
    W: optional weight matrix, numpy array of shape (nobs, nobs)

    returns: estimated parameters as a 1-d numpy array of length npar

    NOTE(review): name misspelled ('sqares') but kept for caller compatibility.
    '''
    # bugfix: the original called S.reshape(...) without keeping the result,
    # which is a no-op; bind the reshaped column vector explicitly
    S = S.reshape(H.shape[0], 1)
    if W is None:
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(H.transpose(), H))
        Z = np.dot(np.dot(m1, H.transpose()), S)
    else:
        #weighted normal equations: (H^T W H)^-1 H^T W S
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(np.dot(H.transpose(), W), H))
        Z = np.dot(np.dot(np.dot(m1, H.transpose()), W), S)
    return Z.reshape(Z.size)
def cmdLineParse():
    '''
    command line parser.

    returns the parsed argparse namespace; prints help and exits with
    status 1 when no arguments are given.
    '''
    import sys
    import argparse

    # description fixed: this script estimates the ionospheric phase of each
    # date by least squares (original text was copy-pasted from the
    # unwrapping script)
    parser = argparse.ArgumentParser(description='least squares estimation of ionospheric phase of each date')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where each pair (YYMMDD-YYMMDD) is located. only folders are recognized')
    parser.add_argument('-odir', dest='odir', type=str, required=True,
            help = 'output directory for estimated ionospheric phase of each date')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-zro_date', dest='zro_date', type=str, default=None,
            help = 'date in least squares estimation whose ionospheric phase is assumed to be zero. format: YYMMDD. default: first date')
    parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None,
            help = 'pairs to be used in least squares estimation. This has highest priority. a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD...')
    parser.add_argument('-exc_date', dest='exc_date', type=str, nargs='+', default=[],
            help = 'pairs involving these dates are excluded in least squares estimation. a number of dates seperated by blanks. format: YYMMDD YYMMDD YYMMDD...')
    parser.add_argument('-exc_pair', dest='exc_pair', type=str, nargs='+', default=[],
            help = 'pairs excluded in least squares estimation. a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD...')
    parser.add_argument('-tsmax', dest='tsmax', type=float, default=None,
            help = 'maximum time span in years of pairs used in least squares estimation. default: None')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1,
            help = 'number of range looks 2. default: 1')
    parser.add_argument('-nalks2', dest='nalks2', type=int, default=1,
            help = 'number of azimuth looks 2. default: 1')
    parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1,
            help = 'number of range looks ion. default: 1')
    parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1,
            help = 'number of azimuth looks ion. default: 1')
    parser.add_argument('-ww', dest='ww', action='store_true', default=False,
            help='use reciprocal of window size as weight')
    parser.add_argument('-interp', dest='interp', action='store_true', default=False,
            help='interpolate ionospheric phase to nrlks2/nalks2 sample size')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
inps = cmdLineParse()
#get user parameters from input
idir = inps.idir
odir = inps.odir
dateReferenceStack = inps.ref_date_stack
#date whose ionospheric phase is fixed to zero in the estimation
dateZero = inps.zro_date
#explicit pair list (highest priority over the exclusion options)
pairsUser = inps.pairs
dateExcluded = inps.exc_date
pairExcluded = inps.exc_pair
#maximum pair time span in years
tsmax = inps.tsmax
numberRangeLooks1 = inps.nrlks1
numberAzimuthLooks1 = inps.nalks1
numberRangeLooks2 = inps.nrlks2
numberAzimuthLooks2 = inps.nalks2
numberRangeLooksIon = inps.nrlks_ion
numberAzimuthLooksIon = inps.nalks_ion
#whether to weight observations by reciprocal of filtering window size
ww = inps.ww
interp = inps.interp
#######################################################
#all pair folders in order
pairDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*-*')))
pairDirs = [x for x in pairDirs if os.path.isdir(x)]

#all pairs in order
pairsAll = [os.path.basename(x) for x in pairDirs]

#all dates in order
datesAll = datesFromPairs(pairsAll)

if pairsUser is not None:
    #user-provided pair list has highest priority; every pair must exist
    pairs = pairsUser
    for x in pairs:
        if x not in pairsAll:
            raise Exception('pair {} provided by user is not in processed pair list'.format(x))
else:
    #exclude
    #pairs = [x for x in pairsAll if (x.split('-')[0] not in dateExcluded) and (x.split('-')[1] not in dateExcluded)]
    #pairs = [x for x in pairsAll if x not in pairExcluded]
    pairs = []
    for x in pairsAll:
        dateReference = x.split('-')[0]
        dateSecondary = x.split('-')[1]
        timeReference = datetime.datetime.strptime(dateReference, "%y%m%d")
        timeSecondary = datetime.datetime.strptime(dateSecondary, "%y%m%d")
        #pair time span in years
        ts = np.absolute((timeSecondary - timeReference).total_seconds()) / (365.0 * 24.0 * 3600)
        #bugfix: -exc_date is documented as excluding pairs involving ANY of
        #the given dates; the original 'and' only dropped a pair when BOTH of
        #its dates were excluded (the commented-out list comprehension above
        #shows the intended either-date semantics)
        if (dateReference in dateExcluded) or (dateSecondary in dateExcluded):
            continue
        if (x in pairExcluded):
            continue
        if tsmax is not None:
            if ts > tsmax:
                continue
        pairs.append(x)

dates = datesFromPairs(pairs)
if dateZero is not None:
    if dateZero not in dates:
        raise Exception('zro_date provided by user not in the dates involved in least squares estimation.')
else:
    #default zero-ionosphere date: first date of the used pairs
    dateZero = dates[0]

print('all pairs:\n{}'.format(' '.join(pairsAll)))
print('all dates:\n{}'.format(' '.join(datesAll)))
print('used pairs:\n{}'.format(' '.join(pairs)))
print('used dates:\n{}'.format(' '.join(dates)))
####################################################################################
print('\nSTEP 1. read files')
####################################################################################

#number of dates and pairs used in the estimation
ndate = len(dates)
npair = len(pairs)

#multilook suffix of the filtered ionosphere products (looks 1 x looks ion)
ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooksIon, numberAzimuthLooks1*numberAzimuthLooksIon)

#per-pair file names produced by the ionosphere filtering step
ionfiltfile = 'filt_ion'+ml2+'.ion'
stdfiltfile = 'filt_ion'+ml2+'.std'
windowsizefiltfile = 'filt_ion'+ml2+'.win'

#use the first pair's image to get the common raster size
ionfiltfile1 = os.path.join(idir, pairs[0], 'ion/ion_cal', ionfiltfile)
img = isceobj.createImage()
img.load(ionfiltfile1+'.xml')
width = img.width
length = img.length

#stack the filtered ionosphere, its standard deviation, and the filtering
#window size of every pair into 3-D arrays (pair, line, sample)
ionPairs = np.zeros((npair, length, width), dtype=np.float32)
stdPairs = np.zeros((npair, length, width), dtype=np.float32)
winPairs = np.zeros((npair, length, width), dtype=np.float32)

for i in range(npair):
    ionfiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', ionfiltfile)
    stdfiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', stdfiltfile)
    windowsizefiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', windowsizefiltfile)
    ionPairs[i, :, :] = np.fromfile(ionfiltfile1, dtype=np.float32).reshape(length, width)
    stdPairs[i, :, :] = np.fromfile(stdfiltfile1, dtype=np.float32).reshape(length, width)
    winPairs[i, :, :] = np.fromfile(windowsizefiltfile1, dtype=np.float32).reshape(length, width)
####################################################################################
print('\nSTEP 2. do least squares')
####################################################################################
import copy
from numpy.linalg import matrix_rank
dates2 = copy.deepcopy(dates)
dates2.remove(dateZero)

#observation matrix: one row per pair, one column per estimated date;
#dateZero is excluded (its ionospheric phase is fixed to zero)
H0 = np.zeros((npair, ndate-1))
for k in range(npair):
    dateReference = pairs[k].split('-')[0]
    dateSecondary = pairs[k].split('-')[1]
    if dateReference != dateZero:
        dateReference_i = dates2.index(dateReference)
        H0[k, dateReference_i] = 1
    if dateSecondary != dateZero:
        dateSecondary_i = dates2.index(dateSecondary)
        H0[k, dateSecondary_i] = -1
#full column rank required: all dates must be connected by the pair network
rank = matrix_rank(H0)
if rank < ndate-1:
    raise Exception('dates to be estimated are not fully connected by the pairs used in least squares')
else:
    print('number of pairs to be used in least squares: {}'.format(npair))
    print('number of dates to be estimated: {}'.format(ndate-1))
    print('observation matrix rank: {}'.format(rank))

#per-pixel estimated ionospheric phase time series, one layer per date in dates2
ts = np.zeros((ndate-1, length, width), dtype=np.float32)
for i in range(length):
    if (i+1) % 50 == 0 or (i+1) == length:
        print('processing line: %6d of %6d' % (i+1, length), end='\r')
    if (i+1) == length:
        print()
    for j in range(width):
        #observed signal
        S0 = ionPairs[:, i, j]
        if ww == False:
            #observed signal
            S = S0
            H = H0
        else:
            #add weight
            #https://stackoverflow.com/questions/19624997/understanding-scipys-least-square-function-with-irls
            #https://stackoverflow.com/questions/27128688/how-to-use-least-squares-with-weight-matrix-in-python
            #weight = 1/window_size; pre-multiplying rows by sqrt(weight) is
            #equivalent to passing a diagonal weight matrix W, but faster
            wgt = winPairs[:, i, j]
            W = np.sqrt(1.0/wgt)
            H = H0 * W[:, None]
            S = S0 * W
        #do least-squares estimation
        #[theta, residuals, rank, singular] = np.linalg.lstsq(H, S)
        #make W full matrix if use W here (which is a slower method)
        #'using W before this' is faster
        theta = least_sqares(H, S, W=None)
        ts[:, i, j] = theta
# #dump raw estimate
# cdir = os.getcwd()
# os.makedirs(odir, exist_ok=True)
# os.chdir(odir)
# for i in range(ndate-1):
#     file_name = 'filt_ion_'+dates2[i]+ml2+'.ion'
#     ts[i, :, :].astype(np.float32).tofile(file_name)
#     create_xml(file_name, width, length, 'float')
# file_name = 'filt_ion_'+dateZero+ml2+'.ion'
# (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(file_name)
# create_xml(file_name, width, length, 'float')
# os.chdir(cdir)

####################################################################################
print('\nSTEP 3. interpolate ionospheric phase')
####################################################################################
from scipy.interpolate import interp1d

#multilook suffix of the final insar products (looks 1 x looks 2)
ml3 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2,
                              numberAzimuthLooks1*numberAzimuthLooks2)

#input grid size (ionosphere multilooks)
width2 = width
length2 = length

#ionrectfile1 = os.path.join(idir, pairs[0], 'insar', pairs[0] + ml3 + '.ion')
#multilookDifferentialInterferogram = os.path.join(idir, pairs[0], 'insar', 'diff_' + pairs[0] + ml3 + '.int')
#img = isceobj.createImage()
#img.load(multilookDifferentialInterferogram + '.xml')
#width3 = img.width
#length3 = img.length

#output grid size derived from the stack reference track parameters
trackParameter = os.path.join(idir, pairs[0], dateReferenceStack + '.track.xml')
trackTmp = loadProduct(trackParameter)
width3 = int(trackTmp.numberOfSamples / numberRangeLooks2)
length3 = int(trackTmp.numberOfLines / numberAzimuthLooks2)

#number of range looks output
nrlo = numberRangeLooks1*numberRangeLooks2
#number of range looks input
nrli = numberRangeLooks1*numberRangeLooksIon
#number of azimuth looks output
nalo = numberAzimuthLooks1*numberAzimuthLooks2
#number of azimuth looks input
nali = numberAzimuthLooks1*numberAzimuthLooksIon

cdir = os.getcwd()
os.makedirs(odir, exist_ok=True)
os.chdir(odir)

for idate in range(ndate-1):
    #NOTE(review): 'interplate' typo kept as-is (runtime message)
    print('interplate {}'.format(dates2[idate]))
    if interp and ((numberRangeLooks2 != numberRangeLooksIon) or (numberAzimuthLooks2 != numberAzimuthLooksIon)):
        ionfilt = ts[idate, :, :]
        #interpolate line by line in range direction; the index mapping
        #accounts for the multilook pixel-center offset between the grids
        index2 = np.linspace(0, width2-1, num=width2, endpoint=True)
        index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli)
        ionrect = np.zeros((length3, width3), dtype=np.float32)
        for i in range(length2):
            f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate")
            ionrect[i, :] = f(index3)
        #then interpolate column by column in azimuth direction
        index2 = np.linspace(0, length2-1, num=length2, endpoint=True)
        index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali)
        for j in range(width3):
            f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate")
            ionrect[:, j] = f(index3)
        ionrectfile = 'filt_ion_'+dates2[idate]+ml3+'.ion'
        ionrect.astype(np.float32).tofile(ionrectfile)
        create_xml(ionrectfile, width3, length3, 'float')
    else:
        #no interpolation requested (or grids already match): write as-is
        ionrectfile = 'filt_ion_'+dates2[idate]+ml2+'.ion'
        ts[idate, :, :].astype(np.float32).tofile(ionrectfile)
        create_xml(ionrectfile, width, length, 'float')

#write the (all-zero) ionospheric phase of the zero-phase reference date,
#on whichever grid was used above
if interp and ((numberRangeLooks2 != numberRangeLooksIon) or (numberAzimuthLooks2 != numberAzimuthLooksIon)):
    ionrectfile = 'filt_ion_'+dateZero+ml3+'.ion'
    (np.zeros((length3, width3), dtype=np.float32)).astype(np.float32).tofile(ionrectfile)
    create_xml(ionrectfile, width3, length3, 'float')
else:
    ionrectfile = 'filt_ion_'+dateZero+ml2+'.ion'
    (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(ionrectfile)
    create_xml(ionrectfile, width, length, 'float')
os.chdir(cdir)

View File

@ -0,0 +1,619 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Constants import SPEED_OF_LIGHT
from isceobj.Alos2Proc.runSwathOffset import swathOffset
from isceobj.Alos2Proc.runFrameOffset import frameOffset
from isceobj.Alos2Proc.runIonSubband import defineIonDir
from StackPulic import loadTrack
from StackPulic import createObject
from StackPulic import stackDateStatistics
from StackPulic import acquisitionModesAlos2
def runIonSubband(self, referenceTrack, idir, dateReferenceStack, dateReference, dateSecondary):
'''create subband interferograms
'''
#catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
#self.updateParamemetersFromUser()
#if not self.doIon:
# catalog.printToLog(logger, "runIonSubband")
# self._insar.procDoc.addAllFromCatalog(catalog)
# return
#referenceTrack = self._insar.loadTrack(reference=True)
#secondaryTrack = self._insar.loadTrack(reference=False)
#using 1/3, 1/3, 1/3 band split
radarWavelength = referenceTrack.radarWavelength
rangeBandwidth = referenceTrack.frames[0].swaths[0].rangeBandwidth
rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate
radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0)
radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0)
subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper]
subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate]
subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate]
subbandPrefix = ['lower', 'upper']
'''
ionDir = {
ionDir['swathMosaic'] : 'mosaic',
ionDir['insar'] : 'insar',
ionDir['ion'] : 'ion',
ionDir['subband'] : ['lower', 'upper'],
ionDir['ionCal'] : 'ion_cal'
}
'''
#define upper level directory names
ionDir = defineIonDir()
#self._insar.subbandRadarWavelength = subbandRadarWavelength
############################################################
# STEP 1. create directories
############################################################
#create and enter 'ion' directory
#after finishing each step, we are in this directory
os.makedirs(ionDir['ion'], exist_ok=True)
os.chdir(ionDir['ion'])
#create insar processing directories
for k in range(2):
subbandDir = ionDir['subband'][k]
for i, frameNumber in enumerate(self._insar.referenceFrames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
swathDir = 's{}'.format(swathNumber)
fullDir = os.path.join(subbandDir, frameDir, swathDir)
os.makedirs(fullDir, exist_ok=True)
#create ionospheric phase directory
os.makedirs(ionDir['ionCal'], exist_ok=True)
############################################################
# STEP 2. create subband interferograms
############################################################
#import numpy as np
#import stdproc
#from iscesys.StdOEL.StdOELPy import create_writer
#from isceobj.Alos2Proc.Alos2ProcPublic import readOffset
#from contrib.alos2proc.alos2proc import rg_filter
from StackPulic import formInterferogram
for i, frameNumber in enumerate(self._insar.referenceFrames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
swathDir = 's{}'.format(swathNumber)
#skip this time consuming process, if interferogram already exists
if os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram)) and \
os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \
os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.xml')) and \
os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude)) and \
os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \
os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.xml')) and \
os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram)) and \
os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \
os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.xml')) and \
os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude)) and \
os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \
os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.xml')):
print('interferogram already exists at swath {}, frame {}'.format(swathNumber, frameNumber))
continue
# #filter reference and secondary images
# for slcx in [self._insar.referenceSlc, self._insar.secondarySlc]:
# slc = os.path.join('../', frameDir, swathDir, slcx)
# slcLower = os.path.join(ionDir['subband'][0], frameDir, swathDir, slcx)
# slcUpper = os.path.join(ionDir['subband'][1], frameDir, swathDir, slcx)
# rg_filter(slc, 2,
# [slcLower, slcUpper],
# subbandBandWidth,
# subbandFrequencyCenter,
# 257, 2048, 0.1, 0, 0.0)
#resample
for k in range(2):
os.chdir(os.path.join(ionDir['subband'][k], frameDir, swathDir))
slcReference = os.path.join('../../../../', idir, dateReference, frameDir, swathDir, dateReference+'_{}.slc'.format(ionDir['subband'][k]))
slcSecondary = os.path.join('../../../../', idir, dateSecondary, frameDir, swathDir, dateSecondary+'_{}.slc'.format(ionDir['subband'][k]))
formInterferogram(slcReference, slcSecondary, self._insar.interferogram, self._insar.amplitude, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1)
os.chdir('../../../')
############################################################
# STEP 3. mosaic swaths
############################################################
from isceobj.Alos2Proc.runSwathMosaic import swathMosaic
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
#log output info
log = 'mosaic swaths in {} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now())
log += '================================================================================================\n'
for k in range(2):
os.chdir(ionDir['subband'][k])
for i, frameNumber in enumerate(self._insar.referenceFrames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
os.chdir(frameDir)
mosaicDir = ionDir['swathMosaic']
os.makedirs(mosaicDir, exist_ok=True)
os.chdir(mosaicDir)
if not (self._insar.endingSwath-self._insar.startingSwath >= 1):
import shutil
swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber)
# if not os.path.isfile(self._insar.interferogram):
# os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram)
# shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
# shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
# if not os.path.isfile(self._insar.amplitude):
# os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude)
# shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
# shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')
os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram)
os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude)
os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')
#no need to update frame parameters here
os.chdir('../')
#no need to save parameter file here
os.chdir('../')
continue
#choose offsets
numberOfFrames = len(referenceTrack.frames)
numberOfSwaths = len(referenceTrack.frames[i].swaths)
# if self.swathOffsetMatching:
# #no need to do this as the API support 2-d list
# #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths)
# #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths)
# rangeOffsets = self._insar.swathRangeOffsetMatchingReference
# azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference
# else:
# #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths)
# #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths)
# rangeOffsets = self._insar.swathRangeOffsetGeometricalReference
# azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference
# rangeOffsets = rangeOffsets[i]
# azimuthOffsets = azimuthOffsets[i]
#compute swath offset using reference stack
#geometrical offset is enough now
offsetReferenceStack = swathOffset(referenceTrack.frames[i], dateReference+'.slc', 'swath_offset_' + dateReference + '.txt',
crossCorrelation=False, numberOfAzimuthLooks=10)
#we can faithfully make it integer.
#this can also reduce the error due to floating point computation
rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]]
azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]]
#list of input files
inputInterferograms = []
inputAmplitudes = []
#phaseDiff = [None]
swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon]
phaseDiff = swathPhaseDiffIon[k]
if swathPhaseDiffIon[k] is None:
phaseDiff = None
else:
phaseDiff = swathPhaseDiffIon[k][i]
phaseDiff.insert(0, None)
for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
swathDir = 's{}'.format(swathNumber)
inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram))
inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude))
# #compute phase needed to be compensated using startingRange
# if j >= 1:
# #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k]
# #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k]
# phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
# -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
# phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
# -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
# if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \
# referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange:
# #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1)
# #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m),
# #it should be OK to do the above.
# #see results in neom where it meets the above requirement, but there is still phase diff
# #to be less risky, we do not input values here
# phaseDiff.append(None)
# else:
# phaseDiff.append(None)
#note that frame parameters are updated after mosaicking, here no need to update parameters
#mosaic amplitudes
swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0)
#mosaic interferograms
#These are for ALOS-2, may need to change for ALOS-4!
phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857]
#if (referenceTrack.frames[i].processingSoftwareVersion == '2.025' and secondaryTrack.frames[i].processingSoftwareVersion == '2.023') or \
# (referenceTrack.frames[i].processingSoftwareVersion == '2.023' and secondaryTrack.frames[i].processingSoftwareVersion == '2.025'):
# # changed value number of samples to estimate new value new values estimate area
# ###########################################################################################################################
# # 2.6766909968024932-->2.6581660335779866 1808694 d169-f2850, north CA
# # 2.179664007520499 -->2.204125866652153 131120 d169-f2850, north CA
# phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.204125866652153, 2.6581660335779866, 3.130810857]
snapThreshold = 0.2
#the above preparetions only applies to 'self._insar.modeCombination == 21'
#looks like it also works for 31 (scansarNominalModes-stripmapModes)
# if self._insar.modeCombination != 21:
# phaseDiff = None
# phaseDiffFixed = None
# snapThreshold = None
#whether snap for each swath
if self.swathPhaseDiffSnapIon == None:
snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)]
else:
snapSwath = self.swathPhaseDiffSnapIon
if len(snapSwath) != numberOfFrames:
raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values')
for iii in range(numberOfFrames):
if len(snapSwath[iii]) != (numberOfSwaths-1):
raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values')
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False,
phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=4,
filt=False, resamplingMethod=1)
#the first item is meaningless for all the following list, so only record the following items
if phaseDiff == None:
phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)]
#catalog.addItem('frame {} {} band swath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband')
#catalog.addItem('frame {} {} band swath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
#catalog.addItem('frame {} {} band swath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
#catalog.addItem('frame {} {} band swath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
#catalog.addItem('frame {} {} band swath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')
log += 'frame {} {} band swath phase diff input: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiff[1:])
log += 'frame {} {} band swath phase diff estimated: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffEst[1:])
log += 'frame {} {} band swath phase diff used: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffUsed[1:])
log += 'frame {} {} band swath phase diff used source: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffSource[1:])
log += 'frame {} {} band swath phase diff samples used: {}\n'.format(frameNumber, ionDir['subband'][k], numberOfValidSamples[1:])
#check if there is value around 3.130810857, which may not be stable
phaseDiffUnstableExist = False
for xxx in phaseDiffUsed:
if abs(abs(xxx) - 3.130810857) < 0.2:
phaseDiffUnstableExist = True
#catalog.addItem('frame {} {} band swath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband')
log += 'frame {} {} band swath phase diff unstable exists: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffUnstableExist)
log += '\n'
create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int')
#update secondary frame parameters here, here no need to update parameters
os.chdir('../')
#save parameter file, here no need to save parameter file
os.chdir('../')
os.chdir('../')
############################################################
# STEP 4. mosaic frames
############################################################
from isceobj.Alos2Proc.runFrameMosaic import frameMosaic
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
log += 'mosaic frames in {} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now())
log += '================================================================================================\n'
spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()
for k in range(2):
os.chdir(ionDir['subband'][k])
mosaicDir = ionDir['insar']
os.makedirs(mosaicDir, exist_ok=True)
os.chdir(mosaicDir)
numberOfFrames = len(referenceTrack.frames)
if numberOfFrames == 1:
import shutil
frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0]))
# if not os.path.isfile(self._insar.interferogram):
# os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram)
# #shutil.copy2() can overwrite
# shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
# shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
# if not os.path.isfile(self._insar.amplitude):
# os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude)
# shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
# shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')
os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram)
os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude)
os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')
#update track parameters, no need to update track parameters here
else:
# #choose offsets
# if self.frameOffsetMatching:
# rangeOffsets = self._insar.frameRangeOffsetMatchingReference
# azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference
# else:
# rangeOffsets = self._insar.frameRangeOffsetGeometricalReference
# azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference
if referenceTrack.operationMode in scansarModes:
matchingMode=0
else:
matchingMode=1
#geometrical offset is enough
offsetReferenceStack = frameOffset(referenceTrack, dateReference+'.slc', 'frame_offset_' + dateReference + '.txt',
crossCorrelation=False, matchingMode=matchingMode)
#we can faithfully make it integer.
#this can also reduce the error due to floating point computation
rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]]
azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]]
#list of input files
inputInterferograms = []
inputAmplitudes = []
for i, frameNumber in enumerate(self._insar.referenceFrames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram))
inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude))
#note that track parameters are updated after mosaicking
#mosaic amplitudes
frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=False, resamplingMethod=0)
#mosaic interferograms
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
updateTrack=False, phaseCompensation=True, resamplingMethod=1)
create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp')
create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')
#if multiple frames, remove frame amplitudes/inteferograms to save space
for x in inputAmplitudes:
os.remove(x)
os.remove(x+'.vrt')
os.remove(x+'.xml')
for x in inputInterferograms:
os.remove(x)
os.remove(x+'.vrt')
os.remove(x+'.xml')
#catalog.addItem('{} band frame phase diff estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
#catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
#catalog.addItem('{} band frame phase diff used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
#catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')
log += '{} band frame phase diff estimated: {}\n'.format(ionDir['subband'][k], phaseDiffEst[1:])
log += '{} band frame phase diff used: {}\n'.format(ionDir['subband'][k], phaseDiffUsed[1:])
log += '{} band frame phase diff used source: {}\n'.format(ionDir['subband'][k], phaseDiffSource[1:])
log += '{} band frame phase diff samples used: {}\n'.format(ionDir['subband'][k], numberOfValidSamples[1:])
log += '\n'
#update secondary parameters here, no need to update secondary parameters here
os.chdir('../')
#save parameter file, no need to save parameter file here
os.chdir('../')
############################################################
# STEP 5. clear frame processing files
############################################################
import shutil
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
for k in range(2):
os.chdir(ionDir['subband'][k])
for i, frameNumber in enumerate(self._insar.referenceFrames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
#keep subswath interferograms
#shutil.rmtree(frameDir)
#cmd = 'rm -rf {}'.format(frameDir)
#runCmd(cmd)
os.chdir('../')
############################################################
# STEP 6. create differential interferograms
############################################################
import numpy as np
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
for k in range(2):
os.chdir(ionDir['subband'][k])
insarDir = ionDir['insar']
os.makedirs(insarDir, exist_ok=True)
os.chdir(insarDir)
rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize
radarWavelength = subbandRadarWavelength[k]
ml1 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1)
if dateReference == dateReferenceStack:
rectRangeOffset = os.path.join('../../../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off')
cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram)
elif dateSecondary == dateReferenceStack:
rectRangeOffset = os.path.join('../../../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off')
cmd = "imageMath.py -e='a*exp(1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram)
else:
rectRangeOffset1 = os.path.join('../../../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off')
rectRangeOffset2 = os.path.join('../../../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off')
cmd = "imageMath.py -e='a*exp(1.0*J*(b-c)*4.0*{}*{}/{})*(b!=0)*(c!=0)' --a={} --b={} --c={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset1, rectRangeOffset2, self._insar.differentialInterferogram)
runCmd(cmd)
os.chdir('../../')
os.chdir('../')
return log
def cmdLineParse():
    '''
    Command line parser for creating subband interferograms for ionospheric correction.

    Returns the parsed argparse namespace; prints help and exits with status 1
    when called with no arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='create subband interferograms for ionospheric correction')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed copy-paste error: this is the secondary date, not the reference date
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    # parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1,
    #         help = 'number of range looks ion. default: 1')
    # parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1,
    #         help = 'number of azimuth looks ion. default: 1')
    #action='append' with nargs='+' yields a 2-d list: one inner list per frame
    parser.add_argument('-snap', dest='snap', type=int, nargs='+', action='append', default=None,
            help='swath phase difference snap to fixed values. e.g. you have 3 swaths and 2 frames. specify this parameter as: -snap 1 1 -snap 1 0, where 0 means no snap, 1 means snap')
    #fixed copy-paste error: examples below previously showed the -snap flag name
    parser.add_argument('-phase_diff_lower', dest='phase_diff_lower', type=str, nargs='+', action='append', default=None,
            help='swath phase difference lower band. e.g. you have 3 swaths and 2 frames. specify this parameter as: -phase_diff_lower -1.3 2.37 -phase_diff_lower 0.1 None, where None means no user input phase difference value')
    parser.add_argument('-phase_diff_upper', dest='phase_diff_upper', type=str, nargs='+', action='append', default=None,
            help='swath phase difference upper band. e.g. you have 3 swaths and 2 frames. specify this parameter as: -phase_diff_upper -1.3 2.37 -phase_diff_upper 0.1 None, where None means no user input phase difference value')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
def checkSwathFrameSetting(values, nframe, nswath, optionName):
    '''
    Check the shape of a per-frame, per-swath-overlap 2-d list parameter.

    values:     2-d list; one inner list per frame, one entry per swath overlap
    nframe:     expected number of frames
    nswath:     number of swaths (each frame must provide nswath-1 entries)
    optionName: command line option name, used in the error messages

    Raises Exception when the shape does not match.
    '''
    if len(values) != nframe:
        raise Exception('please specify each frame for parameter: {}'.format(optionName))
    for i in range(nframe):
        if len(values[i]) != (nswath-1):
            raise Exception('please specify correct number of swaths for parameter: {}'.format(optionName))


if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    idir = inps.idir
    dateReferenceStack = inps.ref_date_stack
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    #numberRangeLooksIon = inps.nrlks_ion
    #numberAzimuthLooksIon = inps.nalks_ion
    swathPhaseDiffSnapIon = inps.snap
    swathPhaseDiffLowerIon = inps.phase_diff_lower
    swathPhaseDiffUpperIon = inps.phase_diff_upper
    #######################################################

    pair = '{}-{}'.format(dateReference, dateSecondary)
    ms = pair
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)

    dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack)
    nframe = len(frames)
    nswath = len(swaths)

    trackReferenceStack = loadTrack('./', dates[dateIndexReference])
    #trackReference = loadTrack('./', dateReference)
    #trackSecondary = loadTrack('./', dateSecondary)

    #build a minimal application-like object carrying the parameters runIonSubband expects
    self = createObject()
    self._insar = createObject()
    self._insar.referenceFrames = frames
    self._insar.startingSwath = swaths[0]
    self._insar.endingSwath = swaths[-1]
    self._insar.numberRangeLooks1 = numberRangeLooks1
    self._insar.numberAzimuthLooks1 = numberAzimuthLooks1

    self._insar.interferogram = ms + ml1 + '.int'
    self._insar.amplitude = ms + ml1 + '.amp'
    self._insar.differentialInterferogram = 'diff_' + ms + ml1 + '.int'

    #set self.swathPhaseDiffSnapIon, self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon
    if swathPhaseDiffSnapIon is not None:
        #convert integer flags (1/other) to booleans
        swathPhaseDiffSnapIon = [[True if x==1 else False for x in y] for y in swathPhaseDiffSnapIon]
        checkSwathFrameSetting(swathPhaseDiffSnapIon, nframe, nswath, '-snap')

    if swathPhaseDiffLowerIon is not None:
        #convert the string 'None' (any case) to None, everything else to float
        swathPhaseDiffLowerIon = [[float(x) if x.upper() != 'NONE' else None for x in y] for y in swathPhaseDiffLowerIon]
        checkSwathFrameSetting(swathPhaseDiffLowerIon, nframe, nswath, '-phase_diff_lower')

    if swathPhaseDiffUpperIon is not None:
        swathPhaseDiffUpperIon = [[float(x) if x.upper() != 'NONE' else None for x in y] for y in swathPhaseDiffUpperIon]
        checkSwathFrameSetting(swathPhaseDiffUpperIon, nframe, nswath, '-phase_diff_upper')

    self.swathPhaseDiffSnapIon = swathPhaseDiffSnapIon
    self.swathPhaseDiffLowerIon = swathPhaseDiffLowerIon
    self.swathPhaseDiffUpperIon = swathPhaseDiffUpperIon

    log = runIonSubband(self, trackReferenceStack, idir, dateReferenceStack, dateReference, dateSecondary)

    logFile = 'process.log'
    with open(logFile, 'a') as f:
        f.write(log)

View File

@ -0,0 +1,113 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.runIonUwrap import ionUwrap
from StackPulic import loadTrack
from StackPulic import createObject
from StackPulic import stackDateStatistics
def cmdLineParse():
    '''
    Command line parser for unwrapping subband interferograms for ionospheric correction.

    Returns the parsed argparse namespace; prints help and exits with status 1
    when called with no arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='unwrap subband interferograms for ionospheric correction')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed copy-paste error: this is the secondary date, not the reference date
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-wbd', dest='wbd', type=str, required=True,
            help = 'water body file')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1,
            help = 'number of range looks ion. default: 1')
    parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1,
            help = 'number of azimuth looks ion. default: 1')
    parser.add_argument('-filt', dest='filt', action='store_true', default=False,
            help='filter subband interferograms')
    parser.add_argument('-alpha', dest='alpha', type=float, default=0.3,
            help='filtering strength. default: 0.3')
    parser.add_argument('-win', dest='win', type=int, default=32,
            help = 'filter window size. default: 32')
    parser.add_argument('-step', dest='step', type=int, default=4,
            help = 'filter step size. default: 4')
    parser.add_argument('-keep_mag', dest='keep_mag', action='store_true', default=False,
            help='keep magnitude before filtering subband interferogram')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #multilook suffix used in all level-1 product file names
    looks1 = '_{}rlks_{}alks'.format(inps.nrlks1, inps.nalks1)
    pairName = '{}-{}'.format(inps.ref_date, inps.sec_date)

    dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(inps.idir, inps.ref_date_stack)
    trackReference = loadTrack('./', inps.ref_date)

    #build a minimal application-like object carrying the parameters ionUwrap expects
    self = createObject()
    self._insar = createObject()
    self._insar.wbd = inps.wbd
    self._insar.numberRangeLooks1 = inps.nrlks1
    self._insar.numberAzimuthLooks1 = inps.nalks1
    self._insar.numberRangeLooksIon = inps.nrlks_ion
    self._insar.numberAzimuthLooksIon = inps.nalks_ion

    self._insar.amplitude = pairName + looks1 + '.amp'
    self._insar.differentialInterferogram = 'diff_' + pairName + looks1 + '.int'
    self._insar.latitude = inps.ref_date_stack + looks1 + '.lat'
    self._insar.longitude = inps.ref_date_stack + looks1 + '.lon'

    #filtering options for the subband interferograms
    self.filterSubbandInt = inps.filt
    self.filterStrengthSubbandInt = inps.alpha
    self.filterWinsizeSubbandInt = inps.win
    self.filterStepsizeSubbandInt = inps.step
    self.removeMagnitudeBeforeFilteringSubbandInt = not inps.keep_mag

    ionUwrap(self, trackReference, latLonDir=os.path.join(inps.idir, dates[dateIndexReference], 'insar'))

View File

@ -0,0 +1,113 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from contrib.alos2proc.alos2proc import look
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
from isceobj.Alos2Proc.runCoherence import coherence
from StackPulic import loadProduct
from StackPulic import stackDateStatistics
def cmdLineParse():
    '''
    Command line parser for taking more looks and computing coherence.

    Returns the parsed argparse namespace; prints help and exits with status 1
    when called with no arguments.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='take more looks and compute coherence')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed copy-paste error: this is the secondary date, not the reference date
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1,
            help = 'number of range looks 2. default: 1')
    parser.add_argument('-nalks2', dest='nalks2', type=int, default=1,
            help = 'number of azimuth looks 2. default: 1')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()

    #get user parameters from input
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    numberRangeLooks2 = inps.nrlks2
    numberAzimuthLooks2 = inps.nalks2
    #######################################################

    pair = '{}-{}'.format(dateReference, dateSecondary)
    #file name suffixes for the level-1 and level-2 (total) multilook factors
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)
    ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2)

    #all inputs and outputs live in the 'insar' subdirectory
    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    #level-1 inputs and level-2 multilooked outputs
    amplitude = pair + ml1 + '.amp'
    differentialInterferogram = 'diff_' + pair + ml1 + '.int'
    multilookAmplitude = pair + ml2 + '.amp'
    multilookDifferentialInterferogram = 'diff_' + pair + ml2 + '.int'
    multilookCoherence = pair + ml2 + '.cor'

    #read level-1 image dimensions from the amplitude metadata
    amp = isceobj.createImage()
    amp.load(amplitude+'.xml')
    width = amp.width
    length = amp.length
    #dimensions after taking the additional level-2 looks (truncated)
    width2 = int(width / numberRangeLooks2)
    length2 = int(length / numberAzimuthLooks2)

    if not ((numberRangeLooks2 == 1) and (numberAzimuthLooks2 == 1)):
        #take looks
        #NOTE(review): the trailing look() arguments (4, 0/1, 1) are presumably
        #data type and amplitude/phase handling flags — confirm against the
        #contrib.alos2proc.alos2proc.look API
        look(differentialInterferogram, multilookDifferentialInterferogram, width, numberRangeLooks2, numberAzimuthLooks2, 4, 0, 1)
        look(amplitude, multilookAmplitude, width, numberRangeLooks2, numberAzimuthLooks2, 4, 1, 1)
        #creat xml
        create_xml(multilookDifferentialInterferogram, width2, length2, 'int')
        create_xml(multilookAmplitude, width2, length2, 'amp')

    #with enough total looks, coherence per pixel is stable: compute it directly
    #from the interferogram magnitude and the two amplitudes via imageMath.py;
    #the (b_i==0)/(b_i!=0) terms guard against division by zero in no-data pixels
    if (numberRangeLooks1*numberRangeLooks2*numberAzimuthLooks1*numberAzimuthLooks2 >= 9):
        cmd = "imageMath.py -e='sqrt(b_0*b_1);abs(a)/(b_0+(b_0==0))/(b_1+(b_1==0))*(b_0!=0)*(b_1!=0)' --a={} --b={} -o {} -t float -s BIL".format(
            multilookDifferentialInterferogram,
            multilookAmplitude,
            multilookCoherence)
        runCmd(cmd)
    else:
        #estimate coherence using a moving window
        coherence(multilookAmplitude, multilookDifferentialInterferogram, multilookCoherence,
            method="cchz_wave", windowSize=5)

    #return to the directory we started from
    os.chdir('../')

View File

@ -0,0 +1,130 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from contrib.alos2proc.alos2proc import look
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar
def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='take more looks')
    parser.add_argument('-date', dest='date', type=str, required=True,
            help = 'date. format: YYMMDD')
    parser.add_argument('-wbd', dest='wbd', type=str, required=True,
            help = 'water body file')
    #the four multilook options share the same shape, so add them in a loop
    for stage in (1, 2):
        for direction, prefix in (('range', 'nrlks'), ('azimuth', 'nalks')):
            parser.add_argument('-{}{}'.format(prefix, stage), dest='{}{}'.format(prefix, stage),
                    type=int, default=1,
                    help='number of {} looks {}. default: 1'.format(direction, stage))

    #with no arguments at all, print usage and quit
    if len(sys.argv) > 1:
        return parser.parse_args()
    print('')
    parser.print_help()
    sys.exit(1)
if __name__ == '__main__':
    #alosStack step: take extra looks (nrlks2/nalks2) on the geometry products
    #(lat/lon/hgt/los) of one date and regenerate the water body at the new looks.
    #Runs inside the date directory; products are read/written under ./insar.
    inps = cmdLineParse()

    #get user parameters from input
    date = inps.date
    wbdFile = inps.wbd
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    numberRangeLooks2 = inps.nrlks2
    numberAzimuthLooks2 = inps.nalks2
    #######################################################

    #pair = '{}-{}'.format(dateReference, dateSecondary)
    #file-name tags: looks already taken (ml1) and total looks after this step (ml2)
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)
    ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2)

    latitude = date + ml1 + '.lat'
    longitude = date + ml1 + '.lon'
    height = date + ml1 + '.hgt'
    los = date + ml1 + '.los'

    multilookLatitude = date + ml2 + '.lat'
    multilookLongitude = date + ml2 + '.lon'
    multilookHeight = date + ml2 + '.hgt'
    multilookLos = date + ml2 + '.los'
    multilookWbdOut = date + ml2 + '.wbd'

    #absolute path, since we chdir into ./insar below
    wbdFile = os.path.abspath(wbdFile)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    #image size comes from the ml1 latitude metadata
    img = isceobj.createImage()
    img.load(latitude+'.xml')
    width = img.width
    length = img.length
    width2 = int(width / numberRangeLooks2)
    length2 = int(length / numberAzimuthLooks2)

    if not ((numberRangeLooks2 == 1) and (numberAzimuthLooks2 == 1)):
        #take looks
        look(latitude, multilookLatitude, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1)
        look(longitude, multilookLongitude, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1)
        look(height, multilookHeight, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1)
        #create xml
        create_xml(multilookLatitude, width2, length2, 'double')
        create_xml(multilookLongitude, width2, length2, 'double')
        create_xml(multilookHeight, width2, length2, 'double')
        #los has two bands, use look program in isce instead
        #cmd = "looks.py -i {} -o {} -r {} -a {}".format(self._insar.los, self._insar.multilookLos, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2)
        #runCmd(cmd)

        #replace the above system call with function call
        from mroipac.looks.Looks import Looks
        from isceobj.Image import createImage
        inImage = createImage()
        inImage.load(los+'.xml')
        lkObj = Looks()
        lkObj.setDownLooks(numberAzimuthLooks2)
        lkObj.setAcrossLooks(numberRangeLooks2)
        lkObj.setInputImage(inImage)
        lkObj.setOutputFilename(multilookLos)
        lkObj.looks()

        #water body
        #this looking operation has no problems where there is only water and land, but there is also possible no-data area
        #look(self._insar.wbdOut, self._insar.multilookWbdOut, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 0, 0, 1)
        #create_xml(self._insar.multilookWbdOut, width2, length2, 'byte')
        #use waterBodyRadar instead to avoid the problems of no-data pixels in water body
        waterBodyRadar(multilookLatitude, multilookLongitude, wbdFile, multilookWbdOut)

    os.chdir('../')

View File

@ -0,0 +1,226 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from isceobj.Alos2Proc.runSwathOffset import swathOffset
from isceobj.Alos2Proc.runFrameOffset import frameOffset
from isceobj.Alos2Proc.runSwathMosaic import swathMosaic
from isceobj.Alos2Proc.runFrameMosaic import frameMosaic
from StackPulic import acquisitionModesAlos2
from StackPulic import loadTrack
def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    #NOTE(review): description says 'form interferogram' although this script
    #mosaics swaths/frames — confirm intended wording before changing it.
    parser = argparse.ArgumentParser(description='form interferogram')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    #fixed help text: this is the secondary date, not the reference date
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')

    if len(sys.argv) <= 1:
        #invoked without arguments: show usage and quit
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    #alosStack step: mosaic the per-swath interferograms/amplitudes of one pair
    #into per-frame products, then mosaic the frames into one track product
    #under ./insar. Swath/frame offsets are computed from the stack reference.
    inps = cmdLineParse()

    #get user parameters from input
    dateReferenceStack = inps.ref_date_stack
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    #######################################################

    logFile = 'process.log'

    pair = '{}-{}'.format(dateReference, dateSecondary)
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)

    interferogram = pair + ml1 + '.int'
    amplitude = pair + ml1 + '.amp'

    spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()

    #use one date to find frames and swaths. any date should work, here we use dateIndexReference
    frames = sorted([x[-4:] for x in glob.glob(os.path.join('./', 'f*_*'))])
    swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join('./', 'f1_*', 's*'))])

    nframe = len(frames)
    nswath = len(swaths)

    trackReferenceStack = loadTrack('./', dateReferenceStack)

    #mosaic swaths
    for i, frameNumber in enumerate(frames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)

        mosaicDir = 'mosaic'
        os.makedirs(mosaicDir, exist_ok=True)
        os.chdir(mosaicDir)

        if not (swaths[-1] - swaths[0] >= 1):
            #single swath: nothing to mosaic, just link/copy the swath products
            swathDir = 's{}'.format(swaths[0])
            if not os.path.isfile(interferogram):
                os.symlink(os.path.join('../', swathDir, interferogram), interferogram)
            #shutil.copy2() can overwrite
            shutil.copy2(os.path.join('../', swathDir, interferogram+'.vrt'), interferogram+'.vrt')
            shutil.copy2(os.path.join('../', swathDir, interferogram+'.xml'), interferogram+'.xml')
            if not os.path.isfile(amplitude):
                os.symlink(os.path.join('../', swathDir, amplitude), amplitude)
            shutil.copy2(os.path.join('../', swathDir, amplitude+'.vrt'), amplitude+'.vrt')
            shutil.copy2(os.path.join('../', swathDir, amplitude+'.xml'), amplitude+'.xml')

            os.chdir('../../')
        else:
            #compute swath offset using reference stack
            #geometrical offset is enough now
            offsetReferenceStack = swathOffset(trackReferenceStack.frames[i], dateReferenceStack+'.slc', 'swath_offset_' + dateReferenceStack + '.txt',
                crossCorrelation=False, numberOfAzimuthLooks=10)
            #we can faithfully make it integer.
            #this can also reduce the error due to floating point computation
            rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]]
            azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]]

            #list of input files
            inputInterferograms = []
            inputAmplitudes = []
            for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
                swathDir = 's{}'.format(swathNumber)
                inputInterferograms.append(os.path.join('../', swathDir, interferogram))
                inputAmplitudes.append(os.path.join('../', swathDir, amplitude))

            #note that frame parameters do not need to be updated after mosaicking
            #mosaic amplitudes
            swathMosaic(trackReferenceStack.frames[i], inputAmplitudes, amplitude,
                rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, resamplingMethod=0)
            #mosaic interferograms
            swathMosaic(trackReferenceStack.frames[i], inputInterferograms, interferogram,
                rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, resamplingMethod=1)

            create_xml(amplitude, trackReferenceStack.frames[i].numberOfSamples, trackReferenceStack.frames[i].numberOfLines, 'amp')
            create_xml(interferogram, trackReferenceStack.frames[i].numberOfSamples, trackReferenceStack.frames[i].numberOfLines, 'int')

            os.chdir('../../')

    #mosaic frame
    mosaicDir = 'insar'
    os.makedirs(mosaicDir, exist_ok=True)
    os.chdir(mosaicDir)

    if nframe == 1:
        #single frame: nothing to mosaic, just link/copy the frame products
        frameDir = os.path.join('f1_{}/mosaic'.format(frames[0]))
        if not os.path.isfile(interferogram):
            os.symlink(os.path.join('../', frameDir, interferogram), interferogram)
        #shutil.copy2() can overwrite
        shutil.copy2(os.path.join('../', frameDir, interferogram+'.vrt'), interferogram+'.vrt')
        shutil.copy2(os.path.join('../', frameDir, interferogram+'.xml'), interferogram+'.xml')
        if not os.path.isfile(amplitude):
            os.symlink(os.path.join('../', frameDir, amplitude), amplitude)
        shutil.copy2(os.path.join('../', frameDir, amplitude+'.vrt'), amplitude+'.vrt')
        shutil.copy2(os.path.join('../', frameDir, amplitude+'.xml'), amplitude+'.xml')
    else:
        if trackReferenceStack.operationMode in scansarModes:
            matchingMode=0
        else:
            matchingMode=1

        #geometrical offset is enough
        offsetReferenceStack = frameOffset(trackReferenceStack, dateReferenceStack+'.slc', 'frame_offset_' + dateReferenceStack + '.txt',
            crossCorrelation=False, matchingMode=matchingMode)
        #we can faithfully make it integer.
        #this can also reduce the error due to floating point computation
        rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]]
        azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]]

        #list of input files
        inputInterferograms = []
        inputAmplitudes = []
        for i, frameNumber in enumerate(frames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', interferogram))
            inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', amplitude))

        #note that track parameters do not need to be updated after mosaicking
        #mosaic amplitudes
        frameMosaic(trackReferenceStack, inputAmplitudes, amplitude,
            rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1,
            updateTrack=False, phaseCompensation=False, resamplingMethod=0)
        #mosaic interferograms
        (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = \
            frameMosaic(trackReferenceStack, inputInterferograms, interferogram,
                rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1,
                updateTrack=False, phaseCompensation=True, resamplingMethod=1)

        create_xml(amplitude, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'amp')
        create_xml(interferogram, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'int')

        #if multiple frames, remove frame amplitudes/inteferograms to save space
        for x in inputAmplitudes:
            os.remove(x)
            os.remove(x+'.vrt')
            os.remove(x+'.xml')

        for x in inputInterferograms:
            os.remove(x)
            os.remove(x+'.vrt')
            os.remove(x+'.xml')

    #log output info
    log = '{} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now())
    log += '================================================================================================\n'
    #BUGFIX: the frame phase-diff diagnostics are only bound in the nframe > 1
    #branch above; logging them unconditionally raised NameError for
    #single-frame stacks.
    if nframe > 1:
        log += 'frame phase diff estimated: {}\n'.format(phaseDiffEst[1:])
        log += 'frame phase diff used: {}\n'.format(phaseDiffUsed[1:])
        log += 'frame phase diff used source: {}\n'.format(phaseDiffSource[1:])
        log += 'frame phase diff samples used: {}\n'.format(numberOfValidSamples[1:])
        log += '\n'

    #still inside ./insar, so the log goes to the pair directory
    with open(os.path.join('../', logFile), 'a') as f:
        f.write(log)

View File

@ -0,0 +1,167 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from StackPulic import loadTrack
from StackPulic import saveTrack
from StackPulic import stackDateStatistics
from StackPulic import acquisitionModesAlos2
def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='mosaic all swaths and frames to form an entire track')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date. format: YYMMDD')
    #fixed spelling in help text: 'seperated' -> 'separated'
    parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[],
            help = 'a number of secondary dates separated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, only process these dates')
    parser.add_argument('-ref_frame', dest='ref_frame', type=str, default=None,
            help = 'frame number of the swath whose grid is used as reference. e.g. 2800. default: first frame')
    parser.add_argument('-ref_swath', dest='ref_swath', type=int, default=None,
            help = 'swath number of the swath whose grid is used as reference. e.g. 1. default: first swath')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')

    if len(sys.argv) <= 1:
        #invoked without arguments: show usage and quit
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    #alosStack step: compute mosaic (frame and track) geometry parameters of
    #each date, strictly mirroring the actual image mosaicking, and save the
    #updated track parameter files.
    inps = cmdLineParse()

    #get user parameters from input
    idir = inps.idir
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    frameReference = inps.ref_frame
    swathReference = inps.ref_swath
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    #######################################################
    DEBUG=False

    spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()

    #get date statistics
    dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference)
    ndate = len(dates)
    nframe = len(frames)
    nswath = len(swaths)

    #find frame and swath indexes of reference swath
    if frameReference is None:
        frameReference = frames[0]
    if swathReference is None:
        swathReference = swaths[0]

    frameReferenceIndex = frames.index(frameReference)
    swathReferenceIndex = swaths.index(swathReference)

    print('resampling all frames and swaths to frame: {} (index: {}) swath: {} (index {})'.format(
        frameReference, frameReferenceIndex, swathReference, swathReferenceIndex))

    #mosaic parameters of each date
    #strictly follow the actual image mosaicking processing of reference (after resampling adjustment in resample_common_grid.py)
    #secondary sensingStart and startingRange are OK, no need to consider other things about secondary
    os.chdir(idir)
    for idate in range(ndate):
        #when -sec_date is given, only process those dates
        if dateSecondary != []:
            if dates[idate] not in dateSecondary:
                continue

        print('processing: {}'.format(dates[idate]))
        os.chdir(dates[idate])

        track = loadTrack('./', dates[idate])
        #swathReference is rebound here: from swath number to this date's swath object
        swathReference = track.frames[frameReferenceIndex].swaths[swathReferenceIndex]

        #1. mosaic swaths
        for i, frameNumber in enumerate(frames):
            startingRange = []
            sensingStart = []
            endingRange = []
            sensingEnd = []
            for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
                swath = track.frames[i].swaths[j]
                startingRange.append(swath.startingRange)
                endingRange.append(swath.startingRange+swath.rangePixelSize*swath.numberOfSamples)
                sensingStart.append(swath.sensingStart)
                sensingEnd.append(swath.sensingStart+datetime.timedelta(seconds=swath.azimuthLineInterval*swath.numberOfLines))

            #update frame parameters
            #########################################################
            frame = track.frames[i]
            #mosaic size
            frame.numberOfSamples = int(round((max(endingRange)-min(startingRange))/swathReference.rangePixelSize) / numberRangeLooks1)
            frame.numberOfLines = int(round((max(sensingEnd)-min(sensingStart)).total_seconds()/swathReference.azimuthLineInterval) / numberAzimuthLooks1)
            #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
            #range parameters
            frame.startingRange = min(startingRange)
            frame.rangeSamplingRate = swathReference.rangeSamplingRate
            frame.rangePixelSize = swathReference.rangePixelSize
            #azimuth parameters
            frame.sensingStart = min(sensingStart)
            frame.prf = swathReference.prf
            frame.azimuthPixelSize = swathReference.azimuthPixelSize
            frame.azimuthLineInterval = swathReference.azimuthLineInterval

        #2. mosaic frames
        startingRange = []
        sensingStart = []
        endingRange = []
        sensingEnd = []
        for i, frameNumber in enumerate(frames):
            frame = track.frames[i]
            startingRange.append(frame.startingRange)
            endingRange.append(frame.startingRange+numberRangeLooks1*frame.rangePixelSize*frame.numberOfSamples)
            sensingStart.append(frame.sensingStart)
            sensingEnd.append(frame.sensingStart+datetime.timedelta(seconds=numberAzimuthLooks1*frame.azimuthLineInterval*frame.numberOfLines))

        #update track parameters
        #########################################################
        #mosaic size
        track.numberOfSamples = round((max(endingRange)-min(startingRange))/(numberRangeLooks1*swathReference.rangePixelSize))
        track.numberOfLines = round((max(sensingEnd)-min(sensingStart)).total_seconds()/(numberAzimuthLooks1*swathReference.azimuthLineInterval))
        #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
        #range parameters
        track.startingRange = min(startingRange)
        track.rangeSamplingRate = swathReference.rangeSamplingRate
        track.rangePixelSize = swathReference.rangePixelSize
        #azimuth parameters
        track.sensingStart = min(sensingStart)
        track.prf = swathReference.prf
        track.azimuthPixelSize = swathReference.azimuthPixelSize
        track.azimuthLineInterval = swathReference.azimuthLineInterval

        #save mosaicking result
        saveTrack(track, dates[idate])
        os.chdir('../')

View File

@ -0,0 +1,195 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
from StackPulic import stackDateStatistics
from StackPulic import acquisitionModesAlos2
def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='create InSAR pairs')
    parser.add_argument('-idir1', dest='idir1', type=str, required=True,
            help = 'input directory where original data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-idir2', dest='idir2', type=str, required=True,
            help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-xml', dest='xml', type=str, default=None,
            help = 'alos2App.py input xml file, e.g. alos2App.xml. default: None')
    parser.add_argument('-odir', dest='odir', type=str, required=True,
            help = 'output directory')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date. format: YYMMDD')
    #fixed spelling in the help texts below: 'seperated' -> 'separated',
    #'acquistion(s)' -> 'acquisition(s)'
    parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None,
            help = 'a number of pairs separated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD YYMMDD-YYMMDD... This argument has highest priority. When provided, only process these pairs')
    parser.add_argument('-num', dest='num', type=int, default=None,
            help = 'number of subsequent acquisitions for each acquisition to pair up with. default: all pairs')
    parser.add_argument('-exc_date', dest='exc_date', type=str, nargs='+', default=None,
            help = 'a number of secondary dates separated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, these dates will be excluded from pairing up')
    parser.add_argument('-tsmin', dest='tsmin', type=float, default=None,
            help = 'minimum time span in years for pairing up. default: None')
    parser.add_argument('-tsmax', dest='tsmax', type=float, default=None,
            help = 'maximum time span in years for pairing up. default: None')

    if len(sys.argv) <= 1:
        #invoked without arguments: show usage and quit
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    #alosStack step: create one directory per InSAR pair under odir, populate it
    #with an alos2App.xml (optional), the track/frame parameter xmls, and
    #symlinks to the resampled SLCs of the two dates.
    inps = cmdLineParse()

    #get user parameters from input
    idir1 = inps.idir1
    idir2 = inps.idir2
    alos2AppXml = inps.xml
    odir = inps.odir
    dateReference = inps.ref_date
    pairsUser = inps.pairs
    subsequentNum = inps.num
    dateExcluded = inps.exc_date
    tsmin = inps.tsmin
    tsmax = inps.tsmax
    #######################################################
    DEBUG=False

    spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()

    #get date statistics, using resampled version
    dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir2, dateReference)
    ndate = len(dates)
    nframe = len(frames)
    nswath = len(swaths)

    #default: pair every date with all later dates
    if subsequentNum is None:
        subsequentNum = ndate - 1

    #read standard configurations
    if alos2AppXml is not None:
        tree = ET.parse(alos2AppXml)
        root = tree.getroot()

    datefmt = "%y%m%d"
    pairsCreated = []
    for i in range(ndate):
        mdate = dates[i]
        mtime = datetime.datetime.strptime(mdate, datefmt)
        for j in range(subsequentNum):
            if i+j+1 <= ndate - 1:
                sdate = dates[i+j+1]
                stime = datetime.datetime.strptime(sdate, datefmt)
                pair = mdate + '-' + sdate
                #time span of the pair in years
                ts = np.absolute((stime - mtime).total_seconds()) / (365.0 * 24.0 * 3600)

                #1. determine whether process this pair
                if pairsUser is not None:
                    #-pairs has highest priority: only the listed pairs are made
                    if pair not in pairsUser:
                        continue
                else:
                    if dateExcluded is not None:
                        if (mdate in dateExcluded) or (sdate in dateExcluded):
                            continue
                    if tsmin is not None:
                        if ts < tsmin:
                            continue
                    if tsmax is not None:
                        if ts > tsmax:
                            continue

                #2. create pair dir
                pairsCreated.append(pair)
                print('creating pair: {}'.format(pair))
                pairDir = os.path.join(odir, pair)
                os.makedirs(pairDir, exist_ok=True)
                #create xml
                if alos2AppXml is not None:
                    #blank out the data directories; they are not needed in stack processing
                    safe = root.find("component/property[@name='reference directory']")
                    #safe.text = '{}'.format(os.path.join(inps.dir, mdate))
                    safe.text = 'None'
                    safe = root.find("component/property[@name='secondary directory']")
                    #safe.text = '{}'.format(os.path.join(inps.dir, sdate))
                    safe.text = 'None'
                    tree.write(os.path.join(pairDir, 'alos2App.xml'))

                #3. make frame/swath directories, and copy *.track.xml and *.frame.xml
                #the stack reference date's parameters come from idir2 (resampled),
                #all other dates' from idir1 (original)
                if mdate != dates[dateIndexReference]:
                    shutil.copy2(os.path.join(idir1, mdate, mdate+'.track.xml'), pairDir)
                if sdate != dates[dateIndexReference]:
                    shutil.copy2(os.path.join(idir1, sdate, sdate+'.track.xml'), pairDir)
                shutil.copy2(os.path.join(idir2, dates[dateIndexReference], dates[dateIndexReference]+'.track.xml'), pairDir)

                for iframe, frameNumber in enumerate(frames):
                    frameDir = 'f{}_{}'.format(iframe+1, frameNumber)
                    os.makedirs(os.path.join(pairDir, frameDir), exist_ok=True)

                    if mdate != dates[dateIndexReference]:
                        shutil.copy2(os.path.join(idir1, mdate, frameDir, mdate+'.frame.xml'), os.path.join(pairDir, frameDir))
                    if sdate != dates[dateIndexReference]:
                        shutil.copy2(os.path.join(idir1, sdate, frameDir, sdate+'.frame.xml'), os.path.join(pairDir, frameDir))
                    shutil.copy2(os.path.join(idir2, dates[dateIndexReference], frameDir, dates[dateIndexReference]+'.frame.xml'), os.path.join(pairDir, frameDir))

                    for jswath, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
                        swathDir = 's{}'.format(swathNumber)
                        os.makedirs(os.path.join(pairDir, frameDir, swathDir), exist_ok=True)

                        #remove any stale link, then relink the SLC (relative path)
                        #and copy its metadata
                        if os.path.isfile(os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')):
                            os.remove(os.path.join(pairDir, frameDir, swathDir, mdate+'.slc'))
                        relpath = os.path.relpath(os.path.join(idir2, mdate, frameDir, swathDir), os.path.join(pairDir, frameDir, swathDir))
                        os.symlink(os.path.join(relpath, mdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, mdate+'.slc'))
                        #os.symlink(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, mdate+'.slc'))
                        shutil.copy2(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc.vrt'), os.path.join(pairDir, frameDir, swathDir))
                        shutil.copy2(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc.xml'), os.path.join(pairDir, frameDir, swathDir))

                        if os.path.isfile(os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')):
                            os.remove(os.path.join(pairDir, frameDir, swathDir, sdate+'.slc'))
                        relpath = os.path.relpath(os.path.join(idir2, sdate, frameDir, swathDir), os.path.join(pairDir, frameDir, swathDir))
                        os.symlink(os.path.join(relpath, sdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, sdate+'.slc'))
                        #os.symlink(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, sdate+'.slc'))
                        shutil.copy2(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc.vrt'), os.path.join(pairDir, frameDir, swathDir))
                        shutil.copy2(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc.xml'), os.path.join(pairDir, frameDir, swathDir))

    print('total number of pairs created: {}'.format(len(pairsCreated)))
    if pairsUser is not None:
        if sorted(pairsUser) != sorted(pairsCreated):
            print()
            print('WARNING: user has specified pairs to process, but pairs created are different from user specified pairs')
            print(' user specified pairs: {}'.format(', '.join(pairsUser)))
            print(' pairs created: {}'.format(', '.join(pairsCreated)))
            print()

View File

@ -0,0 +1,122 @@
#!/usr/bin/env python3
#Cunren Liang, JPL/Caltech, 28-NOV-2016
#https://matplotlib.org/3.1.1/gallery/text_labels_and_annotations/date.html
import os
import sys
import glob
import datetime
import argparse
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
def read_alosstack_baseline(baseline_file):
    '''read baseline file generated by alosStack
    '''
    result = {}
    with open(baseline_file, 'r') as fp:
        records = [entry for entry in fp if entry.strip() != '']
    #the first two non-empty lines are a header; each record is whitespace
    #separated: reference date, secondary date, ..., perpendicular baseline
    for record in records[2:]:
        fields = record.split()
        result[fields[1]] = float(fields[3])
        #the reference date itself sits at zero baseline
        result[fields[0]] = 0
    return result
def cmdLineParse():
    '''
    Command line parser.
    '''
    parser = argparse.ArgumentParser(description='plot baselines')
    parser.add_argument('-baseline', dest='baseline', type=str, required=True,
            help = 'baseline file')
    parser.add_argument('-pairs_dir', dest='pairs_dir', type=str, required=True,
            help = 'pairs directory containing YYMMDD-YYMMDD folders. Only folders are recognized.')
    #fixed spelling in help text: 'seperated' -> 'separated'
    parser.add_argument('-pairs_exc', dest='pairs_exc', type=str, nargs='+', default=None,
            help = 'a number of pairs separated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD... If provided, these pairs will be excluded from plotting')
    parser.add_argument('-output', dest='output', type=str, default='baseline.pdf',
            help = 'output file name')

    if len(sys.argv) <= 1:
        #invoked without arguments: show usage and quit
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    #Plot the perpendicular baseline of every date (green dots) and connect the
    #dates of each processed pair (blue lines); save the figure as a PDF.
    inps = cmdLineParse()

    baseline = inps.baseline
    pairs_dir = inps.pairs_dir
    pairs_exc = inps.pairs_exc
    output = inps.output

    baseline_dict = read_alosstack_baseline(baseline)
    #pair folders are named YYMMDD-YYMMDD; only directories are kept
    pairs = [os.path.basename(x) for x in sorted(glob.glob(os.path.join(pairs_dir, '*-*'))) if os.path.isdir(x)]
    if pairs_exc != None:
        for x in pairs_exc:
            if x in pairs:
                pairs.remove(x)

    #start plot
    plt.rcParams['font.family'] = 'Times New Roman'
    plt.rcParams['font.size'] = 12
    fig, ax = plt.subplots()

    #one point per date
    time = [datetime.datetime.strptime(x, "%y%m%d") for x in baseline_dict]
    baseline = [baseline_dict[x] for x in baseline_dict]
    ax.plot(time, baseline, 'o', alpha=0.7, c='g')

    year_min = datetime.datetime(min(time).year, 1, 1)
    year_max = datetime.datetime(max(time).year+1, 1, 1)

    #one line segment per pair; note time/baseline are rebound inside the loop
    for x in pairs:
        rdate, sdate = x.split('-')
        rtime = datetime.datetime.strptime(rdate, "%y%m%d")
        stime = datetime.datetime.strptime(sdate, "%y%m%d")
        time = [rtime, stime]
        baseline = [baseline_dict[rdate], baseline_dict[sdate]]
        ax.plot(time, baseline, '-', lw=.5, c='b')

    ax.xaxis.set_major_locator(mdates.YearLocator())
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
    ax.xaxis.set_minor_locator(mdates.MonthLocator())

    ax.minorticks_on()
    ax.tick_params('both', length=7, which='major', width=1)
    ax.tick_params('both', length=4, which='minor', width=0.5)
    ax.set_xlim(year_min, year_max)
    ax.format_xdata = mdates.DateFormatter('%Y-%m-%d')

    # rotates and right aligns the x labels, and moves the bottom of the
    # axes up to make room for them
    #fig.autofmt_xdate()

    ax.set_xlabel('Time [years]')
    ax.set_ylabel('Perpendicular Baseline [meters]')

    #always writes a .pdf regardless of the extension of -output
    plt.savefig(os.path.splitext(output)[0]+'.pdf')

View File

@ -0,0 +1,116 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from isceobj.Alos2Proc.runRdrDemOffset import rdrDemOffset
from StackPulic import loadProduct
from StackPulic import createObject
def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='estimate offset between radar and dem')
    parser.add_argument('-track', dest='track', type=str, required=True,
            help = 'track parameter file')
    parser.add_argument('-dem', dest='dem', type=str, required=True,
            help = 'dem used for geometrical coregistration')
    parser.add_argument('-wbd', dest='wbd', type=str, required=True,
            help = 'water body in radar coordinate')
    parser.add_argument('-hgt', dest='hgt', type=str, required=True,
            help = 'height in radar coordinate computed in geometrical coregistration')
    parser.add_argument('-amp', dest='amp', type=str, required=True,
            help = 'amplitude image')
    #fixed spelling in help text: 'paramters' -> 'parameters'
    parser.add_argument('-output', dest='output', type=str, required=True,
            help = 'output file for saving the affine transformation parameters')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks_sim', dest='nrlks_sim', type=int, default=None,
            help = 'number of range looks when simulating radar image')
    parser.add_argument('-nalks_sim', dest='nalks_sim', type=int, default=None,
            help = 'number of azimuth looks when simulating radar image')

    if len(sys.argv) <= 1:
        #invoked without arguments: show usage and quit
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':
    #alosStack step: estimate the affine transformation between the simulated
    #radar image (from the DEM) and the actual amplitude image, and save the
    #resulting parameters to a text file under ./insar.
    inps = cmdLineParse()

    #get user parameters from input
    trackParameter = inps.track
    demFile = inps.dem
    wbdOut = inps.wbd
    height = inps.hgt
    amplitude = inps.amp
    output = inps.output
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    numberRangeLooksSim = inps.nrlks_sim
    numberAzimuthLooksSim = inps.nalks_sim
    #######################################################

    #prepare amplitude image: rdrDemOffset expects it inside ./insar, so link it
    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)
    if not os.path.isfile(os.path.basename(amplitude)):
        os.symlink(os.path.join('../', amplitude), os.path.basename(amplitude))
    if not os.path.isfile(os.path.basename(amplitude)+'.vrt'):
        os.symlink(os.path.join('../', amplitude)+'.vrt', os.path.basename(amplitude)+'.vrt')
    if not os.path.isfile(os.path.basename(amplitude)+'.xml'):
        os.symlink(os.path.join('../', amplitude)+'.xml', os.path.basename(amplitude)+'.xml')
    os.chdir('../')

    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)
    simFile = 'radar_{}.sim'.format(ml1)

    #build a minimal application-like object carrying the attributes that
    #rdrDemOffset reads/writes
    self = createObject()
    self._insar = createObject()
    self._insar.dem = demFile
    self._insar.numberRangeLooksSim = numberRangeLooksSim
    self._insar.numberRangeLooks1 = numberRangeLooks1
    self._insar.numberAzimuthLooksSim = numberAzimuthLooksSim
    self._insar.numberAzimuthLooks1 = numberAzimuthLooks1
    self._insar.height = os.path.basename(height)
    self._insar.sim = simFile
    self._insar.amplitude = os.path.basename(amplitude)
    self._insar.wbdOut = os.path.basename(wbdOut)
    self._insar.radarDemAffineTransform = None

    referenceTrack = loadProduct(trackParameter)
    #fills in numberRangeLooksSim/numberAzimuthLooksSim (if None) and
    #radarDemAffineTransform on self._insar
    rdrDemOffset(self, referenceTrack, catalog=None)

    os.chdir(insarDir)
    #save the result
    with open(output, 'w') as f:
        f.write('{} {}\n{}'.format(self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, self._insar.radarDemAffineTransform))

    #remove amplitude image links created above
    os.remove(os.path.basename(amplitude))
    os.remove(os.path.basename(amplitude)+'.vrt')
    os.remove(os.path.basename(amplitude)+'.xml')
    os.chdir('../')

View File

@ -0,0 +1,92 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar
from isceobj.Alos2Proc.runRdr2Geo import topoCPU
from isceobj.Alos2Proc.runRdr2Geo import topoGPU
from StackPulic import loadTrack
from StackPulic import hasGPU
def cmdLineParse():
    """Parse command-line options for computing lat/lon/hgt/los/water body in radar coordinates."""
    import sys
    import argparse

    p = argparse.ArgumentParser(description='compute longitude, latitude, height and water body from radar parameters')
    # required inputs
    p.add_argument('-date', dest='date', type=str, required=True,
                   help='date. format: YYMMDD')
    p.add_argument('-dem', dest='dem', type=str, required=True,
                   help='dem file')
    p.add_argument('-wbd', dest='wbd', type=str, required=True,
                   help='water body file')
    # optional multilook settings
    p.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
                   help='number of range looks 1. default: 1')
    p.add_argument('-nalks1', dest='nalks1', type=int, default=1,
                   help='number of azimuth looks 1. default: 1')
    # GPU is opt-in via a flag
    p.add_argument('-gpu', dest='gpu', action='store_true', default=False,
                   help='use GPU when available')

    # invoked with no arguments at all: show usage and exit with an error code
    if len(sys.argv) <= 1:
        print('')
        p.print_help()
        sys.exit(1)
    return p.parse_args()
if __name__ == '__main__':
    # Run rdr2geo (topo) for one date: compute latitude, longitude, height,
    # line-of-sight, and water body rasters in radar coordinates.
    inps = cmdLineParse()

    # get user parameters from input
    date = inps.date
    demFile = inps.dem
    wbdFile = inps.wbd
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    useGPU = inps.gpu
    #######################################################

    # absolute paths, since we chdir into the working directory below
    demFile = os.path.abspath(demFile)
    wbdFile = os.path.abspath(wbdFile)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    # output names share a common multilook suffix, e.g. '<date>_1rlks_1alks.lat'
    ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1)
    latitude = date + ml1 + '.lat'
    longitude = date + ml1 + '.lon'
    height = date + ml1 + '.hgt'
    los = date + ml1 + '.los'
    wbdOut = date + ml1 + '.wbd'

    track = loadTrack('../', date)
    # run topo on GPU only when both requested and available; otherwise CPU
    if useGPU and hasGPU():
        topoGPU(track, numberRangeLooks1, numberAzimuthLooks1, demFile,
            latitude, longitude, height, los)
    else:
        # NOTE(review): snwe is assigned only on this branch and never used
        # afterwards - confirm whether the bounding box is needed here
        snwe = topoCPU(track, numberRangeLooks1, numberAzimuthLooks1, demFile,
            latitude, longitude, height, los)
    # interpolate the water body file onto the radar (lat/lon) grid
    waterBodyRadar(latitude, longitude, wbdFile, wbdOut)

View File

@ -0,0 +1,301 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
import isceobj.Sensor.MultiMode as MultiMode
from StackPulic import saveProduct
from StackPulic import acquisitionModesAlos2
def getAlos2StackDirs(dataDir):
    '''
    1. this function takes the data directory containing a list of folders, in each of
    which data of a date is located, and then returns a list of date directory sorted
    by acquisition date.
    2. under dataDir, only folders are recognized
    '''
    import os
    import glob

    # only sub-directories of dataDir are considered date folders
    folders = [x for x in sorted(glob.glob(os.path.join(dataDir, '*'))) if os.path.isdir(x)]

    # first leader (LED) file in each date folder; its file name carries the
    # acquisition date as the second-to-last dash-separated field
    leaders = []
    for folder in folders:
        leaders.append(sorted(glob.glob(os.path.join(folder, 'LED-ALOS2*-*-*')))[0])

    # order by the date embedded in the LED file name, then strip the file
    # name so only the containing date directory remains
    leaders.sort(key=lambda led: led.split('-')[-2])
    return [os.path.dirname(x) for x in leaders]
def cmdLineParse():
    """Parse command-line options for reading a stack of ALOS-2 dates."""
    import sys
    import argparse

    p = argparse.ArgumentParser(description='read a number of dates of data')
    # input/output locations and reference date
    p.add_argument('-idir', dest='idir', type=str, required=True,
                   help='input directory where data of each date is located. only folders are recognized')
    p.add_argument('-odir', dest='odir', type=str, required=True,
                   help='output directory where data of each date is output')
    p.add_argument('-ref_date', dest='ref_date', type=str, required=True,
                   help='reference date. format: YYMMDD')
    p.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[],
                   help='a number of secondary dates seperated by blanks, can also include reference date. format: YYMMDD YYMMDD YYMMDD. If provided, only read data of these dates')
    # data selection
    p.add_argument('-pol', dest='pol', type=str, default='HH',
                   help='polarization to process, default: HH')
    p.add_argument('-frames', dest='frames', type=str, nargs='+', default=None,
                   help='frames to process, must specify frame numbers of reference if frames are different among dates. e.g. -frames 2800 2850')
    p.add_argument('-starting_swath', dest='starting_swath', type=int, default=None,
                   help='starting swath to process.')
    p.add_argument('-ending_swath', dest='ending_swath', type=int, default=None,
                   help='starting swath to process')
    p.add_argument('-virtual', dest='virtual', action='store_true', default=False,
                   help='use virtual file')

    # invoked with no arguments at all: show usage and exit with an error code
    if len(sys.argv) <= 1:
        print('')
        p.print_help()
        sys.exit(1)
    return p.parse_args()
if __name__ == '__main__':
    # Read raw ALOS-2 data of all (or selected) dates into per-date product
    # directories: odir/<date>/f<i>_<frame>/s<swath>/<date>.slc plus
    # frame/track xml parameter files.
    inps = cmdLineParse()

    # get user parameters from input
    idir = inps.idir
    odir = inps.odir
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    pol = inps.pol
    framesInput = inps.frames
    startingSwath = inps.starting_swath
    endingSwath = inps.ending_swath
    useVirtualFile = inps.virtual
    #######################################################

    # date directories sorted by acquistion date retrieved from filenames under each directory
    dateDirs = getAlos2StackDirs(os.path.abspath(idir))
    ndate = len(dateDirs)

    if framesInput is not None:
        framesInput = sorted(framesInput)
    else:
        framesInput = None

    # 1. find index of reference date:
    dates = []
    dateIndexReference = None
    for i in range(ndate):
        ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*')))
        # acquisition date is the second-to-last dash-separated field of the LED file name
        date = os.path.basename(ledFiles[0]).split('-')[-2]
        dates.append(date)
        if date == dateReference:
            dateIndexReference = i
    if dateIndexReference is None:
        raise Exception('cannot get reference date {} from the data list, pleasae check your input'.format(dateReference))

    # 2. check if data are in the same mode
    spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()

    # first frame of reference date; mode code is the first three characters
    # of the last dash-separated field of the LED file name
    ledFilesReference = sorted(glob.glob(os.path.join(dateDirs[dateIndexReference], 'LED-ALOS2*-*-*')))
    modeReference = os.path.basename(ledFilesReference[0]).split('-')[-1][0:3]

    # pick the mode group the reference mode belongs to
    if modeReference in spotlightModes:
        modeGroupReference = spotlightModes
    if modeReference in stripmapModes:
        modeGroupReference = stripmapModes
    if modeReference in scansarNominalModes:
        modeGroupReference = scansarNominalModes
    if modeReference in scansarWideModes:
        modeGroupReference = scansarWideModes

    # check aquistion mode of all frames of each date
    for i in range(ndate):
        ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*')))
        nframe = len(ledFiles)
        for j in range(nframe):
            mode = os.path.basename(ledFiles[j]).split('-')[-1][0:3]
            if mode not in modeGroupReference:
                raise Exception('all data must be in the same acquistion mode: spotlight, stripmap, or ScanSAR mode')

    # 3. find frame numbers and save it in a 2-d list (frames[date][frame])
    frames = []
    # if not set, find frames automatically
    if framesInput is None:
        for i in range(ndate):
            frames0 = []
            ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*')))
            for led in ledFiles:
                # frame number: last four digits of the third-to-last field of the LED file name
                frames0.append( os.path.basename(led).split('-')[-3][-4:] )
            frames.append(sorted(frames0))
    else:
        # user-specified frames apply to every date
        for i in range(ndate):
            frames.append(framesInput)

    framesReference = frames[dateIndexReference]

    # check if there is equal number of frames
    nframe = len(frames[dateIndexReference])
    for i in range(ndate):
        if nframe != len(frames[i]):
            raise Exception('there are not equal number of frames to process, please check your directory of each date')

    # 4. set starting and ending swaths (defaults depend on acquisition mode)
    if modeReference in spotlightModes:
        if startingSwath is None:
            startingSwath = 1
        if endingSwath is None:
            endingSwath = 1
    if modeReference in stripmapModes:
        if startingSwath is None:
            startingSwath = 1
        if endingSwath is None:
            endingSwath = 1
    if modeReference in scansarNominalModes:
        if startingSwath is None:
            startingSwath = 1
        if endingSwath is None:
            endingSwath = 5
    if modeReference in scansarWideModes:
        if startingSwath is None:
            startingSwath = 1
        if endingSwath is None:
            endingSwath = 7

    # print result
    print('\nlist of dates:')
    print(' index date frames')
    print('=======================================================')
    for i in range(ndate):
        if dates[i] == dateReference:
            print(' %03d %s'%(i, dates[i])+' {}'.format(frames[i])+' reference')
        else:
            print(' %03d %s'%(i, dates[i])+' {}'.format(frames[i]))
    print('\n')

    ##################################################
    # 1. create directories and read data
    ##################################################
    if not os.path.isdir(odir):
        print('output directory {} does not exist, create'.format(odir))
        os.makedirs(odir, exist_ok=True)

    os.chdir(odir)
    for i in range(ndate):
        ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*')))
        date = os.path.basename(ledFiles[0]).split('-')[-2]
        dateDir = date

        # when a secondary-date list is given, only read those dates
        if dateSecondary != []:
            if date not in dateSecondary:
                continue

        # skip dates already read by a previous run
        if os.path.isdir(dateDir):
            print('{} already exists, do not create'.format(dateDir))
            continue
        else:
            os.makedirs(dateDir, exist_ok=True)
        os.chdir(dateDir)

        sensor = MultiMode.createSensor(sensor='ALOS2', name=None)
        sensor.configure()
        sensor.track.configure()

        for j in range(nframe):
            # frame number starts with 1
            frameDir = 'f{}_{}'.format(j+1, framesReference[j])
            os.makedirs(frameDir, exist_ok=True)
            os.chdir(frameDir)

            # attach a frame to reference and secondary
            frameObj = MultiMode.createFrame()
            frameObj.configure()
            sensor.track.frames.append(frameObj)

            # swath number starts with 1
            for k in range(startingSwath, endingSwath+1):
                print('processing date {} frame {} swath {}'.format(date, framesReference[j], k))
                swathDir = 's{}'.format(k)
                os.makedirs(swathDir, exist_ok=True)
                os.chdir(swathDir)

                # attach a swath to sensor
                swathObj = MultiMode.createSwath()
                swathObj.configure()
                sensor.track.frames[-1].swaths.append(swathObj)

                # setup sensor; note: file lookup uses this date's own frame
                # number (frames[i][j]), which may differ from the reference's
                #sensor.leaderFile = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*{}-*-*'.format(framesReference[j]))))[0]
                sensor.leaderFile = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*{}-*-*'.format(frames[i][j]))))[0]
                if modeReference in scansarModes:
                    # ScanSAR image files carry a per-swath -F<k> suffix
                    #sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*-F{}'.format(pol.upper(), framesReference[j], k))))[0]
                    sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*-F{}'.format(pol.upper(), frames[i][j], k))))[0]
                else:
                    #sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*'.format(pol.upper(), framesReference[j]))))[0]
                    sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*'.format(pol.upper(), frames[i][j]))))[0]
                sensor.outputFile = date + '.slc'
                sensor.useVirtualFile = useVirtualFile
                # read sensor: decode image/leader files and populate the
                # swath/frame/track parameter objects
                (imageFDR, imageData)=sensor.readImage()
                (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=sensor.readLeader()
                sensor.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)
                sensor.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)
                sensor.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)
                os.chdir('../')
            #!!!frame numbers of all dates are reset to those of reference date
            sensor.track.frames[j].frameNumber = framesReference[j]
            saveProduct(sensor.track.frames[-1], date + '.frame.xml')
            os.chdir('../')
        saveProduct(sensor.track, date + '.track.xml')
        os.chdir('../')

View File

@ -0,0 +1,101 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from contrib.alos2proc_f.alos2proc_f import rect_with_looks
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
from StackPulic import createObject
def cmdLineParse():
    """Parse command-line options for rectifying the range offset."""
    import sys
    import argparse

    p = argparse.ArgumentParser(description='rectify range offset')
    # affine transform and input/output files
    p.add_argument('-aff', dest='aff', type=str, required=True,
                   help='affine transform paramter file')
    p.add_argument('-input', dest='input', type=str, default='./',
                   help='input file')
    p.add_argument('-output', dest='output', type=str, required=True,
                   help='output file')
    # optional multilook settings
    p.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
                   help='number of range looks 1 . default: 1')
    p.add_argument('-nalks1', dest='nalks1', type=int, default=1,
                   help='number of azimuth looks 1. default: 1')

    # invoked with no arguments at all: show usage and exit with an error code
    if len(sys.argv) <= 1:
        print('')
        p.print_help()
        sys.exit(1)
    return p.parse_args()
if __name__ == '__main__':
    # Rectify (resample) the range offset raster using the affine transform
    # estimated between the simulated radar image and the amplitude image.
    inps = cmdLineParse()

    # get user parameters from input
    aff = inps.aff
    rangeOffset = inps.input
    rectRangeOffset = inps.output
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    #######################################################
    DEBUG=False

    # minimal stand-in for the full application object; downstream code
    # expects these attributes under self._insar
    self = createObject()
    self._insar = createObject()
    self._insar.rangeOffset = rangeOffset
    self._insar.rectRangeOffset = rectRangeOffset
    self._insar.numberRangeLooks1 = numberRangeLooks1
    self._insar.numberAzimuthLooks1 = numberAzimuthLooks1

    # read affine transform parameters: first line holds the two simulation
    # look numbers, second line a python-style list of six affine parameters
    # NOTE(review): the strip('[').strip(']') parsing assumes the second line
    # has no trailing newline after the ']' - true for files written by the
    # offset-estimation script, but verify for hand-edited files
    with open(aff, 'r') as f:
        lines = f.readlines()
    self._insar.numberRangeLooksSim = int(lines[0].split()[0])
    self._insar.numberAzimuthLooksSim = int(lines[0].split()[1])
    self._insar.radarDemAffineTransform = [float(x) for x in lines[1].strip('[').strip(']').split(',')]

    if DEBUG:
        print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
        print('{} {}\n{}'.format(self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, self._insar.radarDemAffineTransform))
        print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')

    # rectify
    rgoff = isceobj.createImage()
    rgoff.load(self._insar.rangeOffset+'.xml')

    # identity transform: just link the input instead of resampling
    if self._insar.radarDemAffineTransform == [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]:
        if not os.path.isfile(self._insar.rectRangeOffset):
            os.symlink(self._insar.rangeOffset, self._insar.rectRangeOffset)
        create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float')
    else:
        # apply the affine transform with bilinear interpolation
        rect_with_looks(self._insar.rangeOffset,
                        self._insar.rectRangeOffset,
                        rgoff.width, rgoff.length,
                        rgoff.width, rgoff.length,
                        self._insar.radarDemAffineTransform[0], self._insar.radarDemAffineTransform[1],
                        self._insar.radarDemAffineTransform[2], self._insar.radarDemAffineTransform[3],
                        self._insar.radarDemAffineTransform[4], self._insar.radarDemAffineTransform[5],
                        self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1,
                        self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
                        'REAL',
                        'Bilinear')
        create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float')

View File

@ -0,0 +1,500 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj, stdproc
from isceobj.Util.Poly2D import Poly2D
from isceobj.Location.Offset import OffsetField, Offset
from isceobj.Alos2Proc.Alos2ProcPublic import readOffset
from isceobj.Alos2Proc.runSwathOffset import swathOffset
from contrib.alos2proc.alos2proc import rg_filter
from StackPulic import loadTrack
from StackPulic import saveTrack
from StackPulic import subbandParameters
from StackPulic import stackDateStatistics
from StackPulic import acquisitionModesAlos2
def cmdLineParse():
    """Parse command-line options for resampling a stack to a common grid."""
    import sys
    import argparse

    p = argparse.ArgumentParser(description='resample data to a common grid')
    # input/output locations and dates
    p.add_argument('-idir', dest='idir', type=str, required=True,
                   help='input directory where data of each date (YYMMDD) is located. only folders are recognized')
    p.add_argument('-odir', dest='odir', type=str, required=True,
                   help='output directory where resampled version of each date is output')
    p.add_argument('-ref_date', dest='ref_date', type=str, required=True,
                   help='reference date. format: YYMMDD')
    p.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[],
                   help='a number of secondary dates seperated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, only resample these dates')
    # reference grid selection
    p.add_argument('-ref_frame', dest='ref_frame', type=str, default=None,
                   help='frame number of the swath whose grid is used as reference. e.g. 2800. default: first frame')
    p.add_argument('-ref_swath', dest='ref_swath', type=int, default=None,
                   help='swath number of the swath whose grid is used as reference. e.g. 1. default: first swath')
    # offset quantization and subband options
    p.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
                   help='range offsets between swaths/frames should be integer multiples of -nrlks1. default: 1 ')
    p.add_argument('-nalks1', dest='nalks1', type=int, default=14,
                   help='azimuth offsets between swaths/frames should be integer multiples of -nalks1. default: 14')
    p.add_argument('-subband', dest='subband', action='store_true', default=False,
                   help='create and resample subband SLCs')

    # invoked with no arguments at all: show usage and exit with an error code
    if len(sys.argv) <= 1:
        print('')
        p.print_help()
        sys.exit(1)
    return p.parse_args()
if __name__ == '__main__':
inps = cmdLineParse()
#get user parameters from input
idir = inps.idir
odir = inps.odir
dateReference = inps.ref_date
dateSecondary = inps.sec_date
frameReference = inps.ref_frame
swathReference = inps.ref_swath
nRange = inps.nrlks1
nAzimuth = inps.nalks1
subbandFlag = inps.subband
#######################################################
DEBUG=False
spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()
#get date statistics
dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference)
ndate = len(dates)
nframe = len(frames)
nswath = len(swaths)
if frameReference is None:
frameReference = frames[0]
else:
if frameReference not in frames:
raise Exception('specified -ref_frame {} not in frame list {}'.format(frameReference, frames))
if swathReference is None:
swathReference = swaths[0]
else:
if swathReference not in swaths:
raise Exception('specified -ref_swath {} not in swath list {}'.format(swathReference, swaths))
#find frame and swath indexes of reference swath
frameReferenceIndex = frames.index(frameReference)
swathReferenceIndex = swaths.index(swathReference)
print('resampling all frames and swaths to frame: {} (index: {}) swath: {} (index {})'.format(
frameReference, frameReferenceIndex, swathReference, swathReferenceIndex))
#read swath offsets and save in 2-d lists
swathRangeOffsetGeometrical = []
swathAzimuthOffsetGeometrical = []
swathRangeOffsetMatching = []
swathAzimuthOffsetMatching = []
for i, frameNumber in enumerate(frames):
swathRangeOffsetGeometrical0 = []
swathAzimuthOffsetGeometrical0 = []
swathRangeOffsetMatching0 = []
swathAzimuthOffsetMatching0 = []
if nswath >= 2:
frameDir = 'f{}_{}'.format(i+1, frameNumber)
with open(os.path.join(idir, dateReference, frameDir, 'mosaic/swath_offset.txt'), 'r') as f:
lines = f.readlines()
for linex in lines:
if 'range offset' in linex:
swathRangeOffsetGeometrical0.append(float(linex.split()[3]))
swathRangeOffsetMatching0.append(float(linex.split()[4]))
if 'azimuth offset' in linex:
swathAzimuthOffsetGeometrical0.append(float(linex.split()[3]))
swathAzimuthOffsetMatching0.append(float(linex.split()[4]))
else:
swathRangeOffsetGeometrical0.append(0.0)
swathRangeOffsetMatching0.append(0.0)
swathAzimuthOffsetGeometrical0.append(0.0)
swathAzimuthOffsetMatching0.append(0.0)
swathRangeOffsetGeometrical.append(swathRangeOffsetGeometrical0)
swathAzimuthOffsetGeometrical.append(swathAzimuthOffsetGeometrical0)
swathRangeOffsetMatching.append(swathRangeOffsetMatching0)
swathAzimuthOffsetMatching.append(swathAzimuthOffsetMatching0)
#read frame offsets and save in 1-d list
frameRangeOffsetGeometrical = []
frameAzimuthOffsetGeometrical = []
frameRangeOffsetMatching = []
frameAzimuthOffsetMatching = []
if nframe >= 2:
with open(os.path.join(idir, dateReference, 'insar/frame_offset.txt'), 'r') as f:
lines = f.readlines()
for linex in lines:
if 'range offset' in linex:
frameRangeOffsetGeometrical.append(float(linex.split()[3]))
frameRangeOffsetMatching.append(float(linex.split()[4]))
if 'azimuth offset' in linex:
frameAzimuthOffsetGeometrical.append(float(linex.split()[3]))
frameAzimuthOffsetMatching.append(float(linex.split()[4]))
else:
frameRangeOffsetGeometrical.append(0.0)
frameRangeOffsetMatching.append(0.0)
frameAzimuthOffsetGeometrical.append(0.0)
frameAzimuthOffsetMatching.append(0.0)
#compute accurate starting range and sensing start using offset file for reference date
#swath offset is computed between adjacent swaths within a frame, offset unit: first swath sample size
#frame offset is computed between first swaths of adjacent frames, offset unit: first swath sample size
startingRangeAll = [[None for j in range(nswath)] for i in range(nframe)]
sensingStartAll = [[None for j in range(nswath)] for i in range(nframe)]
trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference])
for i, frameNumber in enumerate(frames):
#startingRange and sensingStart of first swath of current frame
# for i1 in range(i+1):
# startingRangeFirst = trackReference.frames[0].swaths[0].startingRange - \
# frameRangeOffsetMatching[i1] * trackReference.frames[0].swaths[0].rangePixelSize
# sensingStartFirst = trackReference.frames[0].swaths[0].sensingStart - \
# datetime.timedelta(seconds = frameAzimuthOffsetMatching[i1] * trackReference.frames[0].swaths[0].azimuthLineInterval)
startingRangeFirst = trackReference.frames[0].swaths[0].startingRange - \
sum(frameRangeOffsetMatching[0:i+1]) * trackReference.frames[0].swaths[0].rangePixelSize
sensingStartFirst = trackReference.frames[0].swaths[0].sensingStart - \
datetime.timedelta(seconds = sum(frameAzimuthOffsetMatching[0:i+1]) * trackReference.frames[0].swaths[0].azimuthLineInterval)
#startingRange and sensingStart of each swath of current frame
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
# for j1 in range(j+1):
# startingRangeAll[i][j] = startingRangeFirst - \
# swathRangeOffsetMatching[i][j1] * trackReference.frames[i].swaths[0].rangePixelSize
# sensingStartAll[i][j] = sensingStartFirst - \
# datetime.timedelta(seconds = swathAzimuthOffsetMatching[i][j1] * trackReference.frames[i].swaths[0].azimuthLineInterval)
startingRangeAll[i][j] = startingRangeFirst - \
sum(swathRangeOffsetMatching[i][0:j+1]) * trackReference.frames[i].swaths[0].rangePixelSize
sensingStartAll[i][j] = sensingStartFirst - \
datetime.timedelta(seconds = sum(swathAzimuthOffsetMatching[i][0:j+1]) * trackReference.frames[i].swaths[0].azimuthLineInterval)
#check computation result
if DEBUG:
for i, frameNumber in enumerate(frames):
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
print(i, j, (trackReference.frames[i].swaths[j].startingRange-startingRangeAll[i][j])/trackReference.frames[0].swaths[0].rangePixelSize,
(trackReference.frames[i].swaths[j].sensingStart-sensingStartAll[i][j]).total_seconds()/trackReference.frames[0].swaths[0].azimuthLineInterval)
#update startingRange and sensingStart of reference track
for i, frameNumber in enumerate(frames):
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
trackReference.frames[i].swaths[j].startingRange = startingRangeAll[i][j]
trackReference.frames[i].swaths[j].sensingStart = sensingStartAll[i][j]
##find minimum startingRange and sensingStart
startingRangeMinimum = trackReference.frames[0].swaths[0].startingRange
sensingStartMinimum = trackReference.frames[0].swaths[0].sensingStart
for i, frameNumber in enumerate(frames):
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
if trackReference.frames[i].swaths[j].startingRange < startingRangeMinimum:
startingRangeMinimum = trackReference.frames[i].swaths[j].startingRange
if trackReference.frames[i].swaths[j].sensingStart < sensingStartMinimum:
sensingStartMinimum = trackReference.frames[i].swaths[j].sensingStart
print('startingRangeMinimum (m): {}'.format(startingRangeMinimum))
print('sensingStartMinimum: {}'.format(sensingStartMinimum))
#adjust each swath of each frame to minimum startingRange and sensingStart
#load reference track again for saving track parameters of resampled
trackReferenceResampled = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference])
for i, frameNumber in enumerate(frames):
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
#current swath
swathReference = trackReference.frames[i].swaths[j]
#swath of reference sample size
swathReferenceReference = trackReference.frames[frameReferenceIndex].swaths[swathReferenceIndex]
#current swath resampled
swathReferenceResampled = trackReferenceResampled.frames[i].swaths[j]
#update startingRange and sensingStart
offsetRange = (swathReference.startingRange - startingRangeMinimum) / (swathReferenceReference.rangePixelSize*nRange)
offsetAzimuth = (swathReference.sensingStart - sensingStartMinimum).total_seconds() / (swathReferenceReference.azimuthLineInterval*nAzimuth)
swathReferenceResampled.startingRange = startingRangeMinimum + round(offsetRange) * (swathReferenceReference.rangePixelSize*nRange)
swathReferenceResampled.sensingStart = sensingStartMinimum + datetime.timedelta(seconds = round(offsetAzimuth) *
(swathReferenceReference.azimuthLineInterval*nAzimuth))
#update other parameters
swathReferenceResampled.numberOfSamples = round(swathReference.numberOfSamples * swathReference.rangePixelSize / swathReferenceReference.rangePixelSize)
swathReferenceResampled.numberOfLines = round(swathReference.numberOfLines * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval)
swathReferenceResampled.rangeSamplingRate = swathReferenceReference.rangeSamplingRate
swathReferenceResampled.rangePixelSize = swathReferenceReference.rangePixelSize
swathReferenceResampled.prf = swathReferenceReference.prf
swathReferenceResampled.azimuthPixelSize = swathReferenceReference.azimuthPixelSize
swathReferenceResampled.azimuthLineInterval = swathReferenceReference.azimuthLineInterval
#should also update dopplerVsPixel, azimuthFmrateVsPixel?
#if hasattr(swathReference, 'burstLength'):
if swathReference.burstLength is not None:
swathReferenceResampled.burstLength *= (swathReference.burstLength * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval)
#if hasattr(swathReference, 'burstCycleLength'):
if swathReference.burstCycleLength is not None:
swathReferenceResampled.burstCycleLength *= (swathReference.burstCycleLength * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval)
#no need to update parameters for ScanSAR burst-by-burst processing, since we are not doing such burst-by-burst processing.
#resample each date
os.makedirs(odir, exist_ok=True)
os.chdir(odir)
for idate in range(ndate):
if dateSecondary != []:
if dates[idate] not in dateSecondary:
continue
os.makedirs(dates[idate], exist_ok=True)
os.chdir(dates[idate])
trackSecondary = loadTrack(dateDirs[idate], dates[idate])
for i, frameNumber in enumerate(frames):
frameDir = 'f{}_{}'.format(i+1, frameNumber)
os.makedirs(frameDir, exist_ok=True)
os.chdir(frameDir)
for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)):
swathDir = 's{}'.format(swathNumber)
os.makedirs(swathDir, exist_ok=True)
os.chdir(swathDir)
#current swath
swathReference = trackReference.frames[i].swaths[j]
#swath of reference sample size
swathReferenceReference = trackReference.frames[frameReferenceIndex].swaths[swathReferenceIndex]
#current swath resampled
swathReferenceResampled = trackReferenceResampled.frames[i].swaths[j]
#current swath to be resampled
swathSecondary = trackSecondary.frames[i].swaths[j]
#current slc to be processed
slc = os.path.join(dateDirs[idate], frameDir, swathDir, dates[idate]+'.slc')
#0. create subband SLCs
if subbandFlag:
subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix = subbandParameters(trackReference)
slcLower = dates[idate]+'_{}_tmp.slc'.format(subbandPrefix[0])
slcUpper = dates[idate]+'_{}_tmp.slc'.format(subbandPrefix[1])
rg_filter(slc, 2,
[slcLower, slcUpper],
subbandBandWidth,
subbandFrequencyCenter,
257, 2048, 0.1, 0, 0.0)
slcList = [slc, slcLower, slcUpper]
slcListResampled = [dates[idate]+'.slc', dates[idate]+'_{}.slc'.format(subbandPrefix[0]), dates[idate]+'_{}.slc'.format(subbandPrefix[1])]
slcListRemoved = [slcLower, slcUpper]
else:
slcList = [slc]
slcListResampled = [dates[idate]+'.slc']
slcListRemoved = []
#1. compute offset polynomial
if idate == dateIndexReference:
rangePoly = Poly2D()
rangePoly.initPoly(rangeOrder=1,azimuthOrder=0,coeffs=[[
(swathReferenceResampled.startingRange - swathReference.startingRange) / swathReference.rangePixelSize,
swathReferenceResampled.rangePixelSize / swathReference.rangePixelSize - 1.0]])
azimuthPoly = Poly2D()
azimuthPoly.initPoly(rangeOrder=0,azimuthOrder=1,coeffs=[
[(swathReferenceResampled.sensingStart - swathReference.sensingStart).total_seconds() / swathReference.azimuthLineInterval],
[swathReferenceResampled.azimuthLineInterval / swathReference.azimuthLineInterval - 1.0]])
if DEBUG:
print()
print('rangePoly.getCoeffs(): {}'.format(rangePoly.getCoeffs()))
print('azimuthPoly.getCoeffs(): {}'.format(azimuthPoly.getCoeffs()))
print('rangePoly._meanRange: {}'.format(rangePoly._meanRange))
print('rangePoly._normRange: {}'.format(rangePoly._normRange))
print('rangePoly._meanAzimuth: {}'.format(rangePoly._meanAzimuth))
print('rangePoly._normAzimuth: {}'.format(rangePoly._normAzimuth))
print('azimuthPoly._meanRange: {}'.format(azimuthPoly._meanRange))
print('azimuthPoly._normRange: {}'.format(azimuthPoly._normRange))
print('azimuthPoly._meanAzimuth: {}'.format(azimuthPoly._meanAzimuth))
print('azimuthPoly._normAzimuth: {}'.format(azimuthPoly._normAzimuth))
print()
else:
offsets = readOffset(os.path.join(dateDirs[idate], frameDir, swathDir, 'cull.off'))
# x1 x2 x3
# y1 y2 y3
#create new offset field to save offsets: swathReferenceResampled --> swathReference --> swathSecondary
offsetsUpdated = OffsetField()
for offset in offsets:
offsetUpdate = Offset()
x1 = offset.x * swathReference.rangePixelSize / swathReferenceResampled.rangePixelSize + \
(swathReference.startingRange - swathReferenceResampled.startingRange) / swathReferenceResampled.rangePixelSize
y1 = offset.y * swathReference.azimuthLineInterval / swathReferenceResampled.azimuthLineInterval + \
(swathReference.sensingStart - swathReferenceResampled.sensingStart).total_seconds() / swathReferenceResampled.azimuthLineInterval
x3 = offset.x + offset.dx
y3 = offset.y + offset.dy
dx = x3 - x1
dy = y3 - y1
offsetUpdate.setCoordinate(x1, y1)
offsetUpdate.setOffset(dx, dy)
offsetUpdate.setSignalToNoise(offset.snr)
offsetUpdate.setCovariance(offset.sigmax, offset.sigmay, offset.sigmaxy)
offsetsUpdated.addOffset(offsetUpdate)
azimuthPoly, rangePoly = offsetsUpdated.getFitPolynomials(rangeOrder=2,azimuthOrder=2,maxOrder=True, usenumpy=False)
#check polynomial accuracy
if DEBUG:
print()
print(' x y dx dy dx(poly) dy(poly) dx - dx(poly) dy - dy(poly)')
print('==============================================================================================================')
for offset in offsetsUpdated:
print('%11.3f %11.3f %11.3f %11.3f %11.3f %11.3f %11.3f %11.3f'%(offset.x, offset.y,
offset.dx, offset.dy,
rangePoly(offset.y, offset.x), azimuthPoly(offset.y, offset.x),
offset.dx - rangePoly(offset.y, offset.x), offset.dy - azimuthPoly(offset.y, offset.x)))
print()
if DEBUG:
print()
print('rangePoly.getCoeffs(): {}'.format(rangePoly.getCoeffs()))
print('azimuthPoly.getCoeffs(): {}'.format(azimuthPoly.getCoeffs()))
print('rangePoly._meanRange: {}'.format(rangePoly._meanRange))
print('rangePoly._normRange: {}'.format(rangePoly._normRange))
print('rangePoly._meanAzimuth: {}'.format(rangePoly._meanAzimuth))
print('rangePoly._normAzimuth: {}'.format(rangePoly._normAzimuth))
print('azimuthPoly._meanRange: {}'.format(azimuthPoly._meanRange))
print('azimuthPoly._normRange: {}'.format(azimuthPoly._normRange))
print('azimuthPoly._meanAzimuth: {}'.format(azimuthPoly._meanAzimuth))
print('azimuthPoly._normAzimuth: {}'.format(azimuthPoly._normAzimuth))
print()
#2. carrier phase
dpoly = Poly2D()
order = len(swathSecondary.dopplerVsPixel) - 1
coeffs = [2*np.pi*val*swathSecondary.azimuthLineInterval for val in swathSecondary.dopplerVsPixel]
dpoly.initPoly(rangeOrder=order, azimuthOrder=0)
dpoly.setCoeffs([coeffs])
#azCarrPoly = Poly2D()
#azCarrPoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]])
#3. resample images
#checked: offset computation results using azimuthPoly/rangePoly and in resamp_slc.f90
#checked: no flattening
#checked: no reading of range and azimuth images
#checked: range/azimuth carrier values: 0, 0
#checked: doppler no problem
# but doppler is computed using reference's coordinate in:
# isce/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90
# I have fixed it.
for slcInput, slcOutput in zip(slcList, slcListResampled):
inimg = isceobj.createSlcImage()
inimg.load(slcInput + '.xml')
inimg.filename = slcInput
inimg.extraFilename = slcInput+'.vrt'
inimg.setAccessMode('READ')
rObj = stdproc.createResamp_slc()
#the following two items are actually not used, since we are not flattening
#but need to set these otherwise the program complains
rObj.slantRangePixelSpacing = swathSecondary.rangePixelSize
rObj.radarWavelength = trackSecondary.radarWavelength
#rObj.azimuthCarrierPoly = azCarrPoly
rObj.dopplerPoly = dpoly
rObj.azimuthOffsetsPoly = azimuthPoly
rObj.rangeOffsetsPoly = rangePoly
rObj.imageIn = inimg
####Setting reference values
#the following four items are actually not used, since we are not flattening
#but need to set these otherwise the program complains
rObj.startingRange = swathSecondary.startingRange
rObj.referenceSlantRangePixelSpacing = swathReferenceResampled.rangePixelSize
rObj.referenceStartingRange = swathReferenceResampled.startingRange
rObj.referenceWavelength = trackReferenceResampled.radarWavelength
width = swathReferenceResampled.numberOfSamples
length = swathReferenceResampled.numberOfLines
imgOut = isceobj.createSlcImage()
imgOut.setWidth(width)
imgOut.filename = slcOutput
imgOut.setAccessMode('write')
rObj.outputWidth = width
rObj.outputLines = length
#rObj.residualRangeImage = rngImg
#rObj.residualAzimuthImage = aziImg
rObj.resamp_slc(imageOut=imgOut)
imgOut.renderHdr()
for x in slcListRemoved:
os.remove(x)
os.remove(x + '.vrt')
os.remove(x + '.xml')
os.chdir('../')
os.chdir('../')
os.chdir('../')
#dump resampled reference parameter files; only do this when the reference is resampled
dumpFlag = True
if dateSecondary != []:
if dates[dateIndexReference] not in dateSecondary:
dumpFlag = False
if dumpFlag:
#we are still in directory 'odir'
os.chdir(dates[dateIndexReference])
saveTrack(trackReferenceResampled, dates[dateIndexReference])

View File

@ -0,0 +1,101 @@
#!/usr/bin/env python3
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET
import isce, isceobj
from isceobj.Alos2Proc.runUnwrapSnaphu import unwrapSnaphu
from StackPulic import createObject
from StackPulic import loadProduct
def cmdLineParse():
    '''
    Command line parser.

    Returns the parsed argparse.Namespace. When no arguments are given,
    prints the usage/help text and exits with status 1.
    '''
    import sys
    import argparse

    # NOTE: description fixed — the original said 'take more looks and compute
    # coherence' (copy-pasted from a sibling stack script), but this script's
    # only action is unwrapping the filtered interferogram with snaphu.
    parser = argparse.ArgumentParser(description='unwrap filtered interferogram using snaphu')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized')
    parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True,
            help = 'reference date of stack. format: YYMMDD')
    parser.add_argument('-ref_date', dest='ref_date', type=str, required=True,
            help = 'reference date of this pair. format: YYMMDD')
    # help text fixed: the original said 'reference date of this pair'
    parser.add_argument('-sec_date', dest='sec_date', type=str, required=True,
            help = 'secondary date of this pair. format: YYMMDD')
    parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks 1. default: 1')
    parser.add_argument('-nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks 1. default: 1')
    parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1,
            help = 'number of range looks 2. default: 1')
    parser.add_argument('-nalks2', dest='nalks2', type=int, default=1,
            help = 'number of azimuth looks 2. default: 1')
    parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true', default=False,
            help='mask unwrapped interferogram with water body')

    # no arguments at all: show help and signal failure to the caller
    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()
if __name__ == '__main__':

    inps = cmdLineParse()

    #get user parameters from input
    idir = inps.idir
    dateReferenceStack = inps.ref_date_stack
    dateReference = inps.ref_date
    dateSecondary = inps.sec_date
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    numberRangeLooks2 = inps.nrlks2
    numberAzimuthLooks2 = inps.nalks2
    waterBodyMaskStartingStep = inps.wbd_msk
    #######################################################

    #pair name, e.g. '150221-150502', used as the prefix of the product files below
    pair = '{}-{}'.format(dateReference, dateSecondary)
    ms = pair
    #multilook suffix built from the total number of looks (looks1 * looks2)
    ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2)

    #create bare attribute-holder objects mimicking the application and insar
    #objects that unwrapSnaphu() receives in the full pipeline
    self = createObject()
    self._insar = createObject()

    #file names of the products unwrapSnaphu() reads and writes
    self._insar.filteredInterferogram = 'filt_' + ms + ml2 + '.int'
    self._insar.multilookAmplitude = ms + ml2 + '.amp'
    self._insar.multilookPhsig = ms + ml2 + '.phsig'
    self._insar.unwrappedInterferogram = 'filt_' + ms + ml2 + '.unw'
    self._insar.unwrappedMaskedInterferogram = 'filt_' + ms + ml2 + '_msk.unw'
    #water body file of the stack reference date (presumably in radar
    #coordinates at the total number of looks — confirm against the pipeline)
    self._insar.multilookWbdOut = os.path.join('../', idir, dateReferenceStack, 'insar', dateReferenceStack + ml2 + '.wbd')

    self._insar.numberRangeLooks1 = numberRangeLooks1
    self._insar.numberAzimuthLooks1 = numberAzimuthLooks1
    self._insar.numberRangeLooks2 = numberRangeLooks2
    self._insar.numberAzimuthLooks2 = numberAzimuthLooks2

    #only mask the unwrapped interferogram with the water body when requested
    if waterBodyMaskStartingStep:
        self.waterBodyMaskStartingStep='unwrap'
    else:
        self.waterBodyMaskStartingStep=None

    #load the track parameters of the pair's reference date and unwrap
    trackReference = loadProduct('{}.track.xml'.format(dateReference))

    unwrapSnaphu(self, trackReference)

View File

@ -30,7 +30,12 @@
<property name="water body">/net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd</property>
<!--=========================================================================================
See also comments of parameters "number of range looks ion" and "number of azimuth looks ion"
below to set a smaller number of looks to avoid phase aliasing in some areas (such as edges of
Tibetan Plateau, where there might be strong tropospheric variations due to large height
differences).
==========================================================================================-->
@ -133,6 +138,10 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450
<!--<property name="water body">None</property>-->
<!--=========================================================================================
if only dense offset is needed, do InSAR can be set to False to skip InSAR steps.
==========================================================================================-->
<!--<property name="do InSAR">True</property>-->
<!--<property name="use virtual file">True</property>-->
<!--<property name="use GPU">False</property>-->
@ -169,6 +178,45 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450
<!--<property name="number of azimuth offsets for slc matching">None</property>-->
<!--============================================================================================================================================
Instructions on number of looks used by the software
The software first takes number of range/azimuth looks 1, and then take any other number of range/azimuth looks (2, sim and ion).
Here are the purposes of these number of looks. Usually there is no need to set number of range/azimuth looks sim (automatically set), so it is
not explained here.
number of range/azimuth looks 1: save space, remove speckle noise, equalize sample size, match original resolution (full-aperture)
number of range/azimuth looks 2: make interferogram not too small or large
number of range/azimuth looks ion: make interferogram for ionosphere estimation not too small or large, facilitate ionosphere filtering
total number of looks of InSAR processing is: number of range/azimuth looks 1 * number of range/azimuth looks 2
total number of looks in ionosphere estimation is: number of range/azimuth looks 1 * number of range/azimuth looks ion
total number of looks in radar/DEM matching is: number of range/azimuth looks 1 * number of range/azimuth looks sim
Below is the default number of looks used by the software. REMEMBER, NORMALLY YOU ONLY NEED TO CHANGE number of range/azimuth looks 2!!!
============================================================================================================================================
Operation Mode | Mode (AUIG2) | Mode (in file name) | look1 (r*a) | look2 (r*a) | total insar (r*a) | look_ion (r*a) | total ion (r*a)
============================================================================================================================================
spotlight | SPT | SBS | 2*4 | 4*4 | 8*16 | 16*16 | 32*64
============================================================================================================================================
stripmap | SM1 | UBS, UBD | 2*3 | 4*4 | 8*12 | 32*32 | 64*96
| SM2 | HBS, HBD, HBQ | 2*4 | 4*4 | 8*16 | 16*16 | 32*64
| SM3 | FBS, FBD, FBQ | 2*4 | 4*4 | 8*16 | 16*16 | 32*64
============================================================================================================================================
ScanSAR | WD1 | WBS, WBD | 1*14 | 5*2 | 5*28 | 80*32 | 80*448
| WD1 | WWS, WWD | 2*14 | 5*2 | 10*28 | 80*32 | 160*448
| WD2 | VBS, VBD | 1*14 | 5*2 | 5*28 | 80*32 | 80*448
============================================================================================================================================
To find the acquisition mode code, check the unpacked ALOS-2 product. For example, in the following
file name
IMG-HH-ALOS2183010685-171012-FBDR1.1__A
^^^
FBD (indicated by ^) is the acquisition mode code.
=============================================================================================================================================-->
<!--=========================================================================================
These are the numbers of looks to be taken when forming the interferogram
==========================================================================================-->
@ -234,7 +282,15 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450
<!--=========================================================================================
These are the numbers of looks to be taken after taking the numbers of range/azimuth looks 1.
This is for ionospheric correction
This is for ionospheric correction.
Using a larger number of looks results in a smaller image size, which saves time in the
filtering step of ionosphere estimation. However, a larger number of looks may also lead to
phase aliasing in the resulting interferograms and therefore to phase unwrapping errors,
which cause significant errors in ionosphere estimation.
If the area has strong tropospheric or other phase variations (normally in areas with large
height differences, such as the edges of the Tibetan Plateau), a smaller number of looks
should be used to avoid phase aliasing after taking looks, e.g. 1/2 of the default number of
range/azimuth looks ion that can be found in the annotation of parameter 'number of range looks 1'.
==========================================================================================-->
<!--<property name="number of range looks ion">None</property>-->
<!--<property name="number of azimuth looks ion">None</property>-->
@ -251,9 +307,37 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450
[[100, 200, 100, 200],[1000, 1200, 500, 600]]
==========================================================================================-->
<!--<property name="areas masked out in ionospheric phase estimation">None</property>-->
<!--=========================================================================================
a 2-D list. e.g. if you are processing two ScanSAR frames, each with five swaths, and you do
not want the phase difference of swaths 1 and 2 in frame 2 to snap to fixed values, the
parameter can be specified as:
[[True, True, True, True], [False, True, True, True]]
==========================================================================================-->
<!--<property name="swath phase difference snap to fixed values">None</property>-->
<!--=========================================================================================
a 2-D list. e.g. if you are processing two ScanSAR frames, each with five swaths, and you want
to use a phase difference value 0.21 (rad) for swath 1 and 2 in frame 2, the parameter can be
specified as:
[[None, None, None, None], [0.21, None, None, None]]
This parameter has the highest priority in determining the phase difference between swaths.
==========================================================================================-->
<!--<property name="swath phase difference of lower band">None</property>-->
<!--<property name="swath phase difference of upper band">None</property>-->
<!--<property name="apply polynomial fit before filtering ionosphere phase">True</property>-->
<!--<property name="maximum window size for filtering ionosphere phase">151</property>-->
<!--<property name="minimum window size for filtering ionosphere phase">41</property>-->
<!--<property name="whether filtering ionosphere phase">True</property>-->
<!--<property name="apply polynomial fit in adaptive filtering window">True</property>-->
<!--<property name="whether do secondary filtering of ionosphere phase">True</property>-->
<!--<property name="maximum window size for filtering ionosphere phase">301</property>-->
<!--<property name="minimum window size for filtering ionosphere phase">11</property>-->
<!--<property name="window size of secondary filtering of ionosphere phase">5</property>-->
<!--=========================================================================================
Normally no need to set this parameter, it will be automatically determined.
==========================================================================================-->
<!--<property name="standard deviation of ionosphere phase after filtering">None</property>-->
<!--=========================================================================================
parameters for filtering subband interferograms used for ionospheric phase estimation

Some files were not shown because too many files have changed in this diff Show More