Skip to content

Commit

Permalink
Apply more patches from JPL to fix S2 failures
Browse files Browse the repository at this point in the history
  • Loading branch information
jhkennedy committed Dec 13, 2020
1 parent ccea7cf commit dc377a9
Show file tree
Hide file tree
Showing 6 changed files with 292 additions and 36 deletions.
15 changes: 12 additions & 3 deletions hyp3_autorift/netcdf_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,13 @@
import hyp3_autorift


def v_error_cal(vx_error, vy_error):
    """Estimate the speed-magnitude error from component errors.

    Draws a large Monte-Carlo sample of velocity components with the given
    per-component standard deviations and returns the standard deviation of
    the resulting speed magnitudes.

    NOTE(review): this uses the global numpy RNG without a seed, so repeated
    calls return slightly different estimates (~1e-3 relative jitter at one
    million samples) — presumably acceptable for metadata; confirm.

    :param vx_error: standard deviation of the x-velocity error
    :param vy_error: standard deviation of the y-velocity error
    :return: estimated standard deviation of the speed magnitude
    """
    n_samples = 1000000
    x_component = np.random.normal(0, vx_error, n_samples)
    y_component = np.random.normal(0, vy_error, n_samples)
    # np.hypot(x, y) == sqrt(x**2 + y**2), computed element-wise
    speed = np.hypot(x_component, y_component)
    return np.std(speed)


def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SX, SY,
offset2vx_1, offset2vx_2, offset2vy_1, offset2vy_2, MM, VXref, VYref,
rangePixelSize, azimuthPixelSize, dt, epsg, srs, tran, out_nc_filename, pair_type,
Expand All @@ -19,11 +26,11 @@ def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SX,
vx_mean_shift = offset2vx_1 * dx_mean_shift + offset2vx_2 * dy_mean_shift
temp = vx_mean_shift
temp[np.logical_not(SSM)] = np.nan
vx_mean_shift = np.median(temp[(temp > -500) & (temp < 500)])
vx_mean_shift = np.median(temp[np.logical_not(np.isnan(temp))])
vy_mean_shift = offset2vy_1 * dx_mean_shift + offset2vy_2 * dy_mean_shift
temp = vy_mean_shift
temp[np.logical_not(SSM)] = np.nan
vy_mean_shift = np.median(temp[(temp > -500) & (temp < 500)])
vy_mean_shift = np.median(temp[np.logical_not(np.isnan(temp))])
else:
vx_mean_shift = 0.0
vy_mean_shift = 0.0
Expand Down Expand Up @@ -125,7 +132,7 @@ def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SX,
institution = 'NASA Jet Propulsion Laboratory (JPL), California Institute of Technology'

isce_version = subprocess.check_output('conda list | grep isce | awk \'{print $2}\'', shell=True, text=True)
autorift_version = '1.0.7'
autorift_version = '1.0.8'
source = f'ASF DAAC HyP3 {datetime.datetime.now().year} using the {hyp3_autorift.__name__} plugin version' \
f' {hyp3_autorift.__version__} running autoRIFT version {autorift_version} as distributed with ISCE ' \
f'version {isce_version.strip()}. Contains modified Copernicus Sentinel data ' \
Expand Down Expand Up @@ -332,6 +339,7 @@ def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SX,
var[:] = np.round(np.clip(V, -32768, 32767)).astype(np.int16)
var.setncattr('missing_value', np.int16(NoDataValue))

v_error = v_error_cal(vx_error, vy_error)
varname = 'v_error'
datatype = np.dtype('int16')
dimensions = ('y', 'x')
Expand All @@ -346,6 +354,7 @@ def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SX,
var.setncattr('description', 'velocity magnitude error')
var.setncattr('units', 'm/y')
V_error = np.sqrt((vx_error * VX / V) ** 2 + (vy_error * VY / V) ** 2)
V_error[V==0] = v_error
V_error[noDataMask] = NoDataValue
var[:] = np.round(np.clip(V_error, -32768, 32767)).astype(np.int16)
var.setncattr('missing_value', np.int16(NoDataValue))
Expand Down
223 changes: 223 additions & 0 deletions hyp3_autorift/vend/PRE109-PATCH.diff
Original file line number Diff line number Diff line change
@@ -0,0 +1,223 @@
--- a/testautoRIFT.py
+++ b/testautoRIFT.py
@@ -390,7 +390,7 @@ def main():
urlflag = inps.urlflag

if inps.optical_flag == 1:
- if urlflag is 1:
+ if urlflag == 1:
data_r, data_s = loadProductOpticalURL(inps.indir_r, inps.indir_s)
else:
data_r = loadProductOptical(inps.indir_r)
@@ -595,7 +595,7 @@ def main():
xcount = int(str.split(runCmd('fgrep "Dimensions of geogrid:" testGeogrid.txt'))[3])
ycount = int(str.split(runCmd('fgrep "Dimensions of geogrid:" testGeogrid.txt'))[5])

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(vxrefname))
else:
ds = gdal.Open(vxrefname)
@@ -604,7 +604,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(vyrefname))
else:
ds = gdal.Open(vyrefname)
@@ -613,7 +613,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(sxname))
else:
ds = gdal.Open(sxname)
@@ -622,7 +622,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(syname))
else:
ds = gdal.Open(syname)
@@ -631,7 +631,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(maskname))
else:
ds = gdal.Open(maskname)
@@ -643,7 +643,7 @@ def main():
DXref = offset2vy_2 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VXref - offset2vx_2 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VYref
DYref = offset2vx_1 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VYref - offset2vy_1 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VXref

- stable_count = np.sum(SSM & np.logical_not(np.isnan(DX)))
+ stable_count = np.sum(SSM & np.logical_not(np.isnan(DX)) & (DX-DXref > -5) & (DX-DXref < 5) & (DY-DYref > -5) & (DY-DYref < 5))

if stable_count == 0:
stable_shift_applied = 0
@@ -692,10 +692,13 @@ def main():
pair_type = 'radar'
detection_method = 'feature'
coordinates = 'radar'
+ roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
# out_nc_filename = 'Jakobshavn.nc'
- out_nc_filename = reference_filename[0:-4]+'_'+secondary_filename[0:-4]+'.nc'
+ PPP = roi_valid_percentage * 100
+ out_nc_filename = reference_filename[0:-4]+'_X_'+secondary_filename[0:-4]+'_G0240V02_P{0}{1}{2}.nc'.format(int(PPP/10),int((PPP-int(PPP/10)*10)),int((PPP-int(PPP/10)*10-int((PPP-int(PPP/10)*10)))*10))
out_nc_filename = './' + out_nc_filename
- roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
+
CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2


@@ -770,10 +773,12 @@ def main():
pair_type = 'optical'
detection_method = 'feature'
coordinates = 'map'
+ roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
# out_nc_filename = 'Jakobshavn_opt.nc'
- out_nc_filename = reference_filename[0:-4]+'_'+secondary_filename[0:-4]+'.nc'
+ PPP = roi_valid_percentage * 100
+ out_nc_filename = reference_filename[0:-8]+'_X_'+secondary_filename[0:-8]+'_G0240V02_P{0}{1}{2}.nc'.format(int(PPP/10),int((PPP-int(PPP/10)*10)),int((PPP-int(PPP/10)*10-int((PPP-int(PPP/10)*10)))*10))
out_nc_filename = './' + out_nc_filename
- roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2

d0 = datetime.date(np.int(reference_split[3][0:4]),np.int(reference_split[3][4:6]),np.int(reference_split[3][6:8]))
@@ -851,9 +856,11 @@ def main():
detection_method = 'feature'
coordinates = 'map'

- out_nc_filename = reference_filename[0:-4]+'_'+secondary_filename[0:-4]+'.nc'
- out_nc_filename = './' + out_nc_filename
roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
+ PPP = roi_valid_percentage * 100
+ out_nc_filename = reference_filename[0:-8]+'_X_'+secondary_filename[0:-8]+'_G0240V02_P{0}{1}{2}.nc'.format(int(PPP/10),int((PPP-int(PPP/10)*10)),int((PPP-int(PPP/10)*10-int((PPP-int(PPP/10)*10)))*10))
+ out_nc_filename = './' + out_nc_filename
CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2

d0 = datetime.date(np.int(reference_split[2][0:4]),np.int(reference_split[2][4:6]),np.int(reference_split[2][6:8]))
--- a/testautoRIFT_ISCE.py
+++ b/testautoRIFT_ISCE.py
@@ -133,7 +133,7 @@ def loadProductOpticalURL(file_r, file_s):

x1a, y1a, xsize1, ysize1, x2a, y2a, xsize2, ysize2, trans = obj.coregister(file_r, file_s, 1)

- DS1 = gdal.Open('/vsicurl/%s' % (file_r))
+ DS1 = gdal.Open('/vsicurl/%s' %(file_r))
DS2 = gdal.Open('/vsicurl/%s' %(file_s))

I1 = DS1.ReadAsArray(xoff=x1a, yoff=y1a, xsize=xsize1, ysize=ysize1)
@@ -386,7 +386,7 @@ def main():
urlflag = inps.urlflag

if inps.optical_flag == 1:
- if urlflag is 1:
+ if urlflag == 1:
data_r, data_s = loadProductOpticalURL(inps.indir_r, inps.indir_s)
else:
data_r = loadProductOptical(inps.indir_r)
@@ -593,7 +593,7 @@ def main():
xcount = int(str.split(runCmd('fgrep "Dimensions of geogrid:" testGeogrid.txt'))[3])
ycount = int(str.split(runCmd('fgrep "Dimensions of geogrid:" testGeogrid.txt'))[5])

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(vxrefname))
else:
ds = gdal.Open(vxrefname)
@@ -602,7 +602,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(vyrefname))
else:
ds = gdal.Open(vyrefname)
@@ -611,7 +611,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(sxname))
else:
ds = gdal.Open(sxname)
@@ -620,7 +620,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(syname))
else:
ds = gdal.Open(syname)
@@ -629,7 +629,7 @@ def main():
ds = None
band = None

- if urlflag is 1:
+ if urlflag == 1:
ds = gdal.Open('/vsicurl/%s' %(maskname))
else:
ds = gdal.Open(maskname)
@@ -641,7 +641,7 @@ def main():
DXref = offset2vy_2 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VXref - offset2vx_2 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VYref
DYref = offset2vx_1 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VYref - offset2vy_1 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) * VXref

- stable_count = np.sum(SSM & np.logical_not(np.isnan(DX)))
+ stable_count = np.sum(SSM & np.logical_not(np.isnan(DX)) & (DX-DXref > -5) & (DX-DXref < 5) & (DY-DYref > -5) & (DY-DYref < 5))

if stable_count == 0:
stable_shift_applied = 0
@@ -690,10 +690,13 @@ def main():
pair_type = 'radar'
detection_method = 'feature'
coordinates = 'radar'
+ roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
# out_nc_filename = 'Jakobshavn.nc'
- out_nc_filename = reference_filename[0:-4]+'_'+secondary_filename[0:-4]+'.nc'
+ PPP = roi_valid_percentage * 100
+ out_nc_filename = reference_filename[0:-4]+'_X_'+secondary_filename[0:-4]+'_G0240V02_P{0}{1}{2}.nc'.format(int(PPP/10),int((PPP-int(PPP/10)*10)),int((PPP-int(PPP/10)*10-int((PPP-int(PPP/10)*10)))*10))
out_nc_filename = './' + out_nc_filename
- roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
+
CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2


@@ -768,10 +771,12 @@ def main():
pair_type = 'optical'
detection_method = 'feature'
coordinates = 'map'
+ roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
# out_nc_filename = 'Jakobshavn_opt.nc'
- out_nc_filename = reference_filename[0:-4]+'_'+secondary_filename[0:-4]+'.nc'
+ PPP = roi_valid_percentage * 100
+ out_nc_filename = reference_filename[0:-8]+'_X_'+secondary_filename[0:-8]+'_G0240V02_P{0}{1}{2}.nc'.format(int(PPP/10),int((PPP-int(PPP/10)*10)),int((PPP-int(PPP/10)*10-int((PPP-int(PPP/10)*10)))*10))
out_nc_filename = './' + out_nc_filename
- roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2

d0 = datetime.date(np.int(reference_split[3][0:4]),np.int(reference_split[3][4:6]),np.int(reference_split[3][6:8]))
@@ -849,9 +854,11 @@ def main():
detection_method = 'feature'
coordinates = 'map'

- out_nc_filename = reference_filename[0:-4]+'_'+secondary_filename[0:-4]+'.nc'
- out_nc_filename = './' + out_nc_filename
roi_valid_percentage = int(round(np.sum(CHIPSIZEX!=0)/np.sum(SEARCHLIMITX!=0)*1000.0))/1000
+ PPP = roi_valid_percentage * 100
+ out_nc_filename = reference_filename[0:-8]+'_X_'+secondary_filename[0:-8]+'_G0240V02_P{0}{1}{2}.nc'.format(int(PPP/10),int((PPP-int(PPP/10)*10)),int((PPP-int(PPP/10)*10-int((PPP-int(PPP/10)*10)))*10))
+ out_nc_filename = './' + out_nc_filename
CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2

d0 = datetime.date(np.int(reference_split[2][0:4]),np.int(reference_split[2][4:6]),np.int(reference_split[2][6:8]))
7 changes: 2 additions & 5 deletions hyp3_autorift/vend/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,10 @@ be easily incorporated from a package manager or installed appropriately.
---
*Note: A patch from autoRIFT was applied to these files to prevent failures due
to stable surface misclassification, which will be included in the next autoRIFT
release:*
```diff
- stable_count = np.sum(SSM & np.logical_not(np.isnan(DX)))
+ stable_count = np.sum(SSM & np.logical_not(np.isnan(DX)) & (DX-DXref > -5) & (DX-DXref < 5) & (DY-DYref > -5) & (DY-DYref < 5))
```
release. See `PRE109-PATCH.diff` for the changes applied.*
---


These modules were provided in the autoRIFT
[v1.0.8 release](https://github.com/leiyangleon/autoRIFT/releases/tag/v1.0.8),
and are required for the expected workflow provided to ASF. However, in their
Expand Down
Loading

0 comments on commit dc377a9

Please sign in to comment.