From 376610ca94478777fdaffc3e07f0784b300f3b51 Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Tue, 8 Nov 2022 18:52:31 +0000
Subject: [PATCH 01/12] Prepping for chunking with multiprocessing

---
 tools/RAiDER/delayFcns.py | 29 +++++++++++++++++++++++++++--
 1 file changed, 27 insertions(+), 2 deletions(-)

diff --git a/tools/RAiDER/delayFcns.py b/tools/RAiDER/delayFcns.py
index 599464c45..ca1ebc347 100755
--- a/tools/RAiDER/delayFcns.py
+++ b/tools/RAiDER/delayFcns.py
@@ -86,9 +86,9 @@ def get_delays(
     return wet_delay, hydro_delay
 
 
-def getInterpolators(wm_file, kind='pointwise'):
+def getInterpolators(wm_file, kind='pointwise', shared=False):
     '''
-    Read 3D gridded data from a processed weather model file and wrap it with 
+    Read 3D gridded data from a processed weather model file and wrap it with
     an interpolator
     '''
     # Get the weather model data
@@ -105,12 +105,37 @@ def getInterpolators(wm_file, kind='pointwise'):
         wet = np.array(f.variables['wet_total'][:]).transpose(1, 2, 0)
         hydro = np.array(f.variables['hydro_total'][:]).transpose(1, 2, 0)
 
+    # If shared interpolators are requested
+    # The arrays are not modified - so turning off lock for performance
+    if shared:
+        xs_wm = make_shared_raw(xs_wm)
+        ys_wm = make_shared_raw(ys_wm)
+        zs_wm = make_shared_raw(zs_wm)
+        wet = make_shared_raw(wet)
+        hydro = make_shared_raw(hydro)
+
+
     ifWet = Interpolator((ys_wm, xs_wm, zs_wm), wet, fill_value=np.nan)
     ifHydro = Interpolator((ys_wm, xs_wm, zs_wm), hydro, fill_value=np.nan)
 
     return ifWet, ifHydro
 
 
+def make_shared_raw(inarr):
+    """
+    Make numpy view array of mp.Array
+    """
+    # Create flat shared array
+    shared_arr = mp.RawArray('d', inarr.size)
+    # Create a numpy view of it
+    shared_arr_np = np.ndarray(inarr.shape, dtype=np.float64,
+                               buffer=shared_arr)
+    # Copy data to shared array
+    np.copyto(shared_arr_np, inarr)
+
+    return shared_arr_np
+
+
 def chunk(chunkSize, in_shape):
     '''
     Create a set of indices to use as chunks

From a18d848cd4a22f97a2b4219dad71d164595bdf1f Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Wed, 9 Nov 2022 01:54:41 +0000
Subject: [PATCH 02/12] Making output arrays optional

---
 tools/RAiDER/delay.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/tools/RAiDER/delay.py b/tools/RAiDER/delay.py
index 46becf0d2..cccca9ebb 100755
--- a/tools/RAiDER/delay.py
+++ b/tools/RAiDER/delay.py
@@ -466,7 +466,8 @@ def build_cube(xpts, ypts, zpts, model_crs, pts_crs, interpolators):
     return outputArrs
 
 
-def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs, pts_crs, interpolators):
+def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs,
+                   pts_crs, interpolators, outputArrs=None):
     """
     Iterate over interpolators and build a cube
     """
@@ -504,8 +505,11 @@ def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs,
     xx, yy = np.meshgrid(xpts, ypts)
 
     # Output arrays
-    outputArrs = [np.zeros((zpts.size, ypts.size, xpts.size))
-                  for mm in range(len(interpolators))]
+    output_created_here = False
+    if outputArrs is None:
+        output_created_here = True
+        outputArrs = [np.zeros((zpts.size, ypts.size, xpts.size))
+                      for mm in range(len(interpolators))]
 
     # Various transformers needed here
     epsg4326 = CRS.from_epsg(4326)
@@ -645,5 +649,5 @@ def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs,
                 val[np.isnan(val)] = 0.0
                 out += wt * val
 
-
-    return outputArrs
+    if output_created_here:
+        return outputArrs
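Patch 01 introduces `make_shared_raw`, which copies an array into an unlocked `multiprocessing.RawArray` and returns a NumPy view of it, so several worker processes can read the same interpolator data without each holding a private copy. The patch only prepares the data; the block below is a minimal, self-contained sketch (not part of the patch) of how such a view can be consumed from a pool of forked workers. The names `SHARED` and `column_mean`, and the choice of the `fork` start method, are illustrative assumptions only.

```python
import multiprocessing as mp
import numpy as np


def make_shared_raw(inarr):
    """Copy inarr into an unlocked shared RawArray and return a NumPy view of it."""
    shared_arr = mp.RawArray('d', inarr.size)    # no lock: workers only read
    view = np.ndarray(inarr.shape, dtype=np.float64, buffer=shared_arr)
    np.copyto(view, inarr)
    return view


# Module-level so forked workers inherit the same buffer instead of copying it
SHARED = make_shared_raw(np.arange(12, dtype=np.float64).reshape(3, 4))


def column_mean(j):
    # Reads directly from the shared buffer inherited from the parent process
    return float(SHARED[:, j].mean())


if __name__ == '__main__':
    # 'fork' keeps the RawArray shared; 'spawn' (e.g. on Windows) would
    # re-import the module and re-create SHARED instead of sharing it.
    ctx = mp.get_context('fork')
    with ctx.Pool(2) as pool:
        print(pool.map(column_mean, range(SHARED.shape[1])))
```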
From a0493d420b7bceb1774e9263456a9257a781b1a4 Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Wed, 9 Nov 2022 05:16:17 +0000
Subject: [PATCH 03/12] Multiprocessing work in progress

---
 tools/RAiDER/delay.py | 37 ++++++++++++++++++++++++++++++-------
 1 file changed, 30 insertions(+), 7 deletions(-)

diff --git a/tools/RAiDER/delay.py b/tools/RAiDER/delay.py
index cccca9ebb..b2ab82469 100755
--- a/tools/RAiDER/delay.py
+++ b/tools/RAiDER/delay.py
@@ -219,6 +219,9 @@ def tropo_delay_cube(dt, wf, args, model_file=None):
     except CRSError:
         raise ValueError('output_projection argument is not a valid CRS specifier')
 
+    # For testing multiprocessing
+    # TODO - move this to configuration
+    nproc = 1
 
     # logging
     logger.debug('Starting to run the weather model cube calculation')
@@ -310,14 +313,26 @@ def tropo_delay_cube(dt, wf, args, model_file=None):
         )
 
         # Get pointwise interpolators
-        ifWet, ifHydro = getInterpolators(weather_model_file, "pointwise")
+        ifWet, ifHydro = getInterpolators(weather_model_file,
+                                          kind="pointwise",
+                                          shared=(nproc > 1))
 
         # Build cube
-        wetDelay, hydroDelay = build_cube_ray(
-            xpts, ypts, zpts,
-            dt, args["los"]._file, args["look_dir"],
-            wm_proj, crs,
-            [ifWet, ifHydro])
+        if nproc == 1:
+            wetDelay, hydroDelay = build_cube_ray(
+                xpts, ypts, zpts,
+                dt, args["los"]._file, args["look_dir"],
+                wm_proj, crs,
+                [ifWet, ifHydro])
+
+        ### Use multi-processing here
+        else:
+            # Pre-build output arrays
+
+            # Create worker pool
+
+            # Loop over heights
+            print("Testing")
 
     # Write output file
     # Modify this as needed for NISAR / other projects
@@ -471,7 +486,9 @@ def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs,
     """
     Iterate over interpolators and build a cube
    """
+
     # Some constants for this module
+    # TODO - Read this from constants or configuration
     MAX_SEGMENT_LENGTH = 1000.
     MAX_TROPO_HEIGHT = 50000.
 
@@ -615,7 +632,13 @@ def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs,
             cos_factor = (high_ht - low_ht) / ray_length
 
         # Determine number of parts to break ray into
-        nParts = int(np.ceil(ray_length.max() / MAX_SEGMENT_LENGTH)) + 1
+        try:
+            nParts = int(np.ceil(ray_length.max() / MAX_SEGMENT_LENGTH)) + 1
+        except ValueError:
+            raise ValueError(
+                "geo2rdr did not converge. Check orbit coverage"
+            )
+
         if (nParts == 1):
             raise RuntimeError(
                 "Ray with one segment encountered"
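Patch 03 stubs out the `nproc > 1` branch (it only prints "Testing" at this point); the intent appears to be chunking the output heights across a worker pool and filling pre-built arrays. The sketch below is one hedged guess at that shape, kept deliberately simple: workers return their slabs and the parent concatenates them, avoiding shared writable state. `delay_for_heights` and `split_heights` are hypothetical stand-ins, not RAiDER functions.

```python
import multiprocessing as mp
import numpy as np


def delay_for_heights(hts):
    # Stand-in for the per-height work done in build_cube_ray: one (ny, nx)
    # slab per requested height.
    return np.stack([np.full((4, 5), ht, dtype=np.float64) for ht in hts])


def split_heights(zpts, nchunks):
    # Contiguous chunks of the height axis, one per worker task
    return [c for c in np.array_split(zpts, nchunks) if c.size]


if __name__ == '__main__':
    zpts = np.array([0., 100., 500., 1000., 2000., 5000.])
    nproc = 2
    chunks = split_heights(zpts, nproc)
    with mp.Pool(nproc) as pool:
        slabs = pool.map(delay_for_heights, chunks)
    cube = np.concatenate(slabs, axis=0)   # (nz, ny, nx), same order as zpts
    print(cube.shape)
```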
From 129b0cd01eab5749b035aeab02e8c76d4916bfbd Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Wed, 9 Nov 2022 21:46:23 +0000
Subject: [PATCH 04/12] Fixes for handling model info

---
 tools/RAiDER/delay.py               | 24 ++++++++++--------------
 tools/RAiDER/models/weatherModel.py | 12 +++++++++++-
 2 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/tools/RAiDER/delay.py b/tools/RAiDER/delay.py
index b2ab82469..62bf34996 100755
--- a/tools/RAiDER/delay.py
+++ b/tools/RAiDER/delay.py
@@ -29,7 +29,7 @@
 from RAiDER.processWM import prepareWeatherModel
 from RAiDER.utilFcns import (
     writeDelays, projectDelays, writePnts2HDF5,
-    lla2ecef, transform_bbox, clip_bbox,
+    lla2ecef, transform_bbox, clip_bbox, rio_profile,
 )
 
@@ -109,13 +109,11 @@ def tropo_delay(dt, wetFilename, hydroFilename, args):
 
     ###########################################################
     # Load the downloaded model file for CRS information
-    ds = xarray.load_dataset(weather_model_file)
-    try:
-        wm_proj = ds['CRS']
-    except:
+    wm_proj = rio_profile(f"netcdf:{weather_model_file}:t")["crs"]
+    print(wm_proj)
+    if wm_proj is None:
         print("WARNING: I can't find a CRS in the weather model file, so I will assume you are using WGS84")
         wm_proj = 4326
-    ds.close()
 
     ####################################################################
     ####################################################################
@@ -164,8 +162,6 @@ def tropo_delay(dt, wetFilename, hydroFilename, args):
     else:
         raise ValueError("Unknown operation type")
 
-    del ds  # cleanup
-
     ###########################################################
     # Write the delays to file
     # Different options depending on the inputs
@@ -276,12 +272,12 @@ def tropo_delay_cube(dt, wf, args, model_file=None):
         ds.close()
 
     logger.debug(f'Output height range is {min(heights)} to {max(heights)}')
-
-    try:
-        wm_proj = ds["CRS"]
-    except:
-        print("WARNING: I can't find a CRS in the weather model file, so I will assume you are using WGS84")
-        wm_proj = CRS.from_epsg(4326)
+    # Load CRS from weather model file
+    wm_proj = rio_profile(f"netcdf:{weather_model_file}:t")["crs"]
+    print(wm_proj)
+    if wm_proj is None:
+        print("WARNING: I can't find a CRS in the weather model file, so I will assume you are using WGS84")
+        wm_proj = CRS.from_epsg(4326)
 
     # Build the output grid
     zpts = np.array(heights)
diff --git a/tools/RAiDER/models/weatherModel.py b/tools/RAiDER/models/weatherModel.py
index 75943fddd..7c6298c57 100755
--- a/tools/RAiDER/models/weatherModel.py
+++ b/tools/RAiDER/models/weatherModel.py
@@ -395,7 +395,17 @@ def bbox(self) -> list:
             if len(datasets) == 0:
                 raise ValueError('No subdatasets found in the weather model. The file may be corrupt.\nWeather model path: {}'.format(weather_model_path))
 
-            with rasterio.open(datasets[0]) as ds:
+            # First dataset can end up being a coord.
+            # So search for temperature here - maybe there is a better way to
+            # do this
+            temp_dataset = None
+            for ds in datasets:
+                if ds.endswith((":t", ":T", ":temperature")):
+                    temp_dataset = ds
+                    break
+
+            logger.debug(f"Using {temp_dataset} for bounds estimation")
+            with rasterio.open(temp_dataset) as ds:
                 bounds = ds.bounds
                 xmin, ymin, xmax, ymax = tuple(bounds)
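Patch 04 switches CRS discovery from `xarray` attribute lookups to `rio_profile("netcdf:<file>:t")["crs"]`, i.e. reading the GDAL/rasterio profile of the `t` subdataset. Below is a hedged, self-contained sketch of the same idea using rasterio directly; `read_wm_crs` and the commented-out file name are illustrative only, and the fall-back to EPSG:4326 mirrors the behavior in the patch.

```python
import rasterio
from pyproj import CRS


def read_wm_crs(wm_file, var="t"):
    # GDAL/rasterio address a netCDF variable as a subdataset: "netcdf:<path>:<var>"
    with rasterio.open(f"netcdf:{wm_file}:{var}") as ds:
        prof = ds.profile            # dict-like; includes "crs" and "transform"
    rio_crs = prof.get("crs")
    if rio_crs is None:
        # Same fallback the patch uses: assume geographic coordinates
        return CRS.from_epsg(4326)
    return CRS.from_wkt(rio_crs.to_wkt())


# Example (hypothetical file name):
# crs = read_wm_crs("HRRR_weather_model.nc")
```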
From 1c65d4be06bb66e71fb447f3bde00336008b6571 Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Thu, 10 Nov 2022 00:01:44 +0000
Subject: [PATCH 05/12] Updated HRRR subsetter

---
 tools/RAiDER/models/hrrr.py         | 95 +++++++++++++++++++++++------
 tools/RAiDER/models/weatherModel.py | 18 ------
 2 files changed, 77 insertions(+), 36 deletions(-)

diff --git a/tools/RAiDER/models/hrrr.py b/tools/RAiDER/models/hrrr.py
index 9c73a396e..f2c2418cf 100644
--- a/tools/RAiDER/models/hrrr.py
+++ b/tools/RAiDER/models/hrrr.py
@@ -9,9 +9,10 @@
 from herbie import Herbie
 from pathlib import Path
-from pyproj import CRS
+from pyproj import CRS, Transformer
 
 from RAiDER.logger import logger
+from RAiDER.utilFcns import rio_profile
 from RAiDER.models.weatherModel import WeatherModel
 from RAiDER.models.model_levels import (
     LEVELS_137_HEIGHTS,
@@ -76,7 +77,6 @@ def _fetch(self, out):
         time = self._time
         self.files = self._download_hrrr_file(time, out)
 
-
     def load_weather(self, *args, filename=None, **kwargs):
@@ -137,34 +137,93 @@ def _makeDataCubes(self, filename, out=None):
         if out is None:
             out = filename
 
-        # Pull the native grid
-        xArr, yArr = self.getXY_gdal(filename)
+        # Get profile information from gdal
+        prof = rio_profile(str(filename))
+
+        # Now get bounds
+        S, N, W, E = self._ll_bounds
 
         # open the dataset and pull the data
         ds = xarray.open_dataset(filename, engine='cfgrib', filter_by_keys={'typeOfLevel': 'isobaricInhPa'})
-        t = ds['t'].values.copy()
-        z = ds['gh'].values.copy()
-        q = ds['q'].values.copy()
-        lats = ds['t'].latitude.values.copy()
-        lons = ds['t'].longitude.values.copy()
+
+        # Determine mask based on query bounds
+        lats = ds["latitude"].to_numpy()
+        lons = ds["longitude"].to_numpy()
+        shp = lats.shape
+        lons[lons > 180.0] -= 360.
+        m1 = (S <= lats) & (N >= lats) &\
+            (W <= lons) & (E >= lons)
+
+        # Y extent
+        m1_y = np.argwhere(np.sum(m1, axis=1) != 0)
+        y_min = max(m1_y[0][0] - 2, 0)
+        y_max = min(m1_y[-1][0] + 3, shp[0])
+        m1_y = None
+
+        # X extent
+        m1_x = np.argwhere(np.sum(m1, axis=0) != 0)
+        x_min = max(m1_x[0][0] - 2, 0)
+        x_max = min(m1_x[-1][0] + 3, shp[1])
+        m1_x = None
+        m1 = None
+
+        # Coordinate arrays
+        # HRRR GRIB has data in south-up format
+        trans = prof["transform"].to_gdal()
+        xArr = trans[0] + (np.arange(x_min, x_max) + 0.5) * trans[1]
+        yArr = trans[3] + (prof["height"] * trans[5]) - (np.arange(y_min, y_max) + 0.5) * trans[5]
+        lats = lats[y_min:y_max, x_min:x_max]
+        lons = lons[y_min:y_max, x_min:x_max]
+
+        # TODO: Remove this block once we know we handle
+        # different flavors of HRRR correctly
+        # self._proj currently used but these are available
+        # as attrs of ds["t"] for example
+        llhtolcc = Transformer.from_crs(4326, self._proj)
+        res = llhtolcc.transform(lats, lons)
+        print("ERROR in X: ", np.abs(res[0] - xArr[None, :]).max())
+        print("ERROR in Y: ", np.abs(res[1] - yArr[:, None]).max())
+
+        # Data variables
+        t = ds['t'][:, y_min:y_max, x_min:x_max].to_numpy()
+        z = ds['gh'][:, y_min:y_max, x_min:x_max].to_numpy()
+        q = ds['q'][:, y_min:y_max, x_min:x_max].to_numpy()
+        ds.close()
+
+        # Create output dataset
         ds_new = xarray.Dataset(
             data_vars=dict(
-                t= (["y", "x", "level"], np.moveaxis(t, [0, 1, 2], [2, 0, 1])),
-                z= (["y", "x", "level"], np.moveaxis(z, [0, 1, 2], [2, 0, 1])),
-                q= (["y", "x", "level"], np.moveaxis(q, [0, 1, 2], [2, 0, 1])),
-                lons=(["y", "x"], lons),
-                lats=(["y", "x"], lats),
+                t= (["level", "y", "x"], t,
+                    {"grid_mapping": "proj"}),
+                z= (["level", "y", "x"], z,
+                    {"grid_mapping": "proj"}),
+                q= (["level", "y", "x"], q,
+                    {"grid_mapping": "proj"}),
             ),
             coords=dict(
-                level=np.arange(40) + 1, #TODO: is this correct? was 137...
-                x=(["x"], xArr),
-                y=(["y"], yArr),
+                level=(["level"], np.arange(t.shape[0]) + 1,
+                       {"description": "model level",
+                        "axis": "Z"}),
+                x=(["x"], xArr,
+                   {"standard_name": "projection_x_coordinate",
+                    "units": "m",
+                    "axis": "X"}),
+                y=(["y"], yArr,
+                   {"standard_name": "projection_y_coordinate",
+                    "units": "m",
+                    "axis": "Y"}),
             ),
             attrs={
+                'Conventions': 'CF-1.7',
                 'Weather_model':'HRRR',
             }
         )
+
+        # Write projection of output
+        ds_new["proj"] = int()
+        for k, v in self._proj.to_cf().items():
+            ds_new.proj.attrs[k] = v
+
         ds_new.to_netcdf(out)
 
 
@@ -190,4 +249,4 @@ def _download_hrrr_file(self, DATE, out, model='hrrr', product='prs', fxx=0, ver
 
         self._makeDataCubes(pf, out)
 
-        return out
\ No newline at end of file
+        return out
diff --git a/tools/RAiDER/models/weatherModel.py b/tools/RAiDER/models/weatherModel.py
index 7c6298c57..364e6dded 100755
--- a/tools/RAiDER/models/weatherModel.py
+++ b/tools/RAiDER/models/weatherModel.py
@@ -547,24 +547,6 @@ def getProjection(self):
     def getPoints(self):
         return self._xs.copy(), self._ys.copy(), self._zs.copy()
 
-    def getXY_gdal(self, filename):
-        '''
-        Pull the grid info (x,y) from a gdal-readable file
-        '''
-        with rasterio.open(filename) as ds:
-            xSize, ySize = ds.width, ds.height
-            trans = ds.transform.to_gdal()
-
-        # make regular point grid
-        pixelSizeX = trans[1]
-        pixelSizeY = trans[5]
-        eastOrigin = trans[0] + 0.5 * pixelSizeX
-        northOrigin = trans[3] + 0.5 * pixelSizeY
-        xArray = np.arange(eastOrigin, eastOrigin + pixelSizeX * xSize, pixelSizeX)
-        yArray = np.arange(northOrigin, northOrigin + pixelSizeY * ySize, pixelSizeY)
-
-        return xArray, yArray
-
     def _uniform_in_z(self, _zlevels=None):
         '''
         Interpolate all variables to a regular grid in z
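Patch 05 subsets the HRRR GRIB grid by building a boolean mask from the query SNWE bounds and turning it into a padded row/column window (`y_min:y_max`, `x_min:x_max`). The toy sketch below reproduces that index-window logic on a synthetic lat/lon grid; `bounds_to_window` is an illustrative helper, not RAiDER code, and the two-pixel padding matches the margin used in the patch.

```python
import numpy as np


def bounds_to_window(lats, lons, S, N, W, E, pad=2):
    """Return (y_min, y_max, x_min, x_max) covering the SNWE box, padded by a few pixels."""
    mask = (S <= lats) & (lats <= N) & (W <= lons) & (lons <= E)
    rows = np.argwhere(mask.any(axis=1)).ravel()
    cols = np.argwhere(mask.any(axis=0)).ravel()
    y_min = max(rows[0] - pad, 0)
    y_max = min(rows[-1] + pad + 1, lats.shape[0])
    x_min = max(cols[0] - pad, 0)
    x_max = min(cols[-1] + pad + 1, lats.shape[1])
    return y_min, y_max, x_min, x_max


# Synthetic 2D coordinate grids (latitude varies along rows, longitude along columns)
lats, lons = np.meshgrid(np.linspace(30, 50, 21), np.linspace(-130, -100, 31), indexing='ij')
print(bounds_to_window(lats, lons, 35, 40, -120, -110))
```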
From 7c59d9d0a4b465559b6641519074f6a2f796ad6e Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Thu, 10 Nov 2022 22:04:02 +0000
Subject: [PATCH 06/12] Updating HRRR with subsetting and correct conventions

---
 tools/RAiDER/checkArgs.py   |  2 +-
 tools/RAiDER/models/hrrr.py | 57 ++++++++++++++++++++++++++++---------
 2 files changed, 45 insertions(+), 14 deletions(-)

diff --git a/tools/RAiDER/checkArgs.py b/tools/RAiDER/checkArgs.py
index fe8d608f2..774b548f5 100644
--- a/tools/RAiDER/checkArgs.py
+++ b/tools/RAiDER/checkArgs.py
@@ -57,7 +57,7 @@ def checkArgs(args):
             wetFilename, hydroFilename = makeDelayFileNames(
                 d,
                 args.los,
-                'nc',
+                args.raster_format,
                 args.weather_model._dataset.upper(),
                 args.output_directory,
             )
diff --git a/tools/RAiDER/models/hrrr.py b/tools/RAiDER/models/hrrr.py
index f2c2418cf..c0d667841 100644
--- a/tools/RAiDER/models/hrrr.py
+++ b/tools/RAiDER/models/hrrr.py
@@ -95,9 +95,9 @@ def load_weather(self, *args, filename=None, **kwargs):
         yArr = ds['y'].values
         lats = ds['lats'].values
         lons = ds['lons'].values
-        temps = ds['t'].values
-        qs = ds['q'].values
-        geo_hgt = ds['z'].values
+        temps = ds['t'].values.transpose(1, 2, 0)
+        qs = ds['q'].values.transpose(1, 2, 0)
+        geo_hgt = ds['z'].values.transpose(1, 2, 0)
 
         Ny, Nx = lats.shape
 
@@ -119,15 +119,12 @@ def load_weather(self, *args, filename=None, **kwargs):
         self._t = temps
         self._q = qs
         self._p = np.broadcast_to(pl[np.newaxis, np.newaxis, :],
-                                  self._zs.shape)
+                                  geo_hgt.shape)
         self._xs = _xs
         self._ys = _ys
         self._lats = _lats
         self._lons = _lons
 
-        # For some reason z is opposite the others
-        self._p = np.flip(self._p, axis=2)
-
     def _makeDataCubes(self, filename, out=None):
         '''
@@ -149,6 +146,7 @@ def _makeDataCubes(self, filename, out=None):
         # Determine mask based on query bounds
         lats = ds["latitude"].to_numpy()
         lons = ds["longitude"].to_numpy()
+        levels = ds["isobaricInhPa"].to_numpy()
         shp = lats.shape
         lons[lons > 180.0] -= 360.
         m1 = (S <= lats) & (N >= lats) &\
@@ -179,10 +177,10 @@ def _makeDataCubes(self, filename, out=None):
         # different flavors of HRRR correctly
         # self._proj currently used but these are available
         # as attrs of ds["t"] for example
-        llhtolcc = Transformer.from_crs(4326, self._proj)
-        res = llhtolcc.transform(lats, lons)
-        print("ERROR in X: ", np.abs(res[0] - xArr[None, :]).max())
-        print("ERROR in Y: ", np.abs(res[1] - yArr[:, None]).max())
+        # llhtolcc = Transformer.from_crs(4326, self._proj)
+        # res = llhtolcc.transform(lats, lons)
+        # print("ERROR in X: ", np.abs(res[0] - xArr[None, :]).max())
+        # print("ERROR in Y: ", np.abs(res[1] - yArr[:, None]).max())
 
         # Data variables
         t = ds['t'][:, y_min:y_max, x_min:x_max].to_numpy()
@@ -190,6 +188,36 @@ def _makeDataCubes(self, filename, out=None):
         q = ds['q'][:, y_min:y_max, x_min:x_max].to_numpy()
         ds.close()
 
+        # This section is purely for flipping arrays as needed
+        # to match ECMWF reader is doing
+        # All flips are views - no extra memory use
+        # Lon -> From west to east
+        # Lat -> From south to north (ECMWF reads north to south and flips it
+        # load_weather) - So we do south to north here
+        # Pres -> High to Los - (ECWMF does now to high and flips it back) - so
+        # we do high to low
+        # Data is currently in [levels, y, x] order
+        flip_axes = []
+        if levels[-1] > levels[0]:
+            flip_axes.append(0)
+            levels = np.flip(levels)
+
+        if lats[0, 0] > lats[-1, 0]:
+            flip_axes.append(1)
+            lats = np.flip(lats, 0)
+            yArr = np.flip(yArr)
+
+        if lons[0, 0] > lons[0, -1]:
+            flip_axes.append(2)
+            lons = np.flip(lons, 1)
+            xArr = np.flip(xArr)
+
+        flip_axes = tuple(flip_axes)
+        if flip_axes:
+            t = np.flip(t, flip_axes)
+            z = np.flip(z, flip_axes)
+            q = np.flip(q, flip_axes)
+
         # Create output dataset
         ds_new = xarray.Dataset(
             data_vars=dict(
@@ -199,10 +227,13 @@ def _makeDataCubes(self, filename, out=None):
                     {"grid_mapping": "proj"}),
                 q= (["level", "y", "x"], q,
                     {"grid_mapping": "proj"}),
+                lats=(["y", "x"], lats),
+                lons=(["y", "x"], lons),
             ),
             coords=dict(
-                level=(["level"], np.arange(t.shape[0]) + 1,
-                       {"description": "model level",
+                level=(["level"], levels,
+                       {"units": "millibars",
+                        "long_name": "pressure_level",
                         "axis": "Z"}),
                 x=(["x"], xArr,
                    {"standard_name": "projection_x_coordinate",
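Patch 06 reorders the subset arrays so that coordinates run in a consistent direction, collecting the axes to reverse and applying a single multi-axis `np.flip` (which returns views, so no copies are made). A tiny standalone illustration of that mechanism, using made-up toy data:

```python
import numpy as np

# Toy cube in [level, y, x] order; make every coordinate ascending
levels = np.array([1000., 850., 500.])     # currently descending
lats = np.array([40., 39., 38.])           # currently north-to-south
cube = np.arange(3 * 3 * 2, dtype=float).reshape(3, 3, 2)

flip_axes = []
if levels[0] > levels[-1]:
    flip_axes.append(0)
    levels = np.flip(levels)
if lats[0] > lats[-1]:
    flip_axes.append(1)
    lats = np.flip(lats)

if flip_axes:
    # One call reverses several axes at once and returns a view of the data
    cube = np.flip(cube, tuple(flip_axes))

print(levels, lats, cube.shape)
```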
- x=(["x"], xArr), - y=(["y"], yArr), + level=(["level"], np.arange(t.shape[0]) + 1, + {"description": "model level", + "axis": "Z"}), + x=(["x"], xArr, + {"standard_name": "projection_x_coordinate", + "units": "m", + "axis": "X"}), + y=(["y"], yArr, + {"standard_name": "projection_y_coordinate", + "units": "m", + "axis": "Y"}), ), attrs={ + 'Conventions': 'CF-1.7', 'Weather_model':'HRRR', } ) + + # Write projection of output + ds_new["proj"] = int() + for k, v in self._proj.to_cf().items(): + ds_new.proj.attrs[k] = v + ds_new.to_netcdf(out) @@ -190,4 +249,4 @@ def _download_hrrr_file(self, DATE, out, model='hrrr', product='prs', fxx=0, ver self._makeDataCubes(pf, out) - return out \ No newline at end of file + return out diff --git a/tools/RAiDER/models/weatherModel.py b/tools/RAiDER/models/weatherModel.py index 7c6298c57..364e6dded 100755 --- a/tools/RAiDER/models/weatherModel.py +++ b/tools/RAiDER/models/weatherModel.py @@ -547,24 +547,6 @@ def getProjection(self): def getPoints(self): return self._xs.copy(), self._ys.copy(), self._zs.copy() - def getXY_gdal(self, filename): - ''' - Pull the grid info (x,y) from a gdal-readable file - ''' - with rasterio.open(filename) as ds: - xSize, ySize = ds.width, ds.height - trans = ds.transform.to_gdal() - - # make regular point grid - pixelSizeX = trans[1] - pixelSizeY = trans[5] - eastOrigin = trans[0] + 0.5 * pixelSizeX - northOrigin = trans[3] + 0.5 * pixelSizeY - xArray = np.arange(eastOrigin, eastOrigin + pixelSizeX * xSize, pixelSizeX) - yArray = np.arange(northOrigin, northOrigin + pixelSizeY * ySize, pixelSizeY) - - return xArray, yArray - def _uniform_in_z(self, _zlevels=None): ''' Interpolate all variables to a regular grid in z From 7c59d9d0a4b465559b6641519074f6a2f796ad6e Mon Sep 17 00:00:00 2001 From: piyushrpt Date: Thu, 10 Nov 2022 22:04:02 +0000 Subject: [PATCH 06/12] Updating HRRR with subsetting and correct conventions --- tools/RAiDER/checkArgs.py | 2 +- tools/RAiDER/models/hrrr.py | 57 ++++++++++++++++++++++++++++--------- 2 files changed, 45 insertions(+), 14 deletions(-) diff --git a/tools/RAiDER/checkArgs.py b/tools/RAiDER/checkArgs.py index fe8d608f2..774b548f5 100644 --- a/tools/RAiDER/checkArgs.py +++ b/tools/RAiDER/checkArgs.py @@ -57,7 +57,7 @@ def checkArgs(args): wetFilename, hydroFilename = makeDelayFileNames( d, args.los, - 'nc', + args.raster_format, args.weather_model._dataset.upper(), args.output_directory, ) diff --git a/tools/RAiDER/models/hrrr.py b/tools/RAiDER/models/hrrr.py index f2c2418cf..c0d667841 100644 --- a/tools/RAiDER/models/hrrr.py +++ b/tools/RAiDER/models/hrrr.py @@ -95,9 +95,9 @@ def load_weather(self, *args, filename=None, **kwargs): yArr = ds['y'].values lats = ds['lats'].values lons = ds['lons'].values - temps = ds['t'].values - qs = ds['q'].values - geo_hgt = ds['z'].values + temps = ds['t'].values.transpose(1, 2, 0) + qs = ds['q'].values.transpose(1, 2, 0) + geo_hgt = ds['z'].values.transpose(1, 2, 0) Ny, Nx = lats.shape @@ -119,15 +119,12 @@ def load_weather(self, *args, filename=None, **kwargs): self._t = temps self._q = qs self._p = np.broadcast_to(pl[np.newaxis, np.newaxis, :], - self._zs.shape) + geo_hgt.shape) self._xs = _xs self._ys = _ys self._lats = _lats self._lons = _lons - # For some reason z is opposite the others - self._p = np.flip(self._p, axis=2) - def _makeDataCubes(self, filename, out=None): ''' @@ -149,6 +146,7 @@ def _makeDataCubes(self, filename, out=None): # Determine mask based on query bounds lats = ds["latitude"].to_numpy() lons = ds["longitude"].to_numpy() + 
levels = ds["isobaricInhPa"].to_numpy() shp = lats.shape lons[lons > 180.0] -= 360. m1 = (S <= lats) & (N >= lats) &\ @@ -179,10 +177,10 @@ def _makeDataCubes(self, filename, out=None): # different flavors of HRRR correctly # self._proj currently used but these are available # as attrs of ds["t"] for example - llhtolcc = Transformer.from_crs(4326, self._proj) - res = llhtolcc.transform(lats, lons) - print("ERROR in X: ", np.abs(res[0] - xArr[None, :]).max()) - print("ERROR in Y: ", np.abs(res[1] - yArr[:, None]).max()) + # llhtolcc = Transformer.from_crs(4326, self._proj) + # res = llhtolcc.transform(lats, lons) + # print("ERROR in X: ", np.abs(res[0] - xArr[None, :]).max()) + # print("ERROR in Y: ", np.abs(res[1] - yArr[:, None]).max()) # Data variables t = ds['t'][:, y_min:y_max, x_min:x_max].to_numpy() @@ -190,6 +188,36 @@ def _makeDataCubes(self, filename, out=None): q = ds['q'][:, y_min:y_max, x_min:x_max].to_numpy() ds.close() + # This section is purely for flipping arrays as needed + # to match ECMWF reader is doing + # All flips are views - no extra memory use + # Lon -> From west to east + # Lat -> From south to north (ECMWF reads north to south and flips it + # load_weather) - So we do south to north here + # Pres -> High to Los - (ECWMF does now to high and flips it back) - so + # we do high to low + # Data is currently in [levels, y, x] order + flip_axes = [] + if levels[-1] > levels[0]: + flip_axes.append(0) + levels = np.flip(levels) + + if lats[0, 0] > lats[-1, 0]: + flip_axes.append(1) + lats = np.flip(lats, 0) + yArr = np.flip(yArr) + + if lons[0, 0] > lons[0, -1]: + flip_axes.append(2) + lons = np.flip(lons, 1) + xArr = np.flip(xArr) + + flip_axes = tuple(flip_axes) + if flip_axes: + t = np.flip(t, flip_axes) + z = np.flip(z, flip_axes) + q = np.flip(q, flip_axes) + # Create output dataset ds_new = xarray.Dataset( data_vars=dict( @@ -199,10 +227,13 @@ def _makeDataCubes(self, filename, out=None): {"grid_mapping": "proj"}), q= (["level", "y", "x"], q, {"grid_mapping": "proj"}), + lats=(["y", "x"], lats), + lons=(["y", "x"], lons), ), coords=dict( - level=(["level"], np.arange(t.shape[0]) + 1, - {"description": "model level", + level=(["level"], levels, + {"units": "millibars", + "long_name": "pressure_level", "axis": "Z"}), x=(["x"], xArr, {"standard_name": "projection_x_coordinate", From a416304278e9278c08bdea27db2a21d3a7c1c043 Mon Sep 17 00:00:00 2001 From: piyushrpt Date: Thu, 10 Nov 2022 22:14:32 +0000 Subject: [PATCH 07/12] Adding support to write netcdf in other projections --- tools/RAiDER/utilFcns.py | 41 ++++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/tools/RAiDER/utilFcns.py b/tools/RAiDER/utilFcns.py index b04c713e9..808f39c08 100755 --- a/tools/RAiDER/utilFcns.py +++ b/tools/RAiDER/utilFcns.py @@ -844,33 +844,33 @@ def write2NETCDF4core(nc_outfile, dimension_dict, dataset_dict, tran, mapping_na from osgeo import osr if mapping_name == 'WGS84': - epsg = 4326 crs = pyproj.CRS.from_epsg(epsg) grid_mapping = 'WGS84' # need to set this as an attribute for the image variables - datatype = np.dtype('S1') - dimensions = () + else: + crs = pyproj.CRS.from_wkt(mapping_name) + grid_mapping = 'CRS' - var = nc_outfile.createVariable( - mapping_name, - datatype, - dimensions, - fill_value=None - ) - # variable made, now add attributes + datatype = np.dtype('S1') + dimensions = () - for k, v in crs.to_cf().items(): - var.setncattr(k, v) + var = nc_outfile.createVariable( + grid_mapping, + datatype, + dimensions, + 
From 174fc834a6d75bf1f2fc89ea95a0a414e57457f9 Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Fri, 11 Nov 2022 00:19:06 +0000
Subject: [PATCH 10/12] Working HRRR Zenith Delays

---
 tools/RAiDER/delay.py | 42 ++++++++++++++++++++++++++++++++----------
 1 file changed, 32 insertions(+), 10 deletions(-)

diff --git a/tools/RAiDER/delay.py b/tools/RAiDER/delay.py
index aa9569ab9..9502d4523 100755
--- a/tools/RAiDER/delay.py
+++ b/tools/RAiDER/delay.py
@@ -112,7 +112,9 @@ def tropo_delay(dt, wetFilename, hydroFilename, args):
     wm_proj = rio_profile(f"netcdf:{weather_model_file}:t")["crs"]
     if wm_proj is None:
         print("WARNING: I can't find a CRS in the weather model file, so I will assume you are using WGS84")
-        wm_proj = 4326
+        wm_proj = CRS.from_epsg(4326)
+    else:
+        wm_proj = CRS.from_wkt(wm_proj.to_wkt())
 
     ####################################################################
     ####################################################################
@@ -137,7 +139,6 @@ def tropo_delay(dt, wetFilename, hydroFilename, args):
             hgts,
             pnt_proj,
             wm_proj,
-            flip_xy=pnt_proj.axis_info[0].abbrev in ["X","E","Lon"]
         )
     else:
         # interpolators require y, x, z
@@ -276,6 +277,8 @@ def tropo_delay_cube(dt, wf, args, model_file=None):
     if wm_proj is None:
         print("WARNING: I can't find a CRS in the weather model file, so I will assume you are using WGS84")
         wm_proj = CRS.from_epsg(4326)
+    else:
+        wm_proj = CRS.from_wkt(wm_proj.to_wkt())
 
     # Build the output grid
     zpts = np.array(heights)
@@ -325,7 +328,7 @@ def tropo_delay_cube(dt, wf, args, model_file=None):
             # Create worker pool
 
             # Loop over heights
-            print("Testing")
+            raise NotImplementedError
 
     # Write output file
     # Modify this as needed for NISAR / other projects
@@ -411,7 +414,7 @@ def checkQueryPntsFile(pnts_file, query_shape):
     return write_flag
 
 
-def transformPoints(lats, lons, hgts, old_proj, new_proj,flip_xy=False):
+def transformPoints(lats, lons, hgts, old_proj, new_proj):
     '''
     Transform lat/lon/hgt data to an array of points in a new projection
 
@@ -423,17 +426,26 @@ def transformPoints(lats, lons, hgts, old_proj, new_proj,flip_xy=False):
     hgts - Ellipsoidal height in meters
     old_proj - the original projection of the points
     new_proj - the new projection in which to return the points
-    flip_xy - Boolean flag to specify XY order of old_proj
 
     Returns
     -------
-    the array of query points in the weather model coordinate system
+    the array of query points in the weather model coordinate system (YX)
     '''
     t = Transformer.from_crs(old_proj, new_proj)
-    if flip_xy:
-        return np.stack(t.transform(lons, lats, hgts), axis=-1).T
+
+    # Flags for flipping inputs or outputs
+    in_flip = old_proj.axis_info[0].direction == "east"
+    out_flip = new_proj.axis_info[0].direction == "east"
+
+    if in_flip:
+        res = t.transform(lons, lats, hgts)
     else:
-        return np.stack(t.transform(lats, lons, hgts), axis=-1).T
+        res = t.transform(lats, lons, hgts)
+
+    if out_flip:
+        return np.stack((res[1], res[0], res[2]), axis=-1).T
+    else:
+        return np.stack(res, axis=-1).T
 
 
 def build_cube(xpts, ypts, zpts, model_crs, pts_crs, interpolators):
@@ -451,6 +463,17 @@ def build_cube(xpts, ypts, zpts, model_crs, pts_crs, interpolators):
     zmin = min(interpolators[0].grid[2])
     zmax = max(interpolators[0].grid[2])
 
+    # print("Output grid: ")
+    # print("crs: ", pts_crs)
+    # print("X: ", xpts[0], xpts[-1])
+    # print("Y: ", ypts[0], ypts[-1])
+
+    # ii = interpolators[0]
+    # print("Model grid: ")
+    # print("crs: ", model_crs)
+    # print("X: ", ii.grid[1][0], ii.grid[1][-1])
+    # print("Y: ", ii.grid[0][0], ii.grid[0][-1])
+
     # Loop over heights and compute delays
     for ii, ht in enumerate(zpts):
 
@@ -463,7 +486,6 @@ def build_cube(xpts, ypts, zpts, model_crs, pts_crs, interpolators):
             transformPoints(
                 yy, xx, np.full(yy.shape, ht),
                 pts_crs, model_crs,
-                flip_xy=pts_crs.axis_info[0].abbrev in ["X", "E", "Lon"]
             ), (2, 1, 0))
         else:
            pts = np.stack([yy, xx, np.full(yy.shape, ht)], axis=-1)
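Patch 10 replaces the `flip_xy` keyword with checks of `axis_info[0].direction`, because pyproj transformers honor each CRS's authority-defined axis order (latitude first for EPSG:4326, easting first for projected systems). The short sketch below shows the two equivalent ways of coping with that: inspecting `axis_info`, as the patch does, or constructing the transformer with `always_xy=True`. The EPSG:32611 target is only an example.

```python
from pyproj import CRS, Transformer

wgs84 = CRS.from_epsg(4326)     # authority axis order: latitude, longitude
utm11 = CRS.from_epsg(32611)    # authority axis order: easting, northing

# Option used in the patch: inspect the CRS axis direction to decide input order
print([a.direction for a in wgs84.axis_info])   # ['north', 'east']
print([a.direction for a in utm11.axis_info])   # ['east', 'north']

t = Transformer.from_crs(wgs84, utm11)
e, n = t.transform(34.0, -118.0)                # lat, lon in -> easting, northing out

# Alternative: force x,y (lon,lat) order on both sides
t_xy = Transformer.from_crs(wgs84, utm11, always_xy=True)
e2, n2 = t_xy.transform(-118.0, 34.0)
print(abs(e - e2) < 1e-6 and abs(n - n2) < 1e-6)   # True
```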
From c1340aaadeab3b6e6df3e4f820c74d89713b2415 Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Fri, 11 Nov 2022 02:34:43 +0000
Subject: [PATCH 11/12] Working ray tracing for HRRR

---
 tools/RAiDER/delay.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/tools/RAiDER/delay.py b/tools/RAiDER/delay.py
index 9502d4523..18a7cfd4d 100755
--- a/tools/RAiDER/delay.py
+++ b/tools/RAiDER/delay.py
@@ -548,6 +548,8 @@ def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs,
     cube_to_llh = Transformer.from_crs(pts_crs, epsg4326, always_xy=True)
     ecef_to_model = Transformer.from_crs(CRS.from_epsg(4978), model_crs)
 
+    # For calling the interpolators
+    flip_xy = model_crs.axis_info[0].direction == "east"
 
     # Loop over heights of output cube and compute delays
     for hh, ht in enumerate(zpts):
@@ -668,12 +670,17 @@ def build_cube_ray(xpts, ypts, zpts, ref_time, orbit_file, look_dir, model_crs,
                 pts_xyz = low_xyz + ff * (high_xyz - low_xyz)
 
                 # Ray point in model coordinates
-                pts = np.stack(
-                    ecef_to_model.transform(
-                        pts_xyz[..., 0],
-                        pts_xyz[..., 1],
-                        pts_xyz[..., 2]
-                    ), axis=-1)
+                pts = ecef_to_model.transform(
+                    pts_xyz[..., 0],
+                    pts_xyz[..., 1],
+                    pts_xyz[..., 2]
+                )
+
+                # Order for the interpolator
+                if flip_xy:
+                    pts = np.stack((pts[1], pts[0], pts[2]), axis=-1)
+                else:
+                    pts = np.stack(pts, axis=-1)
 
                 # Trapezoidal integration with scaling
                 wt = 0.5 if findex in [0, fracs.size-1] else 1.0
From 60ee31b4a7fb9b30d36a40972858b6fd5f18e569 Mon Sep 17 00:00:00 2001
From: piyushrpt
Date: Fri, 11 Nov 2022 03:08:12 +0000
Subject: [PATCH 12/12] Addressing feedback

---
 tools/RAiDER/utilFcns.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/tools/RAiDER/utilFcns.py b/tools/RAiDER/utilFcns.py
index 808f39c08..704746dd3 100755
--- a/tools/RAiDER/utilFcns.py
+++ b/tools/RAiDER/utilFcns.py
@@ -866,10 +866,6 @@ def write2NETCDF4core(nc_outfile, dimension_dict, dataset_dict, tran, mapping_na
     for k, v in crs.to_cf().items():
         var.setncattr(k, v)
 
-    # These might not be needed as to_cf adds a crs_wkt
-    # var.setncattr('spatial_ref', crs.to_wkt())
-    # var.setncattr('spatial_proj4', crs.to_proj4())
-    # var.setncattr('spatial_epsg', epsg)
     var.setncattr('GeoTransform', ' '.join(str(x) for x in tran))  # note this has pixel size in it - set explicitly above
 
     for dim in dimension_dict: