Merge pull request #43 from schism-dev/bugfix
Bugfix
cuill authored Jun 28, 2022
2 parents bac8635 + def1f09 commit c1fef38
Showing 6 changed files with 132 additions and 84 deletions.
74 changes: 41 additions & 33 deletions pyschism/forcing/hycom/hycom2schism.py
@@ -23,6 +23,20 @@

logger = logging.getLogger(__name__)

def convert_longitude(ds):
lon_name = 'lon'
ds['_lon_adjusted'] = xr.where(
ds[lon_name] > 180,
ds[lon_name] - 360,
ds[lon_name])
ds = (
ds.swap_dims({lon_name: '_lon_adjusted'})
.sel(**{'_lon_adjusted': sorted(ds._lon_adjusted)})
.drop(lon_name)
)
ds = ds.rename({'_lon_adjusted': lon_name})
return ds
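
For context, a minimal sketch (not part of the commit) of what convert_longitude does on a toy dataset: HYCOM serves longitude in 0-360, and the helper wraps it into -180..180 and re-sorts the axis. It assumes an xarray version where Dataset.drop is still available, and the helper above in scope.

import numpy as np
import xarray as xr

# Toy dataset on a 0-360 longitude axis, as served by HYCOM.
ds = xr.Dataset(
    {'ssh': (('lon',), np.arange(4.0))},
    coords={'lon': [0.0, 90.0, 180.0, 270.0]})
out = convert_longitude(ds)
print(out.lon.values)  # [-90.   0.  90. 180.] -- 270 wrapped to -90, axis re-sorted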

def get_database(date, Bbox=None):
if date >= datetime(2018, 12, 4):
database = f'GLBy0.08/expt_93.0'
@@ -415,7 +429,6 @@ def fetch_data(self, outdir: Union[str, os.PathLike], start_date, rnday, elev2D=
#timeseries_uv[it,:,:,1]=vvel_int

ds.close()

logger.info(f'Writing *th.nc takes {time()-t0} seconds')

class Nudge:
@@ -671,7 +684,11 @@ class DownloadHycom:

def __init__(self, hgrid):

self.bbox = hgrid.bbox
xmin, xmax = hgrid.coords[:, 0].min(), hgrid.coords[:, 0].max()
ymin, ymax = hgrid.coords[:, 1].min(), hgrid.coords[:, 1].max()
xmin = xmin + 360. if xmin < 0 else xmin
xmax = xmax + 360. if xmax < 0 else xmax
self.bbox = Bbox.from_extents(xmin, ymin, xmax, ymax)
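
A small illustration of the wrap applied above (hypothetical extents, not from the commit): meshes west of Greenwich carry negative longitudes, while the HYCOM grids index longitude in 0-360, so the extents are shifted before building the Bbox.

# e.g. a US East Coast hgrid extent in -180..180 coordinates
xmin, xmax = -98.0, -52.0
xmin = xmin + 360. if xmin < 0 else xmin
xmax = xmax + 360. if xmax < 0 else xmax
print(xmin, xmax)  # 262.0 308.0 -- now in HYCOM's 0-360 convention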

def fetch_data(self, date):

@@ -686,16 +703,13 @@ def fetch_data(self, date):
foutname = f'SSH_{date.strftime("%Y%m%d")}.nc'
logger.info(f'filename is {foutname}')
ds = xr.open_dataset(url_ssh)
ds1 = ds.rename_dims({'lon':'xlon'})
ds2 = ds1.rename_dims({'lat':'ylat'})
ds3 = ds2.rename_vars({'lat':'ylat'})
ds4 = ds3.rename_vars({'lon':'xlon'})
ds4.to_netcdf(foutname, 'w', 'NETCDF3_CLASSIC', unlimited_dims='time')
ds = convert_longitude(ds)
ds = ds.rename_dims({'lon':'xlon'})
ds = ds.rename_dims({'lat':'ylat'})
ds = ds.rename_vars({'lat':'ylat'})
ds = ds.rename_vars({'lon':'xlon'})
ds.to_netcdf(foutname, 'w', 'NETCDF3_CLASSIC', unlimited_dims='time')
ds.close()
ds1.close()
ds2.close()
ds3.close()
ds4.close()

url_uv = f'https://tds.hycom.org/thredds/dodsC/{database}?lat[{lat_idx1}:1:{lat_idx2}],' + \
f'lon[{lon_idx1}:1:{lon_idx2}],depth[0:1:-1],time[{time_idx}],' + \
@@ -705,16 +719,13 @@ def fetch_data(self, date):
foutname = f'UV_{date.strftime("%Y%m%d")}.nc'
logger.info(f'filename is {foutname}')
ds = xr.open_dataset(url_uv)
ds1 = ds.rename_dims({'lon':'xlon'})
ds2 = ds1.rename_dims({'lat':'ylat'})
ds3 = ds2.rename_vars({'lat':'ylat'})
ds4 = ds3.rename_vars({'lon':'xlon'})
ds4.to_netcdf(foutname, 'w', 'NETCDF3_CLASSIC', unlimited_dims='time')
ds = convert_longitude(ds)
ds = ds.rename_dims({'lon':'xlon'})
ds = ds.rename_dims({'lat':'ylat'})
ds = ds.rename_vars({'lat':'ylat'})
ds = ds.rename_vars({'lon':'xlon'})
ds.to_netcdf(foutname, 'w', 'NETCDF3_CLASSIC', unlimited_dims='time')
ds.close()
ds1.close()
ds2.close()
ds3.close()
ds4.close()

url_ts = f'https://tds.hycom.org/thredds/dodsC/{database}?lat[{lat_idx1}:1:{lat_idx2}],' + \
f'lon[{lon_idx1}:1:{lon_idx2}],depth[0:1:-1],time[{time_idx}],' + \
@@ -725,6 +736,7 @@ def fetch_data(self, date):
logger.info(f'filename is {foutname}')

ds = xr.open_dataset(url_ts)

temp = ds.water_temp.values
salt = ds.salinity.values
dep = ds.depth.values
@@ -733,9 +745,9 @@ def fetch_data(self, date):
ptemp = ConvertTemp(salt, temp, dep)

#drop water_temp variable and add new temperature variable
ds1 = ds.drop('water_temp')
ds1['temperature']=(['time','depth','lat','lon'], ptemp)
ds1.temperature.attrs = {
ds = ds.drop('water_temp')
ds['temperature']=(['time','depth','lat','lon'], ptemp)
ds.temperature.attrs = {
'long_name': 'Sea water potential temperature',
'standard_name': 'sea_water_potential_temperature',
'units': 'degC'
@@ -744,14 +756,10 @@ def fetch_data(self, date):
#ds.assign(water_temp2=ptemp)
#ds.assign.attrs = ds.water_temp.attrs

ds2 = ds1.rename_dims({'lon':'xlon'})
ds3 = ds2.rename_dims({'lat':'ylat'})
ds4 = ds3.rename_vars({'lat':'ylat'})
ds5 = ds4.rename_vars({'lon':'xlon'})
ds5.to_netcdf(foutname, 'w', unlimited_dims='time', encoding={'temperature':{'dtype': 'h', '_FillValue': -30000.,'scale_factor': 0.001, 'add_offset': 20., 'missing_value': -30000.}})
ds = convert_longitude(ds)
ds = ds.rename_dims({'lon':'xlon'})
ds = ds.rename_dims({'lat':'ylat'})
ds = ds.rename_vars({'lat':'ylat'})
ds = ds.rename_vars({'lon':'xlon'})
ds.to_netcdf(foutname, 'w', unlimited_dims='time', encoding={'temperature':{'dtype': 'h', '_FillValue': -30000.,'scale_factor': 0.001, 'add_offset': 20., 'missing_value': -30000.}})
ds.close()
ds1.close()
ds2.close()
ds3.close()
ds4.close()
ds5.close()
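
A side note on the to_netcdf encoding above: temperature is packed into 16-bit integers with scale_factor 0.001 and add_offset 20., so a stored short s decodes to s*0.001 + 20 degC, covering roughly -12.7 to 52.7 degC at millidegree resolution (with -30000 reserved as the fill value). A sketch of the round trip (not part of the commit):

import numpy as np

scale_factor, add_offset, fill = 0.001, 20., -30000.
temp = np.array([3.456, 28.901])                  # degC, e.g. potential temperature
packed = np.round((temp - add_offset) / scale_factor).astype('int16')
print(packed)                                     # [-16544   8901]
print(packed * scale_factor + add_offset)         # [ 3.456 28.901] -- recovered exactly
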
30 changes: 18 additions & 12 deletions pyschism/forcing/nws/nws2/era5.py
@@ -89,10 +89,10 @@ def xy_grid(self):
lon.append(x)
return np.meshgrid(np.array(lon), self.lat)

def put_sflux_fields(iday, date, timevector, ds, nx_grid, ny_grid, air, rad, prc, OUTDIR):
def put_sflux_fields(iday, date, timevector, ds, nx_grid, ny_grid, air, rad, prc, output_interval, OUTDIR):
rt=pd.to_datetime(str(date))
idx=np.where(rt == timevector)[0].item()
times=[i/24 for i in np.arange(0, 25, 3)]
times=[i/24 for i in np.arange(0, 25, output_interval)]

if air is True:
with Dataset(OUTDIR / "sflux_air_1.{:04}.nc".format(iday+1), 'w', format='NETCDF3_CLASSIC') as dst:
@@ -128,7 +128,7 @@ def put_sflux_fields(iday, date, timevector, ds, nx_grid, ny_grid, air, rad, prc
dst['prmsl'].long_name = "Pressure reduced to MSL"
dst['prmsl'].standard_name = "air_pressure_at_sea_level"
dst['prmsl'].units = "Pa"
dst['prmsl'][:,:,:]=ds['msl'][idx:idx+26:3,::-1,:]
dst['prmsl'][:,:,:]=ds['msl'][idx:idx+25:output_interval, ::-1, :]

# spfh
dst.createVariable('spfh', 'f4', ('time', 'ny_grid', 'nx_grid'))
@@ -137,8 +137,8 @@ def put_sflux_fields(iday, date, timevector, ds, nx_grid, ny_grid, air, rad, prc
dst['spfh'].standard_name = "specific_humidity"
dst['spfh'].units = "1"
#convert dewpoint to specific humidity
d2m = ds['d2m'][idx:idx+26:3,::-1,:]
msl = ds['msl'][idx:idx+26:3,::-1,:]
d2m = ds['d2m'][idx:idx+25:output_interval, ::-1, :]
msl = ds['msl'][idx:idx+25:output_interval, ::-1, :]
Td = d2m - 273.15
e1 = 6.112*np.exp((17.67*Td)/(Td + 243.5))
spfh = (0.622*e1)/(msl*0.01 - (0.378*e1))
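
The dewpoint-to-specific-humidity conversion above uses the Magnus form of the saturation vapor pressure (e1 in hPa, with msl converted from Pa to hPa by the 0.01 factor). A quick numerical check (illustrative values, not from the commit):

import numpy as np

d2m, msl = 293.15, 101325.0                  # 20 degC dewpoint, standard sea-level pressure
Td = d2m - 273.15
e1 = 6.112*np.exp((17.67*Td)/(Td + 243.5))   # ~23.4 hPa saturation vapor pressure at Td
spfh = (0.622*e1)/(msl*0.01 - (0.378*e1))
print(spfh)                                  # ~0.0145 kg/kg, i.e. ~14.5 g/kg
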
@@ -149,23 +149,23 @@ def put_sflux_fields(iday, date, timevector, ds, nx_grid, ny_grid, air, rad, prc
dst['stmp'].long_name = "Surface Air Temperature (2m AGL)"
dst['stmp'].standard_name = "air_temperature"
dst['stmp'].units = "K"
dst['stmp'][:,:,:]=ds['t2m'][idx:idx+26:3,::-1,:]
dst['stmp'][:,:,:]=ds['t2m'][idx:idx+25:output_interval, ::-1, :]

# uwind
dst.createVariable('uwind', 'f4', ('time', 'ny_grid', 'nx_grid'))
dst['uwind'].long_name = "Surface Eastward Air Velocity "\
"(10m AGL)"
dst['uwind'].standard_name = "eastward_wind"
dst['uwind'].units = "m/s"
dst['uwind'][:,:,:]=ds['u10'][idx:idx+26:3,::-1,:]
dst['uwind'][:,:,:]=ds['u10'][idx:idx+25:output_interval, ::-1, :]

# vwind
dst.createVariable('vwind', 'f4', ('time', 'ny_grid', 'nx_grid'))
dst['vwind'].long_name = "Surface Northward Air Velocity "\
"(10m AGL)"
dst['vwind'].standard_name = "northward_wind"
dst['vwind'].units = "m/s"
dst['vwind'][:,:,:]=ds['v10'][idx:idx+26:3,::-1,:]
dst['vwind'][:,:,:]=ds['v10'][idx:idx+25:output_interval, ::-1, :]

if prc is True:
with Dataset(OUTDIR / "sflux_prc_1.{:04}.nc".format(iday+1), 'w', format='NETCDF3_CLASSIC') as dst:
@@ -201,7 +201,7 @@ def put_sflux_fields(iday, date, timevector, ds, nx_grid, ny_grid, air, rad, prc
dst['prate'].long_name = "Surface Precipitation Rate"
dst['prate'].standard_name = "precipitation_flux"
dst['prate'].units = "kg/m^2/s"
dst['prate'][:,:,:]=ds['mtpr'][idx:idx+26:3,::-1,:]
dst['prate'][:,:,:]=ds['mtpr'][idx:idx+25:output_interval, ::-1, :]

if rad is True:
with Dataset(OUTDIR / "sflux_rad_1.{:04}.nc".format(iday+1), 'w', format='NETCDF3_CLASSIC') as dst:
@@ -237,14 +237,14 @@ def put_sflux_fields(iday, date, timevector, ds, nx_grid, ny_grid, air, rad, prc
dst['dlwrf'].long_name = "Downward Long Wave Radiation Flux"
dst['dlwrf'].standard_name = "surface_downwelling_longwave_flux_in_air"
dst['dlwrf'].units = "W/m^2"
dst['dlwrf'][:,:,:]=ds['msdwlwrf'][idx:idx+26:3,::-1,:]
dst['dlwrf'][:,:,:]=ds['msdwlwrf'][idx:idx+25:output_interval, ::-1, :]

# dswrf
dst.createVariable('dswrf', 'f4', ('time', 'ny_grid', 'nx_grid'))
dst['dswrf'].long_name = "Downward Short Wave Radiation Flux"
dst['dswrf'].standard_name = "surface_downwelling_shortwave_flux_in_air"
dst['dswrf'].units = "W/m^2"
dst['dswrf'][:,:,:]=ds['msdwswrf'][idx:idx+26:3,::-1,:]
dst['dswrf'][:,:,:]=ds['msdwswrf'][idx:idx+25:output_interval, ::-1, :]
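
A note on the new output_interval parameter used throughout put_sflux_fields: ERA5 is hourly, and the slice idx:idx+25:output_interval picks the frames spanning hours 0-24 of the day at the requested stride, matching times = [i/24 for i in np.arange(0, 25, output_interval)]. A minimal sketch (not part of the commit):

import numpy as np

hours = np.arange(0, 48)            # toy hourly ERA5 time axis
idx = 0
print(hours[idx:idx+25:1])          # output_interval=1 -> 25 hourly frames, hours 0..24
print(hours[idx:idx+25:3])          # output_interval=3 -> 9 frames, hours 0, 3, ..., 24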


class ERA5(SfluxDataset):
@@ -273,6 +273,7 @@ def write(
rad: bool = True,
bbox: Bbox = None,
overwrite: bool=False,
output_interval: int = 1,
):
self.start_date=start_date
self.rnday=rnday
@@ -284,6 +285,10 @@ def write(
np.timedelta64(1, 'D'),
dtype='datetime64')}

#write sflux_inputs.txt
with open(f'{outdir}/sflux_inputs.txt', 'w') as f:
f.write('&sflux_inputs\n/\n')

logger.info('Start downloading ERA5')
self.inventory = ERA5DataInventory(
self.start_date,
@@ -300,4 +305,5 @@ def write(
times=nc4.num2date(time1,units=time1.units,only_use_cftime_datetimes=False)

for iday, date in enumerate(dates):
put_sflux_fields(iday, date, times, ds, nx_grid, ny_grid, air=air, rad=rad, prc=prc, OUTDIR=outdir)
put_sflux_fields(iday, date, times, ds, nx_grid, ny_grid, air=air, rad=rad, prc=prc, output_interval=output_interval, OUTDIR=outdir)
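
A hedged usage sketch of the updated write signature. The constructor call and paths here are assumptions; only the parameters visible in this diff (start_date, rnday, air, prc, rad, overwrite, output_interval, outdir) are certain.

from datetime import datetime
from pyschism.forcing.nws.nws2.era5 import ERA5

# Hypothetical call: writes sflux_inputs.txt plus daily sflux_{air,prc,rad} files.
era5 = ERA5()
era5.write(outdir='./sflux', start_date=datetime(2022, 6, 1), rnday=3,
           air=True, prc=True, rad=True, output_interval=1, overwrite=True)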

61 changes: 41 additions & 20 deletions pyschism/forcing/source_sink/nwm.py
@@ -113,6 +113,8 @@ def __init__(self, hgrid: Gr3, nwm_file=None, workers=-1):
raise IOError(
"No National Water model intersections found on the mesh.")
intersection = gpd.GeoDataFrame(data, crs=hgrid.crs)
#TODO: add exporting intersection as an option
#intersection.to_file('intersections.shp')
del data

# 2) Generate element centroid KDTree
@@ -370,12 +372,14 @@ def __new__(
return AWSHindcastInventory.__new__(cls)

# GOOGLEHindcastInventory -> January 2019 through 30 days earlier than today
# data before April 20, 2021 (including April 20) is at a 3-hr interval; after that it is hourly
# Missing data 20211231, 20220101, 20220102, 20220103
elif start_date >= dates.localize_datetime(
datetime(2021, 1, 1, 0, 0)
) and start_date + rnday < dates.nearest_zulu() - timedelta(days=30):
) and start_date + rnday < dates.nearest_zulu() - timedelta(days=2):
return GOOGLEHindcastInventory.__new__(cls)

elif start_date >= dates.nearest_zulu() - timedelta(days=30):
elif start_date >= dates.nearest_zulu() - timedelta(days=2):
return AWSForecastInventory.__new__(cls)

else:
@@ -561,15 +565,14 @@ def __init__(
self,
start_date: datetime = None,
rnday: Union[int, float, timedelta] = timedelta(days=5.0),
product='analysis_assim.channel_rt',
product='medium_range_mem1',
verbose=False,
fallback=True,
cache=None,
):

self.start_date = dates.nearest_cycle() if start_date is None \
else dates.nearest_cycle(dates.localize_datetime(start_date))
# self.start_date = self.start_date.replace(tzinfo=None)
self.rnday = rnday if isinstance(rnday, timedelta) \
else timedelta(days=rnday)
self.product = product
@@ -588,37 +591,54 @@ def __init__(
dtype='datetime64')

for requested_time, _ in self._files.items():
#print(f'Requesting NWM data for time {requested_time}')
print(f'Requesting NWM data for time {requested_time}')

logger.info(f'Requesting NWM data for time {requested_time}')

self._files[requested_time] = self.request_data(requested_time)
if requested_time.hour == 0:
requested_time2 = requested_time - timedelta(days=1)
else:
requested_time2 = requested_time

def request_data(self, request_time):
self._files[requested_time] = self.request_data(requested_time, requested_time2)

def request_data(self, request_time, request_time2):

fname = self.tmpdir / f'nwm.t00z.{self.product[:12]}.channel_rt_1.' \
f'{request_time.strftime("%Y%m%d%H")}.conus.nc'
#logger.info(f'fname is {fname}')

fname = self.tmpdir / f'{request_time.strftime("%Y%m%d%H")}00.CHRTOUT_DOMAIN1.comp'
if fname.is_file():
cached_file = list(self.tmpdir.glob(f'**/{fname.name}'))
if len(cached_file) == 1:
fname = cached_file[0]
logger.info(f'Use cached file {fname}')
else:

fname = f'./analysis_assim/{request_time.strftime("%Y%m%d%H")}00.CHRTOUT_DOMAIN1.comp'

logger.info(f'Downloading file {request_time}, ')
#nwm.20180917/forcing_medium_range/nwm.t12z.medium_range.forcing.f160.conus.nc
#https://storage.googleapis.com/national-water-model/nwm.20210409/analysis_assim/nwm.t00z.analysis_assim.channel_rt.tm01.conus.nc
url=f'https://storage.googleapis.com/national-water-model/nwm.{request_time.strftime("%Y%m%d")}' \
f'/analysis_assim/nwm.t{request_time.strftime("%H")}z.analysis_assim.channel_rt.tm00.conus.nc'
logger.info(f'{url}')
else:

fname = f'{self.start_date.strftime("%Y%m%d")}/nwm.t00z.' \
f'{self.product[:12]}.channel_rt_1.{request_time.strftime("%Y%m%d%H")}.conus.nc'

logger.info(f'Downloading file {request_time}, ')

it = request_time.strftime("%H")
if it == '00':
logger.info(f'Requesting data at 00Z from yesterday!')
it = str(int(it) + 24)
it = it.zfill(3)

url = f'https://storage.googleapis.com/national-water-model/nwm.{request_time2.strftime("%Y%m%d")}' \
f'/{self.product}/nwm.t00z.{self.product[:12]}.channel_rt_1.f{it}.conus.nc'
logger.info(f'{url}')
try:
wget.download(url, fname)
except:
logger.info(f'No data for {request_time}!')

return fname

@property
def output_interval(self) -> timedelta:
return {
'analysis_assim.channel_rt': timedelta(hours=1)
'medium_range_mem1': timedelta(hours=3)
}[self.product]
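
To make the 00Z handling above concrete, a sketch (not part of the commit) of the file GOOGLEHindcastInventory ends up requesting: a 00Z timestamp is served as forecast hour f024 of the previous day's 00Z medium_range cycle, since self.product[:12] == 'medium_range'.

from datetime import datetime, timedelta

request_time = datetime(2022, 6, 28, 0)
request_time2 = request_time - timedelta(days=1) if request_time.hour == 0 else request_time
it = request_time.strftime("%H")
if it == '00':
    it = str(int(it) + 24)          # 00Z -> f024 of yesterday's cycle
it = it.zfill(3)
url = (f'https://storage.googleapis.com/national-water-model/'
       f'nwm.{request_time2.strftime("%Y%m%d")}/medium_range_mem1/'
       f'nwm.t00z.medium_range.channel_rt_1.f{it}.conus.nc')
print(url)  # ...nwm.20220627/medium_range_mem1/nwm.t00z.medium_range.channel_rt_1.f024.conus.nc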

@property
Expand Down Expand Up @@ -694,6 +714,7 @@ def __init__(
)

for it, (requested_time, _) in enumerate(self._files.items()):

if it == 0 and requested_time.hour == 0:
logger.info(f'set it=0 {requested_time}')
yesterday = (self.start_date - timedelta(days=1)).strftime("%Y%m%d")
25 changes: 18 additions & 7 deletions pyschism/mesh/vgrid.py
@@ -151,13 +151,24 @@ def open(cls, path):

nvrt = int(lines[1].strip().split()[0])

kbp = np.array([int(i.split()[1])-1 for i in lines[2:]])

sigma = -np.ones((len(kbp), nvrt))

for i, line in enumerate(lines[2:]):
sigma[i, kbp[i]:] = np.array(
line.strip().split()[2:]).astype('float')
sline = np.array(lines[2].split()).astype('float')
if sline.min() < 0:
#old version
kbp = np.array([int(i.split()[1])-1 for i in lines[2:]])
sigma = -np.ones((len(kbp), nvrt))

for i, line in enumerate(lines[2:]):
sigma[i, kbp[i]:] = np.array(
line.strip().split()[2:]).astype('float')

else:
#new version
sline = sline.astype('int')
kbp = sline-1
sigma = np.array([line.split()[1:] for line in lines[3:]]).T.astype('float')
#replace -9. with -1.
fpm = sigma<-1
sigma[fpm] = -1

return cls(sigma)
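
For context, a toy illustration (not from the commit) of the format detection above: in the old vgrid.in each data row is "node kbp sigma..." and so contains negative sigma values, while the new format's first data row holds only the 1-based bottom indices, all positive.

import numpy as np

old_first_row = np.array('1 3 -1.0 -0.5 0.0'.split()).astype('float')  # node, kbp, sigma...
new_first_row = np.array('3 1 2'.split()).astype('float')              # kbp for every node
print(old_first_row.min() < 0)   # True  -> old format
print(new_first_row.min() < 0)   # False -> new format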
