Hazard Winds#
from datetime import datetime
print("execution start: {0}".format(datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
execution start: 2022-09-13 06:50:02
import os
import os.path as op
import sys
import glob
import pandas as pd
import numpy as np
import xarray as xr
# raster tools
from rasterio.crs import CRS
import json
import rioxarray
# kepler
from keplergl import KeplerGl
# bluemath
sys.path.insert(0, op.join(op.abspath(''), '..', '..'))
sys.path.insert(0, op.join(op.abspath(''), '..'))
# operational utils
from operational.util import read_config_args
Warning: ecCodes 2.21.0 or higher is recommended. You are running version 2.16.0
Forecast Parameters#
# project database
p_data = r'/media/administrador/HD2/SamoaTonga/data'
p_db = r'/media/administrador/HD1/DATABASES'
#site = 'Savaii' # Savaii / Upolu / Tongatapu
# (optional) get site from config file
nb_args = read_config_args(op.abspath(''), '09b_hazard')
site = nb_args['site']
print('Study site: {0}'.format(site))
Study site: Savaii
# Hazard winds
process_raster = False
# site related parameters
if site == 'Savaii':
# site
site_ = site.lower()
nm = 'sp'
# riskscape
site_main = 'Samoa'
code_utm = 32702
elif site == 'Upolu':
# site
site_ = site.lower()
nm = 'up'
# riskscape
site_main = 'Samoa'
code_utm = 32702
elif site == 'Tongatapu':
# site
site_ = site
nm = 'tp'
# riskscape
site_main = 'Tongatapu'
code_utm = 32701
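The same per-site parameters can also be kept in a small lookup table instead of the if/elif chain above; a minimal sketch (not part of the original notebook), with values mirroring the branches:
# alternative sketch: per-site parameters as a lookup table (values mirror the branches above)
SITE_PARAMS = {
    'Savaii':    {'site_': 'savaii',    'nm': 'sp', 'site_main': 'Samoa',     'code_utm': 32702},
    'Upolu':     {'site_': 'upolu',     'nm': 'up', 'site_main': 'Samoa',     'code_utm': 32702},
    'Tongatapu': {'site_': 'Tongatapu', 'nm': 'tp', 'site_main': 'Tongatapu', 'code_utm': 32701},
}
params = SITE_PARAMS[site]
site_, nm, site_main, code_utm = params['site_'], params['nm'], params['site_main'], params['code_utm']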
Database#
p_site = op.join(p_data, site)
# riskscape folder
p_riskscape = op.join(p_data, 'riskscape_projects')
p_riskscape_site = op.join(p_riskscape, site)
p_riskscape_data = op.join(p_riskscape, site, 'data')
p_riskscape_data_rain = op.join(p_riskscape_data, 'rain')
# kepler config files
p_kepler_config = op.join(p_riskscape_site, 'config_files', 'config_hazard_rain_{0}.json'.format(site.lower()))
Forecast Output Folder#
p_forecast = op.join(p_site, 'forecast', '09_rainfall_tc_inundation')
# last valid execution of 09a
dates_exec = sorted([x for x in os.listdir(p_forecast) if len(glob.glob(op.join(p_forecast, x, 'Forecast_*.nc'))) > 0])
dates_exec = [x for x in dates_exec if 'reforecast' not in x]
if len(dates_exec) == 0:
raise ValueError('No solved TCs available from 09a')
# choose last date
date = dates_exec[-1]
print('last available date: {0}'.format(date))
last available date: 202209021920
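A specific forecast run can also be selected by hand instead of always taking the most recent one; a minimal sketch, where date_override is an illustrative name and value:
# optional sketch: override the automatic choice with an explicit date code
date_override = None  # e.g. '202209021920' (illustrative)
if date_override is not None:
    if date_override not in dates_exec:
        raise ValueError('requested forecast run not found: {0}'.format(date_override))
    date = date_override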
# available TC names
fs = glob.glob(op.join(p_forecast, date, 'Forecast_*.nc'))
names = list(set([op.basename(x).split('_')[1] for x in fs]))
print('available TCs: {0}'.format(names))
available TCs: ['amos']
# select tc name
tc_name = names[0]
# forecast folder
p_fore_date = op.join(p_forecast, date)
print('forecast date code: {0}'.format(date))
p_fore_tc = op.join(p_fore_date, tc_name + '_forecast')
p_out_tracks = op.join(p_fore_tc, 'OUT_TRACKS')
forecast date code: 202209021920
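The cell below reads the NetCDF written by the upstream forecast step for the selected track (or the analogues envelope when tk is 'max'); if that file is missing, xr.open_dataset fails with the FileNotFoundError shown further down. A small pre-check can make the failure explicit (sketch, not part of the original notebook):
# optional sketch: fail early with a clear message if the upstream step did not write its outputs
expected = glob.glob(op.join(p_out_tracks, 'Flooding_Metamodel_Analogues_*.nc'))
if not expected:
    raise FileNotFoundError('no Flooding_Metamodel_Analogues_*.nc files found in {0}'.format(p_out_tracks))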
tk = 'max'  # track number, or 'max' to use the analogues envelope
if tk == 'max':
file_name = op.join(p_out_tracks, 'Flooding_Metamodel_Analogues_envelope.nc')
ds_rain = xr.open_dataset(file_name).to_dataframe().set_index(['x','y']).to_xarray()
else:
file_name = op.join(p_out_tracks, 'Flooding_Metamodel_Analogues_{0}_{1}_raster.nc'.format(tc_name, tk))
ds_rain = xr.open_dataset(file_name)
tif_riskscape = op.join(p_riskscape, site, 'data', 'rainfall', 'Flooding_Metamodel_' + tc_name + '_tk_' + tk + '_' + date + '_.tif') #Always save in the same folder
print('Saving tif at: ' + tif_riskscape)
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
/tmp/ipykernel_29311/3794932057.py in <module>
      3 if tk == 'max':
      4     file_name = op.join(p_out_tracks, 'Flooding_Metamodel_Analogues_envelope.nc')
----> 5     ds_rain = xr.open_dataset(file_name).to_dataframe().set_index(['x','y']).to_xarray()
      6 else:
      7     file_name = op.join(p_out_tracks, 'Flooding_Metamodel_Analogues_{0}_{1}_raster.nc'.format(tc_name, tk))
    (intermediate frames through xarray backends and netCDF4 omitted)
FileNotFoundError: [Errno 2] No such file or directory: b'/media/administrador/HD2/SamoaTonga/data/Savaii/forecast/09_rainfall_tc_inundation/202209021920/amos_forecast/OUT_TRACKS/Flooding_Metamodel_Analogues_envelope.nc'
# assign the site UTM CRS and order dimensions as (y, x) for raster export
raster = ds_rain.rio.write_crs('EPSG:{0}'.format(code_utm)).transpose('y', 'x')
# mask out values of 5 and above
raster = raster.where(raster < 5)
# write the GeoTIFF consumed by RiskScape
raster.rio.to_raster(tif_riskscape)
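The exported GeoTIFF can be reopened with rioxarray to confirm that the CRS and the < 5 mask survived the write; a minimal sketch:
# optional sketch: reopen the exported GeoTIFF and check CRS and value range
check = rioxarray.open_rasterio(tif_riskscape, masked=True)
print(check.rio.crs)         # expected: the EPSG code set above for the site UTM zone
print(float(check.max()))    # values of 5 and above were masked before export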
Hazard Winds#
# project to lon, lat (WGS84) to plot in Kepler
data_sim_84 = raster.rio.reproject(CRS.from_epsg(4326)).copy()
data_sim_84 = data_sim_84.where((data_sim_84>=0.2) & (data_sim_84<=100))
df_sim_84 = data_sim_84.to_dataframe().reset_index().dropna()\
.rename(columns={'x':'Lon', 'y':'Lat'})
df_sim_84
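Before mapping, a quick summary of the reprojected points (a sketch, not part of the original notebook) helps confirm that the 0.2–100 filter left a sensible set of values:
# optional sketch: basic sanity check on the filtered points before mapping
print('{0} points after filtering'.format(len(df_sim_84)))
print(df_sim_84.describe())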
Plot Hazard - SWATH winds#
# load kepler map configuration
with open(p_kepler_config, "rb") as a_file:
    config_rain = json.load(a_file)
# Hazard - flooding depth
map_1 = KeplerGl(height=700, data={"Depth Metamodel": df_sim_84}, config=config_rain, show_docs=False)
map_1
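Besides saving the map configuration below, the interactive map itself can be exported as a standalone HTML file with KeplerGl's save_to_html; a minimal sketch, with an illustrative file name:
# optional sketch: export the interactive map as standalone HTML (file name is illustrative)
map_1.save_to_html(file_name=op.join(p_fore_tc, 'hazard_map_{0}_{1}.html'.format(tc_name, date)))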
with open(p_kepler_config, "w") as outfile:
    json.dump(map_1.config, outfile)
print("execution end: {0}".format(datetime.today().strftime('%Y-%m-%d %H:%M:%S')))