Exploring the NHC Advisories and Sea Surface Height during Hurricane Irma
Created: 2017-09-09
Updated: 2024-09-18
This notebook aims to demonstrate how to create a simple interactive GIS map with the National Hurricane Center predictions [1] and the observed sea surface height from CO-OPS [2].
First we have to download the National Hurricane Center (NHC) GIS 5-day prediction data for Irma.
NHC identifies storms with 8-character codes:

- 2 characters for the basin: al → Atlantic;
- 2 characters for the storm number: 11 is Irma;
- and 4 characters for the year: 2017.

Browse https://www.nhc.noaa.gov/gis/archive_wsurge.php?year=2017 to find the codes for other hurricanes. A minimal parser for these codes is sketched below.
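As a quick illustration, here is a sketch that splits such a code into its parts (the helper name is ours, not an NHC convention; the field widths follow the description above):

def parse_storm_code(code):
    """Split an 8-character NHC storm code into basin, number, and year."""
    return {"basin": code[:2], "number": code[2:4], "year": code[4:]}


parse_storm_code("al112017")
# -> {'basin': 'al', 'number': '11', 'year': '2017'}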
code = "al112017"
hurricane = f"{code}_5day"
import sys # noqa
from pathlib import Path
from urllib.request import urlopen, urlretrieve
import lxml.html
def url_lister(url):
urls = []
connection = urlopen(url)
dom = lxml.html.fromstring(connection.read())
for link in dom.xpath("//a/@href"):
urls.append(link)
return urls
def download(url, path):
    path = Path(path)
    sys.stdout.write(path.name + "\n")
    if not path.is_file():
        urlretrieve(url, filename=str(path), reporthook=progress_hook(sys.stdout))
        sys.stdout.write("\n")
        sys.stdout.flush()
def progress_hook(out):
    """
    Return a progress hook function, suitable for passing to
    urllib.request.urlretrieve, that writes to the file object *out*.
    """

    def it(n, bs, ts):
        got = n * bs
        if ts < 0:
            outof = ""
        else:
            # On the last block n*bs can exceed ts, so we clamp it
            # to avoid awkward questions.
            got = min(got, ts)
            outof = f"/{int(ts):d} [{100 * got // ts:d}%]"
        out.write(f"\r {int(got):d}{outof}")
        out.flush()

    return it
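To see the hook in action without a network call, it can be driven by hand; urlretrieve passes it the block count, block size, and total size:

hook = progress_hook(sys.stdout)
# Simulate urlretrieve reporting three 16 KiB blocks of a 38308-byte file.
for n in range(1, 4):
    hook(n, 16384, 38308)
# The carriage returns leave a single line ending in "38308/38308 [100%]".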
nhc = "https://www.nhc.noaa.gov/gis/forecast/archive/"
# Skip the "latest" file because it duplicates the highest-numbered advisory.
fnames = [
fname
for fname in url_lister(nhc)
if fname.startswith(hurricane) and "latest" not in fname
]
base = Path(".").joinpath("data", hurricane).resolve()
if not base.is_dir():
base.mkdir(parents=True)
for fname in fnames:
    url = f"{nhc}{fname}"  # nhc already ends with a slash
path = base.joinpath(fname)
download(url, path)
al112017_5day_001.zip
38308/38308 [100%]
al112017_5day_002.zip
39079/39079 [100%]
[... progress output for advisories 003 through 051A elided ...]
al112017_5day_052.zip
23019/23019 [100%]
In the cells below we use geopandas to load the data we just downloaded. We will use only the prediction cone (pgn) and the track points (pts), but feel free to explore this data further; there is plenty more in there.
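To see what else ships in each advisory archive, here is a minimal sketch using only the standard library (the layer-suffix glosses in the comment are the typical contents of NHC forecast zips):

import zipfile

# Peek inside the first advisory archive; suffixes typically include
# lin (track line), pgn (cone polygon), pts (track points), and ww (watches/warnings).
first = sorted(base.glob(f"{hurricane}_*.zip"))[0]
with zipfile.ZipFile(first) as zf:
    for name in sorted(zf.namelist()):
        if name.endswith(".shp"):
            print(name)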
import os
os.environ["CPL_ZIP_ENCODING"] = "UTF-8"
os.environ["TZ"] = "GMT0"
from glob import glob
import geopandas
cones, points = [], []
for fname in sorted(glob(str(base.joinpath(f"{hurricane}_*.zip")))):
number = fname.split("_")[-1].split(".zip")[0]
pgn = geopandas.read_file(f"{fname}!{code}-{number}_5day_pgn.shp")
cones.append(pgn)
pts = geopandas.read_file(f"{fname}!{code}-{number}_5day_pts.shp")
    # Keep only the first point: the observed position at advisory time.
points.append(pts.iloc[0])
Let’s create a color code for the point track.
colors = {
"Subtropical Depression": "yellow",
"Tropical Depression": "yellow",
"Tropical Storm": "orange",
"Subtropical Storm": "orange",
"Hurricane": "red",
"Major Hurricane": "crimson",
}
Now we can get all the information we need from those GIS files. Let’s start with the event dates.
import dateutil
# Note: str.strip(" AST") removes any of the characters " ", "A", "S", "T"
# from both ends, which happens to drop the timezone label here.
start = points[0]["FLDATELBL"].strip(" AST")
end = points[-1]["FLDATELBL"].strip(" EDT")
start = dateutil.parser.parse(start)
end = dateutil.parser.parse(end)
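On Python 3.9+, removesuffix states the intent more directly, since it removes an exact trailing substring rather than a set of characters (a sketch with a hypothetical label):

label = "2017-08-30 8:00 AM AST"  # hypothetical FLDATELBL-style label
label.removesuffix(" AST")  # -> '2017-08-30 8:00 AM'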
And the bounding box to search the data.
from shapely.geometry import LineString
from shapely.ops import unary_union
last_cone = cones[-1]["geometry"].iloc[0]
track = LineString([point["geometry"] for point in points])
polygon = unary_union([last_cone, track])
# Add a buffer to find the stations along the track.
bbox = polygon.buffer(2).bounds
Note that the bounding box is derived from the track and the latest prediction cone.
strbbox = ", ".join(format(v, ".2f") for v in bbox)
print(f"bbox: {strbbox}\nstart: {start}\n end: {end}")
bbox: -91.91, 14.40, -28.30, 39.45
start: 2017-08-30 08:00:00
end: 2017-09-11 20:00:00
Now we need to build a filter with those parameters to find the observations along the hurricane's path. We still need to specify:

- the units for the observations;
- and the SOS name for the variables of interest.

Next, we can use pyoos to assemble a collector and download the data into a pandas DataFrame.
import cf_units
import pandas as pd
from ioos_tools.ioos import collector2table
from pyoos.collectors.coops.coops_sos import CoopsSos
from retrying import retry
# We need to retry in case of failure because the server cannot handle
# the high traffic during events like Irma.
@retry(stop_max_attempt_number=5, wait_fixed=3000)
def get_coops(start, end, sos_name, units, bbox, verbose=False):
collector = CoopsSos()
collector.set_bbox(bbox)
collector.end_time = end
collector.start_time = start
collector.variables = [sos_name]
ofrs = collector.server.offerings
title = collector.server.identification.title
config = dict(
units=units,
sos_name=sos_name,
)
data = collector2table(
collector=collector,
config=config,
col=f"{sos_name} ({units.format(cf_units.UT_ISO_8859_1)})",
)
# Clean the table.
table = dict(
station_name=[s._metadata.get("station_name") for s in data],
station_code=[s._metadata.get("station_code") for s in data],
sensor=[s._metadata.get("sensor") for s in data],
lon=[s._metadata.get("lon") for s in data],
lat=[s._metadata.get("lat") for s in data],
depth=[s._metadata.get("depth", "NA") for s in data],
)
table = pd.DataFrame(table).set_index("station_name")
if verbose:
print("Collector offerings")
print(f"{title}: {len(ofrs)} offerings")
return data, table
ssh, ssh_table = get_coops(
start=start,
end=end,
sos_name="water_surface_height_above_reference_datum",
units=cf_units.Unit("meters"),
bbox=bbox,
)
ssh_table
| station_name | station_code | sensor | lon | lat | depth |
|---|---|---|---|---|---|
| Bermuda Biological Station, Bermuda | 2695535 | urn:ioos:sensor:NOAA.NOS.CO-OPS:2695535:N1 | -64.6950 | 32.3700 | None |
| Bermuda, St. Georges Island, Bermuda | 2695540 | urn:ioos:sensor:NOAA.NOS.CO-OPS:2695540:Y1 | -64.7033 | 32.3733 | None |
| Atlantic City, NJ | 8534720 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8534720:A1 | -74.4181 | 39.3567 | None |
| Cape May, NJ | 8536110 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8536110:A1 | -74.9600 | 38.9683 | None |
| Ship John Shoal, NJ | 8537121 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8537121:B1 | -75.3767 | 39.3054 | None |
| ... | ... | ... | ... | ... | ... |
| Esperanza, Vieques Island, PR | 9752695 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9752695:A1 | -65.4714 | 18.0939 | None |
| San Juan, La Puntilla, San Juan Bay, PR | 9755371 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9755371:Y1 | -66.1164 | 18.4589 | None |
| Magueyes Island, PR | 9759110 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9759110:Y1 | -67.0464 | 17.9701 | None |
| Mayaguez, PR | 9759394 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9759394:Y1 | -67.1624 | 18.2188 | None |
| Mona Island, PR | 9759938 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9759938:N1 | -67.9382 | 18.0893 | None |

84 rows × 5 columns
wind_speed, wind_speed_table = get_coops(
start=start,
end=end,
sos_name="wind_speed",
units=cf_units.Unit("m/s"),
bbox=bbox,
)
wind_speed_table
| station_name | station_code | sensor | lon | lat | depth |
|---|---|---|---|---|---|
| Bermuda, St. Georges Island, Bermuda | 2695540 | urn:ioos:sensor:NOAA.NOS.CO-OPS:2695540:C1 | -64.7033 | 32.3733 | None |
| Cape May, NJ | 8536110 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8536110:C1 | -74.9600 | 38.9683 | None |
| Ship John Shoal, NJ | 8537121 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8537121:C1 | -75.3767 | 39.3054 | None |
| Brandywine Shoal Light, DE | 8555889 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8555889:C1 | -75.1130 | 38.9870 | None |
| Lewes, DE | 8557380 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8557380:C1 | -75.1193 | 38.7828 | None |
| ... | ... | ... | ... | ... | ... |
| Charlotte Amalie, VI | 9751639 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9751639:C1 | -64.9258 | 18.3306 | None |
| Esperanza, Vieques Island, PR | 9752695 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9752695:C1 | -65.4714 | 18.0939 | None |
| San Juan, La Puntilla, San Juan Bay, PR | 9755371 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9755371:C1 | -66.1164 | 18.4589 | None |
| Magueyes Island, PR | 9759110 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9759110:C1 | -67.0464 | 17.9701 | None |
| Mayaguez, PR | 9759394 | urn:ioos:sensor:NOAA.NOS.CO-OPS:9759394:C1 | -67.1624 | 18.2188 | None |

73 rows × 5 columns
For simplicity we will use only the stations that report both wind speed and sea surface height, rejecting those that have only one or the other.
common = set(ssh_table["station_code"]).intersection(wind_speed_table["station_code"])
ssh_obs, win_obs = [], []
for station in common:
ssh_obs.extend([obs for obs in ssh if obs._metadata["station_code"] == station])
win_obs.extend(
[obs for obs in wind_speed if obs._metadata["station_code"] == station]
)
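A quick sanity check on the overlap (the exact count depends on what the live server returned above):

print(f"{len(common)} stations report both sea surface height and wind speed")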
index = pd.date_range(start=start, end=end, freq="15min")
# Re-index and rename series.
ssh_observations = []
for series in ssh_obs:
_metadata = series._metadata
series.index = series.index.tz_localize(None)
obs = series.reindex(index=index, limit=1, method="nearest")
obs._metadata = _metadata
obs.name = _metadata["station_name"]
ssh_observations.append(obs)
winds_observations = []
for series in win_obs:
_metadata = series._metadata
series.index = series.index.tz_localize(None)
obs = series.reindex(index=index, limit=1, method="nearest")
obs._metadata = _metadata
obs.name = _metadata["station_name"]
winds_observations.append(obs)
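The two loops above are identical except for their input list; a small helper (a refactoring sketch, equivalent under the same pandas semantics) removes the duplication:

def snap_to_index(series_list, index):
    """Re-index each station series onto the common 15-minute grid."""
    out = []
    for series in series_list:
        _metadata = series._metadata
        series.index = series.index.tz_localize(None)
        obs = series.reindex(index=index, limit=1, method="nearest")
        obs._metadata = _metadata
        obs.name = _metadata["station_name"]
        out.append(obs)
    return out


# Equivalent to the two loops above:
# ssh_observations = snap_to_index(ssh_obs, index)
# winds_observations = snap_to_index(win_obs, index)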
Let’s take a look at some stations to see if the data is OK. Below we have a station in Naples, FL, on the Gulf of Mexico.
import matplotlib.pyplot as plt
try:
station = "8725110"
w = [obs for obs in winds_observations if obs._metadata["station_code"] == station][
0
]
s = [obs for obs in ssh_observations if obs._metadata["station_code"] == station][0]
fig, ax = plt.subplots(figsize=(17, 3.75))
s["2017-9-10":].plot(ax=ax, label="Sea surface height (m)", color="#0000ff")
ax1 = w["2017-9-10":].plot(
ax=ax, label="Wind speed (m/s)", color="#9900cc", secondary_y=True
)
ax.set_title(w._metadata["station_name"])
lines = ax.get_lines() + ax.right_ax.get_lines()
ax.legend(lines, [l.get_label() for l in lines], loc="upper left")
ax.axhline(0, color="black")
ax.set_ylabel("Sea surface height (m)", color="#0000ff")
ax.right_ax.set_ylabel("Wind speed (m/s)", color="#9900cc")
ax1.annotate(
"Eye of the hurricane",
        # idxmin gives the timestamp of the minimum (argmin returns its position).
        xy=(w["2017-9-10":].idxmin().to_pydatetime(), w["2017-9-10":].min()),
xytext=(5, 10),
textcoords="offset points",
arrowprops=dict(arrowstyle="simple", facecolor="crimson"),
)
ax.grid(True)
except Exception:
print(f"Cannot find station {station}")
Cannot find station 8725110
We can observe the sea level retreating around 10-Sep 09:00 and then a significant surge after 19:00. The lower winds at the beginning of the surge are probably the eye of the hurricane.
For our interactive map we will use bokeh HTML plots instead of the usual raster matplotlib ones to enhance the user experience when exploring the graphs.
from bokeh.embed import file_html
from bokeh.models import HoverTool, LinearAxis, Range1d
from bokeh.plotting import figure
from bokeh.resources import CDN
from folium import IFrame
# Plot defaults.
tools = "pan,box_zoom,reset"
width, height = 750, 250
def make_plot(ssh, wind):
p = figure(
toolbar_location="above",
x_axis_type="datetime",
width=width,
height=height,
tools=tools,
title=ssh.name,
)
p.yaxis.axis_label = "wind speed (m/s)"
l0 = p.line(
x=wind.index,
y=wind.values,
line_width=5,
line_cap="round",
line_join="round",
legend_label="wind speed (m/s)",
color="#9900cc",
alpha=0.5,
)
p.extra_y_ranges = {}
p.extra_y_ranges["y2"] = Range1d(start=-1, end=3.5)
p.add_layout(LinearAxis(y_range_name="y2", axis_label="ssh (m)"), "right")
l1 = p.line(
x=ssh.index,
y=ssh.values,
line_width=5,
line_cap="round",
line_join="round",
legend_label="ssh (m)",
color="#0000ff",
alpha=0.5,
y_range_name="y2",
)
p.legend.location = "top_left"
p.add_tools(
HoverTool(
tooltips=[
("wind speed (m/s)", "@y"),
],
renderers=[l0],
),
HoverTool(
tooltips=[
("ssh (m)", "@y"),
],
renderers=[l1],
),
)
return p
def make_marker(p, location, fname):
html = file_html(p, CDN, fname)
iframe = IFrame(html, width=width + 45, height=height + 80)
popup = folium.Popup(iframe, max_width=2650)
icon = folium.Icon(color="green", icon="stats")
marker = folium.Marker(location=location, popup=popup, icon=icon)
return marker
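To preview one of these figures outside of a folium popup, bokeh can render it inline (a sketch; it assumes the observation lists built above are non-empty and, like the map loop below, that they line up station by station):

from bokeh.io import output_notebook, show

output_notebook()  # render bokeh figures inline in the notebook
show(make_plot(ssh_observations[0], winds_observations[0]))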
Here is the final result. Explore the map by clicking on the plotted features!
import folium
from folium.plugins import Fullscreen, MarkerCluster
from ioos_tools.ioos import get_coordinates
lon = track.centroid.x
lat = track.centroid.y
m = folium.Map(location=[lat, lon], tiles="OpenStreetMap", zoom_start=4)
Fullscreen(position="topright", force_separate_button=True).add_to(m)
marker_cluster0 = MarkerCluster(name="Observations")
marker_cluster1 = MarkerCluster(name="Past predictions")
marker_cluster0.add_to(m)
marker_cluster1.add_to(m)
url = "http://oos.soest.hawaii.edu/thredds/wms/hioos/satellite/dhw_5km"
w0 = folium.WmsTileLayer(
url,
name="Sea Surface Temperature",
fmt="image/png",
layers="CRW_SST",
attr="PacIOOS TDS",
overlay=True,
transparent=True,
)
w0.add_to(m)
url = "http://hfrnet.ucsd.edu/thredds/wms/HFRNet/USEGC/6km/hourly/RTV"
w1 = folium.WmsTileLayer(
url,
name="HF Radar",
fmt="image/png",
layers="surface_sea_water_velocity",
attr="HFRNet",
overlay=True,
transparent=True,
)
w1.add_to(m)
def style_function(feature):
return {
"fillOpacity": 0,
"color": "black",
"stroke": 1,
"weight": 0.5,
"opacity": 0.2,
}
# Latest cone prediction.
latest = cones[-1]
folium.GeoJson(
data=latest.__geo_interface__,
name="Cone prediction as of {}".format(latest["ADVDATE"].values[0]),
).add_to(m)
# Past cone predictions.
for cone in cones[:-1]:
folium.GeoJson(
data=cone.__geo_interface__,
style_function=style_function,
).add_to(marker_cluster1)
# Latest points prediction (pts still holds the last advisory read in the loop above).
for k, row in pts.iterrows():
date = row["FLDATELBL"]
hclass = row["TCDVLP"]
location = row["LAT"], row["LON"]
popup = f"{date}<br>{hclass}"
folium.CircleMarker(
location=location,
radius=10,
fill=True,
color=colors[hclass],
popup=popup,
).add_to(m)
# All the points along the track.
for point in points:
date = point["FLDATELBL"]
hclass = point["TCDVLP"]
location = point["LAT"], point["LON"]
popup = f"{date}<br>{hclass}"
folium.CircleMarker(
location=location,
radius=5,
fill=True,
color=colors[hclass],
popup=popup,
).add_to(m)
# Observations.
for ssh, wind in zip(ssh_observations, winds_observations):
fname = ssh._metadata["station_code"]
location = ssh._metadata["lat"], ssh._metadata["lon"]
p = make_plot(ssh, wind)
marker = make_marker(p, location=location, fname=fname)
marker.add_to(marker_cluster0)
folium.LayerControl().add_to(m)
p = folium.PolyLine(get_coordinates(bbox), color="#009933", weight=1, opacity=0.2)
p.add_to(m);
m
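The map can also be written out as a standalone HTML file to share outside the notebook (the file name is arbitrary):

m.save("irma_map.html")  # standalone HTML copy of the interactive map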