---
title: "SST preparation"
author: "Robert Schlegel"
date: "2019-05-23"
output: workflowr::wflow_html
editor_options:
  chunk_output_type: console
---
```{r global_options, include = FALSE}
knitr::opts_chunk$set(fig.width = 8, fig.align = 'center',
                      echo = TRUE, warning = FALSE, message = FALSE,
                      eval = TRUE, tidy = FALSE)
```
## Introduction
Building on the work performed in the [Polygon preparation](https://robwschlegel.github.io/MHWNWA/polygon-prep.html) vignette, we will now create grouped SST time series for the regions in our study area, based on the 50 m and 200 m isobaths within each. This will not be done by using shape files to define the areas, as previously planned. Rather, we will find the mean depth of each SST pixel and assign it to one of three depth classes within each region. The resulting depth strata are shown in the maps further down.
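As a toy preview of that idea (made-up depths, not the model data), assigning depths to the three strata used throughout this vignette is a one-liner with `cut()`:

```{r depth-class-toy, eval=FALSE}
# A toy sketch with made-up depths, using the same three strata as below
toy_depths <- c(12, 48, 130, 250, 1020)
cut(toy_depths, breaks = c(0, 50, 200, 1100), dig.lab = 4)
# [1] (0,50]     (0,50]     (50,200]   (200,1100] (200,1100]
# Levels: (0,50] (50,200] (200,1100]
```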
```{r libraries}
# Packages used in this vignette
library(tidyverse) # Base suite of functions
library(heatwaveR, lib.loc = "../R-packages/") # For detecting MHWs
# cat(paste0("heatwaveR version = ", packageDescription("heatwaveR")$Version))
library(FNN) # For fastest nearest neighbour searches
library(ncdf4) # For opening and working with NetCDF files
library(SDMTools) # For finding points within polygons
library(lubridate) # For convenient date manipulation
# Set number of cores
doMC::registerDoMC(cores = 50)
# Disable scientific notation for numeric values
# I just find it annoying
options(scipen = 999)
```
Before we access the SST data from the NAPA (3-Oceans) model, let's load all of the polygons and bathymetry data we will need to create our grouped time series.
```{r load-poly-bathy}
# Corners of the study area
NWA_corners <- readRDS("data/NWA_corners.Rda")
# Individual regions
NWA_coords <- readRDS("data/NWA_coords_cabot.Rda")
# Lowres bathymetry
NWA_bathy <- readRDS("data/NWA_bathy_lowres.Rda")
# The NAPA model lon/lat values
NAPA_coords <- readRDS("data/NAPA_coords.Rda")
# The base map
map_base <- ggplot2::fortify(maps::map(fill = TRUE, col = "grey80", plot = FALSE)) %>%
  dplyr::rename(lon = long) %>%
  # Bump group IDs and wrap longitudes so polygons crossing 180° don't smear across the map
  mutate(group = ifelse(lon > 180, group + 9999, group),
         lon = ifelse(lon > 180, lon - 360, lon)) %>%
  select(-region, -subregion)
```
## Pixel prep
### NAPA bathymetry
We will now extract the bathymetry data from the NAPA model to use as our guide for assigning depth values to the SST pixels.
```{r NAPA-bathy, eval=FALSE}
# Open bathymetry NetCDF file
nc_bathy <- nc_open("../../data/NAPA025/mesh_grid/bathy_creg025_extended_5m.nc")
# Extract and combine lon/lat/bathy
lon <- as.data.frame(ncvar_get(nc_bathy, varid = "nav_lon")) %>%
  setNames(., as.numeric(nc_bathy$dim$y$vals)) %>%
  mutate(lon = as.numeric(nc_bathy$dim$x$vals)) %>%
  gather(-lon, key = lat, value = nav_lon) %>%
  mutate(lat = as.numeric(lat))
lat <- as.data.frame(ncvar_get(nc_bathy, varid = "nav_lat")) %>%
  setNames(., as.numeric(nc_bathy$dim$y$vals)) %>%
  mutate(lon = as.numeric(nc_bathy$dim$x$vals)) %>%
  gather(-lon, key = lat, value = nav_lat) %>%
  mutate(lat = as.numeric(lat))
bathy <- as.data.frame(ncvar_get(nc_bathy, varid = "Bathymetry")) %>%
  mutate(lon = as.numeric(nc_bathy$dim$x$vals)) %>%
  gather(-lon, key = lat, value = bathy) %>%
  mutate(lat = rep(as.numeric(nc_bathy$dim$y$vals), each = 528), # 528 = length of the x dimension
         bathy = ifelse(bathy == 0, NA, bathy), # depths of 0 m are land
         bathy = round(bathy, 2)) %>%
  na.omit() %>%
  left_join(lon, by = c("lon", "lat")) %>%
  left_join(lat, by = c("lon", "lat")) %>%
  dplyr::rename(lon_index = lon, lat_index = lat,
                lon = nav_lon, lat = nav_lat) %>%
  mutate(lon = round(lon, 4),
         lat = round(lat, 4))
nc_close(nc_bathy)
# Save
# saveRDS(bathy, "data/NAPA_bathy.Rda")
rm(nc_bathy, bathy, lon, lat)
```
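As a quick sanity check (a minimal sketch, not part of the original workflow, assuming `data/NAPA_bathy.Rda` has been saved above), the extracted bathymetry can be plotted on its native grid indices, which form a regular matrix even though the model's lon/lat values are curvilinear:

```{r NAPA-bathy-check, eval=FALSE}
# A minimal sketch: plot the long-format bathymetry on its grid indices
# to confirm the NetCDF extraction above behaved as expected
NAPA_bathy_check <- readRDS("data/NAPA_bathy.Rda")
ggplot(NAPA_bathy_check, aes(x = lon_index, y = lat_index)) +
  geom_raster(aes(fill = bathy)) +
  scale_fill_viridis_c(trans = "log10") +
  labs(x = "x index", y = "y index", fill = "Depth (m)")
```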
### Assign pixels to regions
With the depths for the model pixels worked out, we may now assign each pixel to one of the seven regions.
```{r pixel-regions}
# Load NAPA bathymetry
NAPA_bathy <- readRDS("data/NAPA_bathy.Rda")# %>%
  # mutate(index = paste0(lon, lat))
# Function for finding and cleaning up points within a given region polygon
pnts_in_region <- function(region_in){
  region_sub <- NWA_coords %>%
    filter(region == region_in)
  coords_in <- pnt.in.poly(pnts = NAPA_bathy[4:5], poly.pnts = region_sub[2:3]) %>%
    filter(pip == 1) %>%
    # mutate(index = paste(lon, lat)) %>%
    left_join(NAPA_bathy, by = c("lon", "lat")) %>%
    dplyr::select(-pip) %>%
    mutate(region = region_in)
  return(coords_in)
}
# Run the function
NWA_NAPA_info <- plyr::ldply(unique(NWA_coords$region), pnts_in_region)
# Visualise to ensure success
ggplot(NWA_coords, aes(x = lon, y = lat)) +
  geom_polygon(data = map_base, aes(group = group), show.legend = F) +
  geom_polygon(aes(fill = region), alpha = 0.2) +
  geom_point(data = NWA_NAPA_info, aes(colour = region)) +
  coord_cartesian(xlim = NWA_corners[1:2],
                  ylim = NWA_corners[3:4]) +
  labs(x = NULL, y = NULL)
```
### Assign pixels to sub-regions
The `pnt.in.poly()` function proved remarkably convenient: our points have been placed within their respective regions with very little effort. The last step before we move on to creating our clumped time series is to cut the regions up into three groups each, based on depth: 0 -- 50 m, 51 -- 200 m, and 201+ m.
```{r depth-sub-regions}
# Cut the depth strata into sub-regions as desired
NWA_NAPA_info <- NWA_NAPA_info %>%
  mutate(sub_region = cut(bathy, breaks = c(0, 50, 200, ceiling(max(bathy))), dig.lab = 4))
# saveRDS(NWA_NAPA_info, "data/NWA_NAPA_info.Rda")
# Visualise to ensure success
sub_region_map <- ggplot(NWA_coords, aes(x = lon, y = lat)) +
  geom_polygon(data = map_base, aes(group = group), show.legend = F) +
  # geom_polygon(aes(fill = region), alpha = 0.2) +
  geom_point(data = NWA_NAPA_info, aes(colour = region, alpha = sub_region),
             shape = 15, size = 0.5) +
  guides(colour = guide_legend(override.aes = list(size = 5))) +
  scale_alpha_manual(values = c(0.4, 0.7, 1)) +
  coord_cartesian(xlim = NWA_corners[1:2],
                  ylim = NWA_corners[3:4]) +
  labs(x = NULL, y = NULL, colour = "Region", alpha = "Sub region")
# ggsave(plot = sub_region_map, filename = "output/sub_region_map.pdf", height = 5, width = 6)
# Visualise
sub_region_map
```
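A quick tally (a minimal sketch, not part of the original workflow) confirms that no region/depth stratum combination came out empty:

```{r sub-region-tally, eval=FALSE}
# A minimal sketch: count pixels per region and depth stratum
NWA_NAPA_info %>%
  count(region, sub_region)
```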
## SST prep
With the NAPA pixels successfully assigned to regions, and to sub-regions within them based on depth, we now need to go about clumping these SST pixels into one average time series per sub-region.
```{r sst-clump, eval=FALSE}
# The NAPA data location
NAPA_files <- dir("../../data/NAPA025/1d_grid_T_2D", full.names = T)
# Function for loading the individual NAPA NetCDF files and subsetting SST accordingly
load_NAPA_sst_sub <- function(file_name, coords = NWA_NAPA_info){
  nc <- nc_open(as.character(file_name))
  # The date range of each file is encoded in its file name
  date_start <- ymd(str_sub(basename(as.character(file_name)), start = 29, end = 36))
  date_end <- ymd(str_sub(basename(as.character(file_name)), start = 38, end = 45))
  date_seq <- seq(date_start, date_end, by = "day")
  sst <- as.data.frame(ncvar_get(nc, varid = "sst")) %>%
    mutate(lon = as.numeric(nc$dim$x$vals)) %>%
    gather(-lon, key = lat, value = temp) %>%
    mutate(t = rep(date_seq, each = 388080), # 388080 = 528 x 735 grid cells per daily field
           lat = rep(rep(as.numeric(nc$dim$y$vals), each = 528), times = 5), # 5 daily fields per file
           temp = round(temp, 2)) %>%
    select(lon, lat, t, temp) %>%
    na.omit() %>%
    dplyr::rename(lon_index = lon, lat_index = lat) %>%
    right_join(coords, by = c("lon_index", "lat_index")) %>%
    group_by(region, sub_region, t) %>%
    summarise(temp = mean(temp, na.rm = T))
  nc_close(nc)
  return(sst)
}
# Clump'em
system.time(
  NAPA_sst_sub <- plyr::ldply(NAPA_files,
                              .fun = load_NAPA_sst_sub,
                              .parallel = TRUE)
) # 1.5 seconds for 1, 135 seconds for all
# For some reason the rounding didn't stick in the function above, so we whack it again here
NAPA_sst_sub <- NAPA_sst_sub %>%
  mutate(temp = round(temp, 2))
# Save
# saveRDS(NAPA_sst_sub, "data/NAPA_sst_sub.Rda")
```
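Before moving on, it can be reassuring to eyeball one of the clumped series (a minimal sketch, not part of the original workflow, assuming `data/NAPA_sst_sub.Rda` has been saved above):

```{r sst-clump-check, eval=FALSE}
# A minimal sketch: daily SST for the first region, one line per sub-region
readRDS("data/NAPA_sst_sub.Rda") %>%
  filter(region == unique(region)[1]) %>%
  ggplot(aes(x = t, y = temp, colour = sub_region)) +
  geom_line() +
  labs(x = NULL, y = "Temperature (°C)", colour = "Sub region")
```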
## MHW detection
With our clumped SST time series ready, the last step in this vignette is to detect the MHWs within each.
```{r MHW-sub, eval=FALSE}
# Load the time series data
NAPA_sst_sub <- readRDS("data/NAPA_sst_sub.Rda")
# Calculate base results
NAPA_MHW_sub <- NAPA_sst_sub %>%
  group_by(region, sub_region) %>%
  nest() %>%
  mutate(clims = map(data, ts2clm,
                     climatologyPeriod = c(min(NAPA_sst_sub$t), max(NAPA_sst_sub$t))),
         events = map(clims, detect_event),
         cats = map(events, category)) %>%
  select(-data)
# saveRDS(NAPA_MHW_sub, "data/NAPA_MHW_sub.Rda")
```
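For readers less familiar with the nested `purrr` workflow above, here is what the same calculation looks like for a single sub-region series (a minimal sketch under the same assumptions, not an additional analysis step):

```{r MHW-single, eval=FALSE}
# A minimal sketch: detect MHWs in one sub-region series without nesting
one_ts <- NAPA_sst_sub %>%
  ungroup() %>%
  filter(region == unique(region)[1],
         sub_region == unique(sub_region)[1]) %>%
  select(t, temp)
one_clim <- ts2clm(one_ts, climatologyPeriod = c(min(one_ts$t), max(one_ts$t)))
one_event <- detect_event(one_clim)
head(one_event$event) # one row per detected event
```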
With the MHWs detected, let's visualise the results to ensure everything worked as expected.
```{r MHW-vis}
# Load MHW results
NAPA_MHW_sub <- readRDS("data/NAPA_MHW_sub.Rda")
# Events
NAPA_MHW_event <- NAPA_MHW_sub %>%
  select(-clims, -cats) %>%
  unnest(events) %>%
  # detect_event() returns a list of two data frames per group;
  # keeping every second row retains only the event data frames
  filter(row_number() %% 2 == 0) %>%
  unnest(events)
event_lolli_plot <- ggplot(data = NAPA_MHW_event, aes(x = date_peak, y = intensity_max)) +
  geom_lolli(colour = "salmon", colour_n = "red", n = 3) +
  labs(x = "Peak Date", y = "Max. Intensity (°C)") +
  # scale_y_continuous(expand = c(0, 0)) +
  facet_grid(region ~ sub_region)
# ggsave(plot = event_lolli_plot, filename = "output/event_lolli_plot.pdf", height = 7, width = 13)
# Visualise
event_lolli_plot
```
Everything appears to check out. Up next in the [Variable preparation](https://robwschlegel.github.io/MHWNWA/var-prep.html) vignette we will go through the steps necessary to build the data that will be fed into our self-organising maps as seen in the [Self-organising map (SOM) analysis](https://robwschlegel.github.io/MHWNWA/som.html) vignette.