# app.R — Shiny insect phenology forecaster
# (Removed non-code residue from a GitHub file-view scrape: filename banner
# and line-number gutter that preceded the actual source.)
#Just importing packages from CRAN
# install.packages(c("shiny", "leaflet", "mosaic", "rnoaa", "shinyWidgets", "lubridate", "taxize", "raster", "rasterVis",
# "tidyverse", "hash", "shinycssloaders", "rgdal", "shinytoastr", "shinyalert", "plotly", "shinyglide",
# "cicerone"))
library(shiny)
library(leaflet)
library(mosaic)
library(rnoaa)
library(shinyWidgets)
library(lubridate)
#library(leafpop)
library(taxize)
library(raster)
library(rasterVis)
library(tidyverse)
library(hash)
library(shinycssloaders)
library(rgdal)
library(shinytoastr)
library(shinyalert)
library(plotly)
library(shinyglide)
library(cicerone)
library(aws.s3)
#library(caret)
#if (!require('devtools')) install.packages('devtools')
#install_github(c("ColinFay/glouton", "mikejohnson51/AOI", "mikejohnson51/climateR", "carlganz/shinyCleave", "dreamRs/shinypop", "JohnCoene/shinyscroll"))
#devtools::install_github("ColinFay/glouton")
#devtools::install_github("carlganz/shinyCleave")
#devtools::install_github("dreamRs/shinypop")
#devtools::install_github("JohnCoene/shinyscroll")
library(shinypop)
library(shinyCleave)
library(AOI)
library(climateR)
library(glouton)
library(shinyscroll)
# JavaScript injected into the UI: once the Shiny client connects it sets
# input$loaded, so the server can tell that startup has finished.
js <- "$(document).on('shiny:connected', function(event) {
Shiny.onInputChange('loaded', true)
});"
##Change this if you want the heatmap to not automatically update
autoUpdateHeatmap = FALSE
##How many species heatmaps can be updated when a user begins a session
max_heatmap_updates_per_session = 1
#s3_data <- "shiny-insect-forecaster"
#aws.s3::s3sync(path = "./dat",
# bucket = s3_data,
# prefix = "dat/",
# direction = "download")
#library(rgdal)
#library(testit)
#DO NOT FORGET TO ADD RESET FOR EVERY YEAR (first day reset to 0).
# USA-NPN site phenometrics; -9999 is the dataset's missing-value sentinel
# and is recoded to NA immediately after loading.
npn_data <- read.csv("./dat/npn_phenometrics/site_phenometrics_data.csv")
npn_data <- replace(npn_data, npn_data == -9999, NA)
# Load and clean the seasonality database.
#
# Reads ./dat/AppendixS3_SeasonalityDatabase.csv, keeps the columns the app
# uses, adds per-species mean BDT/EADDC columns, removes physiological
# outliers (BDT.C <= -7 or EADDC >= 2000; rows with NA in either are dropped,
# matching the previous subset() behavior), restricts to records with both
# coordinates, and assigns a sequential unique id (uid).
#
# Returns: a tibble grouped by Species.1.
get_dfWrangled <- function() {
  # Import seasonality database
  seasonality_raw <- read.csv("./dat/AppendixS3_SeasonalityDatabase.csv", header = TRUE)
  # Select the columns the app uses and add per-species mean trait columns
  wrangled <- as_tibble(seasonality_raw) %>%
    dplyr::select(Species, Species.1, BDT.C, EADDC, lat, lon, Author, Year, Journal, Location, quality) %>%
    group_by(Species.1) %>%
    mutate(mean_BDT.C = mean(BDT.C, na.rm = TRUE),
           mean_EADDC = mean(EADDC, na.rm = TRUE))
  # Remove physiological outliers (filter, like subset, drops NA comparisons)
  wrangled <- filter(wrangled, BDT.C > -7, EADDC < 2000)
  # Restrict to records that have both latitude and longitude
  wrangled <- filter(wrangled, !is.na(lon), !is.na(lat))
  # Stable row id, later used to join weather-station lookups back on
  wrangled$uid <- seq_len(nrow(wrangled))
  wrangled
}
dfWrangled <- get_dfWrangled()
#Create necessary datarframe for RNOAA query
# latLonDF <- dplyr::select(dfWrangled, c("Species.1", "uid", "lat", "lon"))
# colnames(latLonDF) <- c("Species.1", "id", "latitude", "longitude")
#
# #Shorten database for ease
# num_spec = 65
# latLonDF <- head(latLonDF, num_spec)
#Turn each row to a dataframe
# pLatLonDF <- latLonDF %>%
# rowwise %>%
# do( X = as_tibble(.) ) %>%
# ungroup
#---------------Only run the following if you want to update ghcnd-stations.txt------------
#This will query NOAA for the most up to date info on weather stations in the GHCND network
#The output is saved to a file called ghcnd-stations-current.csv, in the current working directory
# Refresh the local snapshot of GHCND weather-station metadata.
#
# Downloads the fixed-width station list and data inventory from NOAA, merges
# them on station id (keeping the station file's coordinates), drops rows with
# any missing field, and caches the result as an RDS file at
# ./dat/ghcnd-stations-current.csv. Returns the cached tibble.
updateGHCNDStations <- function() {
  print("Getting ghcnd-stations.txt from NOAA...")
  raw_stations <- read.fwf(
    url("https://www1.ncdc.noaa.gov/pub/data/ghcn/daily/ghcnd-stations.txt"),
    widths = c(11, 9, 11, 7, 2, 31, 5, 10),
    header = FALSE, strip.white = TRUE, comment.char = "",
    stringsAsFactors = FALSE
  )
  print("Getting ghcnd-inventory.txt from NOAA...")
  raw_inventory <- read.fwf(
    url("https://www1.ncdc.noaa.gov/pub/data/ghcn/daily/ghcnd-inventory.txt"),
    widths = c(11, 9, 10, 5, 5, 5),
    header = FALSE, strip.white = TRUE, comment.char = "",
    stringsAsFactors = FALSE
  )
  stations <- stats::setNames(
    raw_stations,
    c("id", "latitude", "longitude", "elevation",
      "state", "name", "gsn_flag", "wmo_id")
  )
  inventory <- stats::setNames(
    raw_inventory,
    c("id", "latitude", "longitude",
      "element", "first_year", "last_year")
  )
  # Drop the inventory's lat/lon (columns 2-3) so the merge keeps only the
  # station file's coordinates
  merged <- merge(stations, inventory[, -c(2, 3)], by = "id")
  sturdy <- tibble::as_tibble(merged[stats::complete.cases(merged), ])
  saveRDS(sturdy, file = "./dat/ghcnd-stations-current.csv")
  return(sturdy)
}
#--------End Update of GHCND Stations----------
#read in current local copy of ghcnd stations (RDS written by updateGHCNDStations,
#despite the .csv extension)
localGHCNDStations <- readRDS(file = "./dat/ghcnd-stations-current.csv")
# Nearest-station lookup. Y is a dataframe containing
# c("Species.1", "id", "latitude", "longitude"); for each point this returns
# up to 2 GHCND stations within radius = 500 (kilometers, per rnoaa's
# meteo_nearby_stations) that report TMAX/TMIN between 2000 and 2020.
nearestStat <- function(Y) {meteo_nearby_stations(lat_lon_df = Y,
station_data = localGHCNDStations,
var = c("TMAX", "TMIN"),
year_min = 2000,
year_max = 2020,
radius = 500,
limit = 2
)}
#Take every dataframe in pLatLonDF and add a result column containing the RNOAA-station-id of the nearest weather station
# stationLatLonDf <- pLatLonDF %>%
# mutate(result = map(X, nearestStat)) %>%
# unnest(cols = c(X, result)) %>%
# dplyr::select("Species.1", "id", "result") %>%
# rename(uid = id) %>%
# unnest(cols = c(result)) %>%
# rename(sid = id) %>%
# dplyr::select("uid", "sid")
# stationLatLonDf$rank <- rep(c(1,2), length.out = nrow(stationLatLonDf))
# stationLatLonDf <- spread(stationLatLonDf, rank, sid)
# colnames(stationLatLonDf) <- c("uid", "sid1", "sid2")
#Merge weather dataframe with species dataframe
#speciesStationDF <- merge(x = dfWrangled, y = stationLatLonDf, by = "uid")
#Removes observations with no nearby weather stations (<500 miles) as defined by radius argument in nearestStat
#speciesStationDF <- speciesStationDF[!(is.na(speciesStationDF$sid1) || speciesStationDF$sid1==""), ]
#Check have TMIN and TMAX dat (Potentially need in different location later)
# stations= stations[which(stations$Var=="TMAX" | stations$Var=="TMIN"),]
# stations= spread(stations,Var, Var)
# stations= stations[which(stations$TMAX=="TMAX" & stations$TMIN=="TMIN"),]
#DEGREE DAYS CALCULATION
#Single sine wave approximation from Baskerville & Emin 1969
#(see http://www.ipm.ucdavis.edu/WEATHER/ddss_tbl.html)
#Input:
#  Tmin, Tmax: daily minimum / maximum temperature (same units as LDT)
#  LDT: lower developmental threshold
#Returns NA when either temperature is missing; never returns negative DDs.
#------Adapted from Lauren Buckley (no longer allows for negative DDs and accepts NA values)
degree.days.mat = function(Tmin, Tmax, LDT){
  if(is.na(Tmin) || is.na(Tmax)){dd = NA}
  else{
    # entirely above LDT: degree days are the mean excess over the threshold
    if(Tmin>=LDT) {dd = (Tmax+Tmin)/2-LDT}
    # intercepted by LDT: single sine wave approximation.
    # UC IPM single sine: theta1 = asin((LDT - mean)/alpha), in radians.
    # FIX: the previous code dropped the /2 on (Tmax+Tmin) and scaled the
    # asin argument by pi/180, which disagrees with the published formula.
    if(Tmin<LDT && Tmax>LDT){
      alpha=(Tmax-Tmin)/2
      theta1=asin((LDT-(Tmax+Tmin)/2)/alpha)
      dd=1/pi*(((Tmax+Tmin)/2-LDT)*(pi/2-theta1)+alpha*cos(theta1))
      if(!is.na(dd))if(dd<0){dd=0}
    }
    # entirely below LDT: no development occurs
    if(Tmax <= LDT){dd = 0}}
  return(dd)
}
# cumsum with reset adapted from @jgilfillan on github, many thanks!
#
# Accumulates x until the running total reaches `threshold`; the output is
# capped at threshold on the step it is first reached and reported as
# threshold + 1 while the generation gap is in progress. After `gen_gap`
# capped steps the accumulator resets to 0 and accumulation restarts.
#   x          - numeric vector of daily increments (e.g. degree days)
#   threshold  - accumulation cap (EADDC)
#   gen_gap    - number of steps to hold at the cap before resetting
#   prev_accum - carried-over total; when nonzero, x[1] is NOT added and the
#                first output is prev_accum itself
# Returns a numeric vector the same length as x.
# Fixes: preallocates the result (was grown with c() each iteration) and uses
# seq_along so an empty x returns numeric(0) instead of indexing x[1]/x[0].
cumsum_with_reset <- function(x, threshold, gen_gap, prev_accum = 0) {
  acc <- prev_accum
  pause <- 0
  result <- numeric(length(x))
  for (i in seq_along(x)) {
    # A full generation gap has elapsed: restart accumulation
    if (pause == gen_gap) {
      acc <- 0
      pause <- 0
    }
    # Skip the first addition when resuming from a carried-over total
    if (!(i == 1 && prev_accum != 0)) {
      acc <- acc + x[i]
    }
    # Cap at the threshold; while holding, report threshold + 1 so callers
    # can distinguish "just reached" from "holding"
    if (acc >= threshold) {
      acc <- if (pause == 0) threshold else threshold + 1
      pause <- pause + 1
    }
    result[i] <- acc
  }
  return(result)
}
#-----Graphing Helper Functions---------
# S3 generic for the degree-day development plot; dispatches on the class of
# tMax1. See dd_plot.default for the meaning of each argument.
dd_plot <- function(tMax1, tMax2,
tMin1, tMin2,
BDT, EADDC,
startTime, endTime = Sys.Date(),
species,
lat = NULL, lon = NULL,
breaks = NULL,
dateformat='%m/%y',
locality = NULL,
gen_gap = 1,
forecast_length = 30) {
UseMethod("dd_plot")
}
# Default method: build an interactive plotly figure of degree-day development.
#
# Combines TMAX/TMIN records from up to two weather stations (station 1 wins
# on overlapping dates), converts tenths-of-degrees to degrees C, computes
# daily and accumulated degree days against the species thresholds, fits a
# quadratic trend to forecast `forecast_length` days ahead, and marks the
# dates at which the accumulation reaches EADDC (adulthood).
#
# Arguments:
#   tMax1,tMax2,tMin1,tMin2 - data frames with `date` plus TMAX/TMIN columns
#       in tenths of degrees C (-9999 and |T| > 2000 tenths treated as
#       missing); any of them may be NULL.
#   BDT    - lower developmental threshold (T0, degrees C)
#   EADDC  - accumulated degree days required to complete development (G)
#   startTime,endTime - date range of interest
#   species - species name(s), used in the plot title
#   lat,lon - optional coordinates, reverse-geocoded for the title
#   breaks,dateformat - retained for interface compatibility (unused here)
#   locality - optional pre-built locality string for the title
#   gen_gap - days the accumulation holds at EADDC before resetting
#   forecast_length - days to forecast beyond the last observation
# Returns: a plotly figure.
#
# Fixes: rangeselector step "mo" -> "month" (plotly only accepts "month"),
# species_locality given a fallback (previously undefined when both locality
# and lat/lon were NULL), seq_len used for the row loop.
dd_plot.default <- function(tMax1, tMax2, tMin1, tMin2, BDT, EADDC, startTime, endTime, species, lat = NULL, lon = NULL, breaks = NULL, dateformat='%m/%y', locality = NULL, gen_gap = 1, forecast_length = 30) {
  #Combine station records, preferring station 1 where dates overlap
  if(!is.null(tMax1) && !is.null(tMax2) && !is.null(tMin1) && !is.null(tMin2)) {
    dfTMAX <- rbind(tMax1, tMax2[!tMax2$date %in% tMax1$date,])
    dfTMIN <- rbind(tMin1, tMin2[!tMin2$date %in% tMin1$date,])
  } else {
    if(!is.null(tMax1) && !is.null(tMin1)){
      dfTMAX <- tMax1
      dfTMIN <- tMin1
    }else{
      dfTMAX <- tMax2
      dfTMIN <- tMin2
    }
  }
  #Strip ISO time suffixes and parse the dates
  dfTMAX$date <- ymd(sub('T00:00:00\\.000|T00:00:00', '', as.character(dfTMAX$date)))
  dfTMIN$date <- ymd(sub('T00:00:00\\.000|T00:00:00', '', as.character(dfTMIN$date)))
  #Order dataframes by date
  dfTMAX <- dfTMAX[order(as.Date(dfTMAX$date, format = "%y%m%d")),]
  dfTMIN <- dfTMIN[order(as.Date(dfTMIN$date, format = "%y%m%d")),]
  #Joining TMIN and TMAX data into dfTEMP
  dfTEMP <- full_join(dfTMAX, dfTMIN[ , c("date", "TMIN")], by = 'date')
  #Matching units (tenths of degrees -> degrees C) and removing sentinels
  dfTEMP$TMAX[which(dfTEMP$TMAX==-9999)]= NA
  dfTEMP$TMAX= dfTEMP$TMAX/10
  dfTEMP$TMIN[which(dfTEMP$TMIN==-9999)]= NA
  dfTEMP$TMIN= dfTEMP$TMIN/10
  #Catch other physically implausible values
  dfTEMP$TMAX[which(dfTEMP$TMAX > 200)] = NA
  dfTEMP$TMIN[which(dfTEMP$TMIN > 200)] = NA
  dfTEMP$TMAX[which(dfTEMP$TMAX < -200)] = NA
  dfTEMP$TMIN[which(dfTEMP$TMIN < -200)] = NA
  dfTEMP <- na.omit(dfTEMP)
  #Daily degree days (seq_len is safe even if every row was dropped above)
  dfTEMP$dd <- NA
  for (i in seq_len(nrow(dfTEMP))) {
    dfTEMP$dd[i] <- degree.days.mat(dfTEMP$TMIN[i], dfTEMP$TMAX[i], BDT)
  }
  dDays <- dfTEMP
  #Accumulated degree days, resetting after reaching the threshold (EADDC)
  dDays$csum <- cumsum_with_reset(dDays$dd, EADDC, gen_gap)
  #Locality for the title: explicit argument, else reverse geocode, else a
  #neutral fallback (this last path previously crashed: undefined variable)
  species_locality <- "the selected area"
  if(!is.null(locality)){
    species_locality <- locality
  }
  if(!is.null(lat) && !is.null(lon)){
    observationLocationLookup <- AOI::geocode_rev(c(lat, lon))
    species_locality <- str_c(observationLocationLookup$county, ", ", observationLocationLookup$state, ", ", toupper(observationLocationLookup$country_code))
  }
  #Quadratic fit of daily degree days over time, used to extrapolate forward
  poly_model <- lm(dd ~ poly(as.numeric(date), 2, raw = TRUE), data = dDays)
  forecast <- NULL
  forecast$date <- c(seq.Date(from = dDays$date[length(dDays$csum)], to = dDays$date[length(dDays$csum)] + forecast_length, by = "day"))
  forecast$dd <- as.vector(predict(poly_model, forecast, type="response"))
  forecast$csum <- cumsum_with_reset(forecast$dd, EADDC, gen_gap, dDays$csum[length(dDays$csum)])
  #Observed, fitted, and forecast traces
  fig <- plot_ly(dDays, x = ~date) %>%
    add_lines(x = ~date, y = ~csum, name = "Accumulated Degree Days") %>%
    add_lines(x = ~date, y = ~dd, name = "Individual Degree Days", fill = 'tozeroy') %>%
    add_lines(x = ~date, y = stats::predict(poly_model), name = "Polynomial Fit", line = list(dash = "dash"), visible = "legendonly") %>%
    add_lines(x = forecast$date, y = forecast$dd, name = "Pred. Degree Days", line = list(dash = "dash")) %>%
    add_lines(x = forecast$date, y = forecast$csum, name = "Pred. Accumulated Degree Days")
  #Vertical markers where the observed accumulation reached EADDC
  datesAdulthoodReached <- dDays$date[which(dDays$csum == EADDC)]
  if(length(datesAdulthoodReached) != 0){
    fig <- fig %>%
      add_segments(x = datesAdulthoodReached +1,
                   xend = datesAdulthoodReached +1,
                   y = 0,
                   yend = EADDC*1.05,
                   name = "Eggs Reached Adulthood",
                   line = list(dash = "dash"))}
  #Vertical markers where the forecast accumulation reaches EADDC
  datesAdulthoodReached_future <- forecast$date[which(forecast$csum == EADDC)]
  if(length(datesAdulthoodReached_future) != 0){
    fig <- fig %>%
      add_segments(x = datesAdulthoodReached_future +1,
                   xend = datesAdulthoodReached_future +1,
                   y = 0,
                   yend = EADDC*1.05,
                   name = "Eggs Likely to Reach Adulthood",
                   line = list(dash = "dash"))}
  #Horizontal dashed line at EADDC
  fig <- fig %>%
    add_segments(x = startTime,
                 xend = dDays$date[length(dDays$csum)] + forecast_length,
                 y = EADDC,
                 yend = EADDC,
                 name = "G",
                 line = list(dash = "dash"))
  #Title, range-selector buttons and range slider.
  #FIX: step must be "month" ("mo" is not a valid plotly rangeselector step,
  #so the 6-month button silently failed).
  fig <- fig %>% layout(
    title = str_c(paste(species, collapse = ', '), " development in ", species_locality),
    xaxis = list(
      rangeselector = list(
        buttons = list(
          list(
            count = 1,
            label = "1 mo",
            step = "month",
            stepmode = "backward"),
          list(
            count = 6,
            label = "6 mo",
            step = "month",
            stepmode = "backward"),
          list(
            count = 1,
            label = "YTD",
            step = "year",
            stepmode = "todate"))),
      rangeslider = list(type = "date")),
    yaxis = list(title = "Degree Days (\u00B0C)")
  )
  return(fig)
}
# dd_plot.default <- function(tMax1, tMax2, tMin1, tMin2, BDT, EADDC, startTime, breaks = NULL, dateformat = '%d/%m/%y') {
# stop("No method for ", class(list(tMax1)[[1]]), call. = FALSE)
# }
# plot_template <- function(df, breaks, dateformat) {
# tt <- list(
# theme_minimal(base_size = 18),
# geom_line(size = 1),
# labs(y = "Accumulated Degree Days", x = "Date (MM/YY)")
# )
# if (!is.null(breaks)) {
# c(tt, scale_x_date(date_breaks = breaks, date_labels = dateformat))
# } else {
# tt
# }
# }
#--------End graphing helper functions--------------------
#Fetch a common name for a species from the Encyclopedia of Life (via taxize).
#Returns the first hit, or "No available common name." when the lookup
#comes back empty.
safeSci2Com <- function(df) {
  hits <- flatten(sci2comm(df, db = "eol", simplify = TRUE))
  if (identical(hits, list())) {
    return("No available common name.")
  }
  hits[[1]]
}
#-----Get rasterStack of accumulated DD for current year, by week------------
#-----Also saves copy to dat folder with name species_name.grd or end_date if no species is provided
#--Note: degree.days.mat(tmin, tmax, BDT) must be declared prior to execution
#Optional arguments:
# Note: BDT and EADDC arguments must be specified if species is not specified
# - start_date: a date to begin accumulation at
# - end_date: a date to stop accumulation at (default: two days ago)
# - BDT: either an integer BDT value or a vector of values to be averaged
# - EADDC: either an integer EADDC value or a vector of values to be averaged
# - cum_DD: a rasterLayer containing cumulative Degree Day values on the start date
# (Degree days will begin accumulating from here, otherwise they start at 0)
# - species: a string species name that will be queried to get mean BDT and EADDC values from ./dat/AppendixS3_SeasonalityDatabase.csv
# Build (or extend) a rasterStack of accumulated degree days over CONUS and
# persist weekly snapshots to ./dat/<species>.grd (or ./dat/<end_date>.grd
# when no species is given).
#
# Arguments (BDT and EADDC must be given if species is not):
#   start_date - date to begin accumulation at (default: Jan 1 this year);
#                ignored when cum_DD is supplied (resumes from its last layer)
#   end_date   - date to stop accumulation at (default: two days ago, the
#                most recent gridMET data available)
#   BDT, EADDC - scalar values or vectors to be averaged
#   cum_DD     - existing raster of cumulative DD values to resume from
#   species    - species name used to look up mean BDT/EADDC and to name files
#
# FIX: the no-species file path previously used `current_date`, which is not
# defined until later in the function; per the documented intent it now uses
# end_date.
accumulateDD <- function(start_date = as.Date(str_c(year(Sys.Date()), '-01-01')),
                         end_date = Sys.Date() -2,
                         BDT = NULL,
                         EADDC = NULL,
                         cum_DD = NULL,
                         species = NULL){
  print(cum_DD)
  # Resuming: restart at the date encoded in the last layer's name
  # (raster prefixes names with a character, e.g. "X2021.03.14")
  if(!is.null(cum_DD)){
    print("Matching start date with layer")
    start_date <- str_replace_all(sub('.', '', last(names(cum_DD))), "[/.]", "-")
  }
  if(!is.Date(start_date)){start_date <- as.Date(start_date)}
  if(!is.Date(end_date)){end_date <- as.Date(end_date)}
  print(str_c("Start date: ",start_date))
  if((is.null(BDT) || is.null(EADDC)) && is.null(species)){return("Please provide T0 and G arguments, or a species to query")}
  # Look up mean T0 (BDT) and G (EADDC) for the requested species
  if(!is.null(species)){
    toAccumulate <- get_dfWrangled() %>% filter(Species == species)
    print(str_c("Species selected: ", species))
    print("T0 Values Found: ")
    print(toAccumulate$BDT.C)
    BDT <- mean(toAccumulate$BDT.C)
    print(str_c("Average T0: ", BDT))
    print("G Values Found: ")
    print(toAccumulate$EADDC)
    EADDC <- mean(toAccumulate$EADDC)
    print(str_c("Average G: ", EADDC))}
  # Collapse vector-valued thresholds to their means
  if(length(BDT) > 1){
    print(str_c("Averaging T0s: ", BDT))
    BDT <- mean(BDT)
    print(str_c("Average T0: ", BDT))}
  if(length(EADDC) > 1){
    print(str_c("Averaging G: ", EADDC))
    EADDC <- mean(EADDC)
    print(str_c("Average G: ", EADDC))}
  print(str_c("T0: ", BDT, ", G: ", EADDC))
  # Area of interest: the contiguous United States
  AOI = aoi_get(state = "conus")
  # Initialize cum_DD to the DD values for start_date (gridMET temperatures
  # are converted from Kelvin via -273.15 inside the overlay)
  if(is.null(cum_DD)){
    p = getGridMET(AOI, param = c('tmax','tmin'), startDate = start_date)
    r = raster::brick(p)
    names(r) = c('tmin', 'tmax')
    pastStack <- NULL
    cum_DD <- overlay(r, fun = function(x){degree.days.mat(mosaic::value(x[1]) -273.15, mosaic::value(x[2]) -273.15, BDT)})
    p <- NULL
    r <- NULL
  }else{
    # Resuming: keep the existing stack and accumulate on top of its last layer
    pastStack <- cum_DD
    cum_DD <- raster(pastStack, layer = nlayers(pastStack))}
  print(cum_DD)
  # Register the species in the bookkeeping files
  # (./dat/availablePhenoSpecies.csv and the EADDC/BDT dictionaries)
  if(!is.null(species)){
    filePath <- str_c("./dat/", make.names(species), ".grd")
    availablePhenoSpecies <- read_rds("./dat/availablePhenoSpecies.csv")
    if(!is.null(availablePhenoSpecies[[species]])){availablePhenoSpecies[[species]] <- NULL}
    else{
      speciesEADDC_Dict <- read_rds("./dat/phenoSpeciesEADDC.csv")
      speciesEADDC_Dict[[filePath]] <- EADDC
      write_rds(speciesEADDC_Dict, "./dat/phenoSpeciesEADDC.csv")
      speciesBDT_Dict <- read_rds("./dat/phenoSpeciesBDT.csv")
      speciesBDT_Dict[[filePath]] <- BDT
      write_rds(speciesBDT_Dict, "./dat/phenoSpeciesBDT.csv")}
    #Add species to available list
    availablePhenoSpecies <- append(availablePhenoSpecies, filePath)
    names(availablePhenoSpecies)[length(availablePhenoSpecies)] <- str_c(species)
    write_rds(availablePhenoSpecies, "./dat/availablePhenoSpecies.csv")}
  else{filePath <- str_c("./dat/", make.names(end_date), ".grd")}
  print(str_c("Filepath: ", filePath))
  week <- 1
  current_date = start_date + 1
  the_stack <- NULL
  #Accumulate Degree Days from current_date to end_date, inclusive.
  while(current_date <= end_date){
    #Get raster stack of tmin and tmax for the current day
    temps = getGridMET(AOI, param = c('tmax','tmin'), startDate = current_date)
    tstack = raster::brick(temps)
    names(tstack) = c('tmin', 'tmax')
    current_DD <- overlay(tstack, fun = function(x){degree.days.mat(mosaic::value(x[1]) -273.15, mosaic::value(x[2]) -273.15, BDT)})
    temps <- NULL
    tstack <- NULL
    print(str_c("Calculated DDs for ", current_date))
    #Add today's DD values to the running accumulation
    cum_DD <- cum_DD + current_DD
    #Cap cum_DD values at EADDC
    cum_DD <- overlay(cum_DD, fun = function(cumul){
      if (!is.na(cumul[1]) && (cumul[1] >= EADDC)){
        return(as.vector(EADDC))}
      else {
        return(cumul[1])}})
    names(cum_DD) = c(current_date)
    week <- week + 1
    current_date = current_date + 1
    #Persist a snapshot after the first week of accumulation
    if(week == 7){
      if(!is_null(pastStack)){
        the_stack <- raster::stack(pastStack, cum_DD)
        writeRaster(the_stack, filePath, overwrite=TRUE)
        pastStack <- NULL
      }else{
        the_stack <- raster::stack(cum_DD)}
      print(str_c("the_stack: ",the_stack))
      writeRaster(the_stack, filePath, overwrite=TRUE)
      print(names(the_stack))
    }
    #Persist weekly thereafter (week counter wraps back to 7)
    if(week == 14){
      the_stack <- raster::stack(the_stack, cum_DD)
      writeRaster(the_stack, filePath, overwrite=TRUE)
      print(the_stack)
      print(names(the_stack))
      week <- 7
    }
  }
  print(str_c("Writing raster: ", the_stack))
  return(the_stack)
}
#If user selects week in day, here's the update function to accumulate more degree days
# Day-resolution variant of accumulateDD: accumulates degree days over CONUS
# from start_date to end_date (inclusive) and returns a single rasterLayer of
# the final cumulative values. Nothing is written to disk.
#   start_date - date accumulation begins (coerced to Date if needed)
#   end_date   - date accumulation stops (default: two days ago)
#   BDT, EADDC - scalar lower threshold (T0) and development requirement (G)
#   cum_DD     - optional rasterLayer of cumulative values to continue from;
#                when NULL, accumulation is initialized from start_date's data
accumulateDDPart <- function(start_date, end_date = Sys.Date() -2, BDT, EADDC, cum_DD = NULL){
#Define area of interest
if(!is.Date(start_date)){start_date <- as.Date(start_date)}
if(!is.Date(end_date)){end_date <- as.Date(end_date)}
AOI = aoi_get(state = "conus")
#Get temp raster stack for start_date
#raster::plot(AOI)
print(str_c("Accumulating degree days starting at " , start_date, " until ", end_date, "..."))
print(str_c("T0: ", BDT, ", G: ", EADDC))
#print("not above here")
#Initialize cum_DD to DD values for start_date
# (the -273.15 converts gridMET temperatures from Kelvin to Celsius)
if(is.null(cum_DD)){
p = getGridMET(AOI, param = c('tmax','tmin'), startDate = start_date)
r = raster::stack(p)
names(r) = c('tmin', 'tmax')
pastStack <- NULL
#print('1')
cum_DD <- overlay(r, fun = function(x){degree.days.mat(mosaic::value(x[1]) -273.15, mosaic::value(x[2]) -273.15, BDT)})
p <- NULL
r <- NULL
}
print("Initial raster layer: ")
print(cum_DD)
#Set current day to the next start day
week <- 1
current_date = start_date + 1
#Accumulate Degree Days from current_date to end_date, inclusive.
while(current_date <= end_date){
#Get raster stack of tmin and tmax for current day
temps = getGridMET(AOI, param = c('tmax','tmin'), startDate = current_date)
tstack = raster::brick(temps)
names(tstack) = c('tmin', 'tmax')
print(str_c("Accumulating ", current_date))
#Calculate todays DD values
current_DD <- overlay(tstack, fun = function(x){degree.days.mat(mosaic::value(x[1]) -273.15, mosaic::value(x[2]) -273.15, BDT)})
temps <- NULL
tstack <- NULL
#print(identical(current_DD, cum_DD))
names(current_DD) = c(current_date)
#Add cumulative DD values to current_date DD values (current_DD)
cum_DD <- cum_DD + current_DD
#Cap cum_DD values at EADDC (cells at or above the threshold are held there)
cum_DD <- overlay(cum_DD, fun = function(cumul){
if (!is.na(cumul[1]) && (cumul[1] >= EADDC)){
return(as.vector(EADDC))}
else {
return(cumul[1])}})
#Increment current_date
names(cum_DD) = c(current_date)
week <- week + 1
current_date = current_date + 1
}
#if(is.null(the_stack)){the_stack = raster::stack("holdraster.grd")}
#writeRaster(the_stack, str_c(current_date, ".grd"), overwrite=TRUE)
return(cum_DD)
#raster::plot(newR)
}
#-----Define species to visualize phenology with, locations of species' rasterStacks, and EADDC/BDT values used in computation of rasterStack
# These RDS files (written by accumulateDD, despite the .csv extension) map
# raster file paths to the EADDC/BDT values used to build each heatmap.
speciesEADDC_Dict <- read_rds("./dat/phenoSpeciesEADDC.csv")
speciesBDT_Dict <- read_rds("./dat/phenoSpeciesBDT.csv")
#Species name and corresponding filename of its phenology raster
availablePhenoSpecies <- read_rds("./dat/availablePhenoSpecies.csv")
#--This function checks the age of the phenology maps for species we have and updates them if they are over a week old.
#----availableSpecies: an optional list of species names and locations of phenology grids for corresponding species, it defaults to reading the current availablePhenoSpecies list.
#----maxUpdates: cap on how many species rasters may be refreshed in one call
#----(overridden by the global max_heatmap_updates_per_session when that is set).
# Side effects: rebuilds or extends .grd files under ./dat via accumulateDD,
# and creates/removes a ./dat/tmp scratch directory per update.
updatePhenology <- function(availableSpecies = read_rds("./dat/availablePhenoSpecies.csv"), maxUpdates = 1){
if(!is.null(max_heatmap_updates_per_session)) {maxUpdates = max_heatmap_updates_per_session}
updates_remaining <- maxUpdates
#lapply(seq_along(availableSpecies), function(i){
i <- 1
# Walk the species list until the update budget is spent or the list ends
while(updates_remaining >= 1 && i <= length(availableSpecies)){
#Get name and filePath values from availableSpecies list at index i
#print("triggered")
# print(max_updates)
# max_updates = max_updates -1
# print(max_updates)
name <- names(availableSpecies)[[i]]
filePath <- availableSpecies[[i]]
toUpdate <- raster::brick(availableSpecies[[i]])
# Last-layer names look like "X2021.03.14"; strip the prefix and re-punctuate
last_update <- as.Date(str_replace_all(sub('.', '', last(names(toUpdate))), "[/.]", "-"))
# gridMET data lags by ~2 days, so "current" means two days ago
currentDay <- Sys.Date() -2
#Calculate file age (in days) after fetching the last date it was modified
#last_update <- as.Date(file.info(availableSpecies[[i]])$mtime)
print(str_c("Layer last updated on ", last_update))
file_age <- as.double.difftime(currentDay - last_update)
print(str_c("Data currently available for ", currentDay))
thisYear <- format(Sys.Date(), '%Y')
#If the file is from the previous year and we have gridMET data from this year, delete the file and start a new one.
if(!(format(last_update, '%Y') == thisYear) && (Sys.Date() -2 >= as.Date(str_c(thisYear, "-01-01")))){
print(str_c("Creating a new phenology record for ", name, " in ", thisYear))
#toastr_info("Please be patient, a new degree day map is being created for this year.")
dir.create('./dat/tmp')
accumulateDD(as.Date(str_c(thisYear, "-01-01")),
Sys.Date() - 2,
species = name)
unlink("./dat/tmp", recursive=TRUE)
#max_updates <- max_updates -1
updates_remaining <- updates_remaining -1
}else{
#If the file is at least a week old and we have gridMET data for this year, update it
if((file_age >= 7) && (Sys.Date() -2 >= as.Date(str_c(thisYear, "-01-01")))){
#toastr_info("A degree day map is being updated.")
# Only advance by whole weeks so snapshots stay aligned to the weekly grid
weeks <- as.integer(file_age / 7)
stop_update <- last_update + (7 * weeks)
dir.create('./dat/tmp')
rasterOptions(tmpdir='./dat/tmp')
# Work on a temp copy so the live raster is not touched mid-update
hold_rast <- rasterTmpFile(prefix = make.names(name))
writeRaster(toUpdate, filename = hold_rast)
print(str_c("Updating ", name, ". This file was ", file_age ," days old."))
accumulateDD(end_date = stop_update,
species = name,
cum_DD = brick(hold_rast))
#Just added to cleanup temp files
#removeTmpFiles()
unlink("./dat/tmp", recursive=TRUE)
updates_remaining <- updates_remaining -1
print(str_c(name, " was ", file_age, " days old. It is now 2 days old, due to gridMET restrictions."))
} else print(str_c(name, " was modified less than a week ago (", file_age, " days). It will be updated in ", (7 - file_age), " days."))
}
i <- i + 1}}
# Run the phenology updater once at app startup so the cached degree-day
# rasters are current before any session connects. Gated by the
# autoUpdateHeatmap flag so development runs can skip the slow
# download/accumulation step.
if(autoUpdateHeatmap) updatePhenology()
# NOTE(review): abandoned attempt to run the update asynchronously in a
# background R process (presumably callr::r_process + promises); kept
# commented out for reference.
# pll <- function(){
# run_r_process(updatePhenology())
# }
# synchronise(pll())
#--------------------------------------------------------------------------------
#----------Guided walkthrough------------------
# Guided walkthrough of the visualization, built with the {cicerone}
# package (an R wrapper around driver.js). Each step() takes the DOM id
# of the element to highlight, a popover title, and the popover body
# text. The tour is started from the server (the "Take a Tour!" button).
guide <- Cicerone$
new(allow_close = TRUE)$
# 1. Orientation: highlight the wrapper around the whole tool.
step(
"viz-wrapper",
"Visualization Tool",
"Welcome to our insect phenology visualization tool. There are many ways you can interact with our model. Click next to get started!"
)$
# 2. The Leaflet map with the degree-day heatmap layer.
step(
"mymap",
"The Phenophase Heatmap",
"The colored layer on the map represents current insect development in the US.
For more information on how we modeled this check out the introduction above. Now, let's continue to see what the layer is actually displaying."
)$
# 3. Species selector for the heatmap.
step(
"heatmap-species-wrapper",
"Selecting a species",
"This tells you what species is currently depicted on the heatmap. Viewing a different species' heatmap is as simple as selecting it here."
)$
# 4. Heatmap date picker.
step(
"heatmap-date-wrapper",
"Changing the date",
"Visualizing the development of your selected species at a different date (this year), can easily be accomplished here."
)$
# 5. Read-out of the model parameters and actual layer date.
step(
"phnInf",
"Heatmap layer info",
"Here we see the values of the two ecological parameters used in the modeling of this species' springtime phenology.
We also see the current date of the layer being displayed. You may notice that the date is not the date you selected in the last step. More info on that next."
)$
# 6. Weekly vs daily accumulation toggle.
step(
"heatmap-resolution-wrapper",
"Heatmap date accuracy",
"The colors displayed on the map are computed weekly, and a record of each week is stored for quick viewing.
By default, when a date is selected the data that is displayed is that of the closest available weekly data.
If you are after greater modeling accuracy, changing this will recalculate heatmap values to the day."
)$
# 7. Species description tab.
step(
"subspecies",
"Species descriptions",
"A description of the current species can be found here.
Now, let's continue to learn about plotting phenology data for over 650 insect species."
)$
# 8. Back to the map: observation markers layer.
step(
"mymap",
"Species observation markers",
"Our thermal ecology dataset currently contains 1,493 observations of 678 unique insect species.
Let's explore it! First, select the 'Observations' layer checkbox in the top-right corner of the map.
Each appearing blue marker represents a location where an insect species was observed.
Click a blue marker to see the species' name and its unique thermal parameters, experimentally determined by researchers at that location."
)$
# 9. Marker filter control.
step(
"observation-filter-wrapper",
"Adding and removing markers",
"You can use the tool in this tab to control which species have visible observation markers. Now, continue to view the phenology plot for our selected observation."
)$
# 10. Per-observation phenology plot.
step(
"observation-plot-wrapper",
"Species observation plots",
"Woah, a new tab appeared! This is the current insect development plot for your selected insect observation.
We use weather data from stations in the GHCND network near the location where the insect was observed in conjunction with observed thermal parameters (T0 and G).
The result is a plot depicting accumulated degree days (a proxy for development) as well as individual degree days, which is summed to produce accumulated degree days.
An insect is expected to reach adulthood when accumulated degree days equals their experimentally determined threshold, G.
Vertical lines mark the beginning of a new generation, and the assumption was made that this happens immediately after a species reaches adulthood.
A polynomial regression model was used to predict future degree day accumulation and can be viewed by selecting it in the legend."
)$
# 11. Plot date range input.
step(
"observation-dates-wrapper",
"Changing the date range",
"This date range field makes it simple to change the x-axis date range.
Degree days are summed from the beginning of the date range, which defaults to January 1st of this year (winter in the Northern Hemisphere)."
)$
# 12. Generational-gap input.
step(
"observation-gap-wrapper",
"Changing the generational gap",
"While we made the assumption that every species produces new eggs immediately upon reaching adulthood, this is not necessarily the case.
Here, you can easily change the generational gap to any number of days for the species you are plotting."
)$step(
# 13. Forecast-length input.
"prediction-length-wrapper",
"Extrapolating polynomial model",
"We automatically extrapolate data from a polynomial regression 30 days ahead of current weather data. You can change this here (up to 60 days)"
)$step(
# 14. Closing step: point at the zipcode plotting tool.
"plotting-assistant-wrapper",
"Phenology plotting tool",
"Congratulations, you're ready to start using our visualization. As a final note, I'd like to highlight our new phenology plotting tool.
It makes visualizing the current, local development of any species in our database simple for any United States zipcode. "
)
#---------------------------------------------
#-----It's the user interface! (What the user sees)-------
# Top-level Shiny UI. Page layout, top to bottom: intro markdown with a
# demo plot, the heatmap/observation visualization (sidebar controls +
# Leaflet map), and a supporting tabset (species info / observation plot).
# Fix in this revision: the demo-plot caption used "P. Rapae" (specific
# epithet must be lower case; the same sentence already says "P. rapae")
# and "mid-june" for "mid-June".
ui <- fluidPage(
  title = "Insect Phenology",
  # JS helper libraries used by the server: toast notifications, modal
  # alerts, browser cookies, the guided tour, and scroll-to-element.
  useToastr(),
  useShinyalert(),
  use_glouton(),
  use_cicerone(),
  use_shinyscroll(),
  tags$script(js),
  tags$head(tags$link(rel="shortcut icon", href="https://insect-phenology-photos.s3-us-west-2.amazonaws.com/insect.ico")),
  # Widen Bootstrap modals (used for the plotting tool) to 80% of viewport.
  tags$head(tags$style(".modal-dialog{ width:80%}")),
  verticalLayout(
    h1("Spring Insect Phenology"),
    hr(),
    # "Skip to Map" shortcut; the server scrolls to the viz on click.
    div(
      id="scroll-button-wrapper",
      align="left",
      tags$em(tags$b("Skip to Map")),
      p("If you're not one for an introduction, feel free to skip to the visualization tool."),
      actionBttn("scroll", "Insect Phenology Visualization", size = "sm", style = "fill", color = "primary")),
    # Launcher for the zipcode-based phenology plotting tool (modal).
    div(
      id = "plotting-assistant-wrapper",
      hr(),
      tags$em(tags$b("New Plotting Tool! ")),
      p("Our new phenology plotting feature allows you to interactively produce a current/historical development plot for an insect species of your choice using weather data from any US zipcode."),
      actionBttn("miniPlotter", "Phenology Plotting Tool", size = "sm", style = "fill", color = "primary"),
      p(""),
      hr()),
    # Introductory prose split around a demonstration phenology plot.
    includeMarkdown('fin_intro1.md'),
    #hr(),
    #includeMarkdown('intro.md'),
    plotlyOutput("demoPlot"),
    helpText("This plot shows degree day accumulation slowing in late October, suggesting P. rapae may be entering diapause. Development resumes in the spring, when temperatures reach the range necessary for continued development. P. rapae reaches adulthood mid-June, subsequently producing a new generation of offspring."),
    includeMarkdown('fin_intro2.md'),
    #headerPanel('Insect Phenology Visualization'),
    # Main visualization: a tabbed sidebar of controls next to the map.
    div(
      id = "viz-wrapper",
      sidebarLayout(
        tabsetPanel(id = "tabset",
          # Tab 1: controls for the degree-day heatmap layer.
          tabPanel("Phenology Heatmap Controls", value = "Phen", sidebarPanel(
            div(
              id = "heatmap-species-wrapper",
              tags$b("Select a species"),
              helpText("Change the species shown on the heatmap."),
              selectInput(inputId = "phenoSpecies",
                          label = NULL,
                          choices = availablePhenoSpecies,
                          multiple = FALSE,
                          selectize = TRUE)),
            div(
              id = "heatmap-date-wrapper",
              tags$b("Change heatmap date"),
              helpText("Change the date shown on the heatmap, up to two days ago. The map depicts accumulated degree days from the first day of this year until this date."),
              # gridMET data lags by two days, hence the Sys.Date()-2 cap.
              dateInput(inputId = "phenoDate",
                        label = NULL,
                        value = Sys.Date()-2,
                        min = as.Date(str_c(year(Sys.Date()), '-01-01')),
                        max = Sys.Date()-2,
                        format = "mm/dd/yy")),
            div(
              id = "heatmap-resolution-wrapper",
              tags$b("Advanced: Alter heatmap resolution"),
              helpText("Change the accuracy of the heatmap. This may take several minutes to load if changed. 'Week' (default) will accumulate degree days to within a week of the selected date, while 'Day of Week' accumulates to the day."),
              # Note: radioButtons delivers these values to the server as
              # the strings "TRUE"/"FALSE", not logicals.
              radioButtons(inputId = "computeDD",
                           label = NULL,
                           choiceNames = c("Day of Week", "Week"),
                           choiceValues = c(TRUE, FALSE),
                           selected = FALSE)),
            htmlOutput(outputId = "phnInf", container = div),
            helpText("More information about the species in this heatmap is available in the 'Heatmap Species Info' tab below."),
            tags$b("If you're stuck... "),
            actionBttn("tour", "Take a Tour!", size = "sm", style = "float", color = "primary", block = TRUE))),
          # Tab 2: filter which observation markers appear on the map.
          tabPanel("Filter species observation markers", value = "Obs", sidebarPanel(
            div(
              id = "observation-filter-wrapper",
              tags$b('Add or remove species observation markers from the map'),
              helpText("Click on a species to add or remove its corresponding observation marker from the map. All records are visible by default."),
              multiInput('sel_species',
                         NULL,
                         choices = as.vector(unique(dfWrangled$Species)),
                         selected = unique(dfWrangled$Species)),
              actionButton("all", "All"),
              actionButton("none", "None"))
            #actionBttn("miniPlotter", "Phenology Plotting Assistant", style = "fill", color = "primary"),
            # dateRangeInput(inputId = "dateRange",
            # label = "Change date range for plot: ",
            # start = floor_date(Sys.Date(), "year"),
            # min = "1970-01-01",
            # format = "mm/dd/yy",
            # separator = " - ")
            #verbatimTextOutput(outputId = "obsInf", placeholder = FALSE) %>% withSpinner(color = "#228B22")))
          ))),
        mainPanel(
          leafletOutput("mymap", height = 600) %>% withSpinner(color = "#228B22"),
          helpText("Map markers from the toggleable species 'Observations' layer can be clicked. When selected, a phenology plot is rendered in the 'Observation Plot' tab below.")
        ))),
    hr(),
    # Supporting tabset below the map: species write-up and the
    # per-observation phenology plot with its controls.
    tabsetPanel(id = "tabsetSupport",
      tabPanel(title = "Heatmap Species Info",
               value = "Phen2",
               htmlOutput(outputId = "subspecies")),
      tabPanel(title = "Observation Plot",
               value = "Obs2",
               div(
                 id = "observation-plot-wrapper",
                 helpText("If the plot is not loaded, click a marker on the map to load that species' phenology plot below."),
                 plotlyOutput("predPlot") %>% withSpinner(color = "#228B22"),
                 textOutput("obsPltInf"),
                 div(
                   id = "observation-dates-wrapper",
                   dateRangeInput(inputId = "dateRange",
                                  label = "Change x-axis date range: ",
                                  start = floor_date(Sys.Date(), "year"),
                                  min = "1970-01-01",
                                  format = "mm/dd/yy",
                                  separator = " - ")),
                 helpText("Note: To plot springtime phenology for an observation located in the Southern Hemisphere, change the starting date to 6/1.
This makes visualizing Southern Hemisphere springtime development possible, and is needed due to differences in the timing of spring."),
                 div(
                   id = "observation-gap-wrapper",
                   numericInput(
                     inputId = "genGap",
                     label = "Number of days from adulthood to new offspring: ",
                     value = 1,
                     min = 1
                   )
                 ),
                 div(
                   id = "prediction-length-wrapper",
                   numericInput(
                     inputId = "fcLength",
                     label = "Number of days to predict degree day accumulation: ",
                     value = 30,
                     min = 1,
                     max = 60
                   ))))),
    br(),
    hr()
  )
)
#------Here is the server for the shiny app (How the page becomes responsive)--------