Skip to content

Commit

Permalink
version 0.1.2
Browse files Browse the repository at this point in the history
  • Loading branch information
Robert Bauer authored and cran-robot committed Jun 24, 2020
1 parent befa2c6 commit 4dec20a
Show file tree
Hide file tree
Showing 14 changed files with 389 additions and 37 deletions.
3 changes: 3 additions & 0 deletions ChangeLog
Expand Up @@ -3,6 +3,9 @@ RchivalTag versions
upcoming:
- add argument "show_time_limits" in plot_TS/DepthTS

RchivalTag_0.1.2 -- June 17, 2020 (Robert Bauer)
- added dygraph version of plot_DepthTS

RchivalTag_0.1.1 -- February 7, 2020 (Robert Bauer)
- added min_perc argument in hist_tad, hist_tat and ts2histos-function to define a minimum data coverage for histogram entries obtained through time series data.
- added new function read_TS to read in time series data
Expand Down
12 changes: 6 additions & 6 deletions DESCRIPTION
@@ -1,18 +1,18 @@
Package: RchivalTag
Type: Package
Title: Analyzing Archival Tagging Data
Version: 0.1.1
Date: 2020-03-23
Version: 0.1.2
Date: 2020-06-17
Author: Robert Bauer
Maintainer: Robert Bauer <marine.biologging@gmail.com>
Description: A set of functions to generate, access and analyze standard data products from archival tagging data.
Depends: R (>= 3.5.0)
Imports: plyr, maptools, graphics, stats, raster, readr, rgeos, ncdf4,
pracma, maps, mapdata, grDevices, oceanmap, sp, methods,
PBSmapping
pracma, dygraphs, xts, maps, mapdata, grDevices, oceanmap, sp,
methods, PBSmapping
License: GPL (>= 3)
LazyLoad: yes
Packaged: 2020-03-24 18:42:37 UTC; work
Packaged: 2020-06-23 22:48:04 UTC; work
Repository: CRAN
Date/Publication: 2020-03-25 14:00:02 UTC
Date/Publication: 2020-06-24 04:30:03 UTC
NeedsCompilation: no
24 changes: 13 additions & 11 deletions MD5
@@ -1,13 +1,14 @@
cc2b6b42b7b57121f758282c981562d8 *ChangeLog
6bb4c0b7d3756f3a85e92a365e57fb6d *DESCRIPTION
210145c57636a1a590e2127fcec2d406 *NAMESPACE
f419f80f781e29430f8caa0d9e114a2a *ChangeLog
d75e85e1694fa45f4bbd40491545a5ca *DESCRIPTION
f854c861049b1b9bf648ee1f463d53de *NAMESPACE
3e611abed9a9cac153c07f2681682a8e *R/bin_TempTS.r
d41a58726ca0cc494e29d3fe83555616 *R/dy_DepthTS.r
f873e0b8c23a7ec2783872a83889f639 *R/get_DaytTimeLimits.r
49eb7860036a00f1b1e3da8c19ffdd9a *R/get_geopos.r
aad80d7e3584aa4ec7f9280434659b65 *R/get_geopos.r
5a9b700d9095f7dabdb593f7ceef78fc *R/get_thermalstrat.r
45b2cdd18c15ddf3361627afd1b06f8d *R/hidden_functions.r
bb85761ae6bc9f47d0ef960a0e0473b2 *R/hist_tad.r
19ac72047a0a701f38baf52f08ce3ac8 *R/image_TempDepthProfiles.r
89adc7f96c9b304f574e94aa3769e56e *R/image_TempDepthProfiles.r
55582205339a91e7662fc4a8f4e30625 *R/interpolate_TempDepthProfiles.r
70f41d70368a7e29db06f53ed5a360d6 *R/plot_DepthTempTS.r
9a9b6601f9b984b9c647e1795e66fb43 *R/plot_TS.r
Expand All @@ -20,7 +21,7 @@ bcf56c5a266f2ceb4e1a930242913a61 *R/resample_PDT.r
49a8eeb4ec659d3035009c5ee21d7a73 *R/resample_TS.r
45f53b2c5259161227dbba44e0f3e030 *R/tad_summary.r
41afdbaf29b35c14974e3cdba0b12575 *R/ts2histos.r
0a0c01931c24b0a4c1abbc82f783fda3 *inst/doc/RchivalTag.pdf
f50bdc412ee64f942e65724a51271957 *inst/doc/RchivalTag.pdf
5ed8535044dcec09af091dca0617ac41 *inst/example_files/104659-Histos.csv
14919538772cfbcc8dde94b889ede6f9 *inst/example_files/104659-PDTs.csv
ec53f903951050e6c4f96a0b7be076b8 *inst/example_files/104659-Series.csv
Expand All @@ -32,22 +33,23 @@ cd01eabf380a9ce776802f5d32f1cde7 *inst/example_files/104659-Series_date_format_E
6997d03563e9eaad4131bed6e9c0a3b7 *inst/example_files/15P1019-104659-1-GPE3.kmz
17ec587e2ccd909540cb761cac3c267c *inst/example_files/15P1019-104659-1-GPE3.nc
a903534a495186068e391639a6344e0f *inst/example_files/67851-12h-Histos.csv
c2af40d028ff03331e793c56125f9ecc *man/RchivalTag.Rd
3e58664e7ca62df410584e2abd3dae2a *man/RchivalTag.Rd
328353bb9a7cc8f29c0078f5ea8a85bb *man/bin_TempTS.Rd
30ed29f31e2d77028f85e594242ca268 *man/classify_DayTime.Rd
4b01620123bacb718d7543dab21a8734 *man/combine_histos.Rd
905aa9faf48429c00bc767aca892ed68 *man/dy_DepthTS.Rd
486408d4604d7492a1c06668aeb517f7 *man/get_DayTimeLimits.Rd
6a2cb1742f5110296c27a1fef29ba5a7 *man/get_thermalstrat.Rd
36bf92979bd9f2f42bd591667f6b5563 *man/hist_tad.Rd
8ac3217650cf11e9fed4680ff4c45dcc *man/hist_tat.Rd
3d974bc1c0bbf712a0efb6b3410b3662 *man/image_TempDepthProfiles.Rd
f0a9097082c5f066d349135247e7f5e1 *man/image_TempDepthProfiles.Rd
4b5b03b49120e9f63a55e81007eaa2b0 *man/interpolate_TempDepthProfiles.Rd
8fad25981fae098bc1b4760e7b8709c0 *man/merge_histos.Rd
296de476d03f1dcf323794bd7ef7218b *man/plot_DepthTempTS.Rd
0c562b1166bcb376bc28e30558666e58 *man/plot_TS.Rd
c544bda6a708b414bcbe301843f28952 *man/plot_geopos.Rd
30501bcfc4bf025b579f7ff563d07584 *man/plot_TS.Rd
d998944fe43d333bc5d98fd47c3bab2e *man/plot_geopos.Rd
ab6718b373798e002235f4c02d387fc9 *man/read_PDT.Rd
0b817d0a5143aa41a89d0f6cab1e1362 *man/read_TS.Rd
06c39bfa504efdde5828878baca76292 *man/read_TS.Rd
54c4018702bc3086672b926919af4c0c *man/read_histos.Rd
c84991f34b8ec976ce4bf0a2b9ea9cb7 *man/resample_PDT.Rd
e9ed65a81c5a4b7de53dfd078ac536d5 *man/resample_TS.Rd
Expand Down
2 changes: 2 additions & 0 deletions NAMESPACE
Expand Up @@ -8,6 +8,8 @@ import("raster")
import("rgeos")
import("ncdf4")
import("maps")
import("dygraphs")
import("xts")
import("readr")
import("mapdata")
import("oceanmap")
Expand Down
100 changes: 100 additions & 0 deletions R/dy_DepthTS.r
@@ -0,0 +1,100 @@
#' Interactive (dygraphs-based) plot of archival tag time series data
#'
#' @param ts_df data.frame with a POSIXct 'datetime' column and the time
#'   series column selected via 'y' (default "Depth"). For day/twilight
#'   shading it must also hold either precomputed twilight columns or
#'   'Lon'/'Lat' so get_DayTimeLimits() can derive them.
#' @param y name of the column to plot.
#' @param xlim optional time window (Date, 10-char date string or POSIXct;
#'   length 1 or 2). The data set is subset to this window.
#' @param ylim optional y-axis value range; defaults to a reversed axis for
#'   "Depth" data.
#' @param ylab,xlab,main axis labels and plot title.
#' @param ID,ID_label tag identifier; taken from column 'ID_label' if 'ID'
#'   is missing, with fallback to 'DeployID'/'Ptt' columns.
#' @param plot_DayTimePeriods,twilight.set add day/twilight background
#'   shading based on the chosen twilight set (e.g. "ast", "naut").
#' @param color line color of the time series.
#' @param doRangeSelector add a dygraphs range selector below the plot.
#' @param labelsUTC,drawPoints,pointSize dygraphs display options.
#' @param ... further arguments passed on to dygraphs::dygraph().
#' @return a dygraph htmlwidget.
dy_DepthTS <- dy_TS <- function(ts_df, y="Depth", xlim, ylim,
                                ylab=y, xlab="Time (UTC)", main,
                                ID, ID_label="Serial",
                                plot_DayTimePeriods=TRUE, twilight.set="ast",
                                color="darkblue",
                                doRangeSelector=TRUE, labelsUTC=TRUE, drawPoints=FALSE, pointSize=2,
                                ...){
  
  ## resolve the tag identifier; fall back to DeployID/Ptt columns:
  if(missing(ID) & is.null(ts_df[[ID_label]])) {
    k <- c("DeployID","Ptt") %in% names(ts_df)
    if(any(k)) {
      ID_label <- c("DeployID","Ptt")[which(k)[1]]
      warning(paste0("ID_label 'Serial' not found! Resetting ID_label to '",ID_label,"'!"))
    }
    ## BUGFIX: warn only if *no* fallback column exists (was '!all(k)',
    ## which also fired when exactly one fallback column was present):
    if(!any(k)) warning(paste0("no column corresponding to ID_label '",ID_label,"' found!"))
  }
  if(missing(ID)) ID <- unique(ts_df[[ID_label]])
  if(length(ID) > 1) {
    warning("multiple tags in data set: ", paste(ID, collapse=', '))
    main <- ''
  }
  if(missing(main)) main <- paste("Tag ID", ID)
  
  ## default y-axis limits (reversed axis for depth data):
  if(missing(ylim)){
    if(y == "Depth") {
      ylim <- c(max(ts_df$Depth,na.rm = TRUE),-1)
    }else{
      ## BUGFIX: use the column selected by 'y' (was 'ts_df$y', i.e. a
      ## literal column named "y"):
      ylim <- range(ts_df[[y]],na.rm = TRUE)
    }
  }
  
  ## optional subsetting of the time series to the requested time window:
  if(!missing(xlim)) {
    if(class(xlim)[1] == 'Date' | nchar(as.character(xlim[1])) == 10){
      xlim <- as.Date(xlim)
      if(length(xlim) == 1) xlim <- c(xlim, xlim)
      xlim[2] <- xlim[2]+1 # include the full last day
      xlim <- paste(xlim, '00:00:00')
    }
    xlim <- .fact2datetime(xlim,tz = "UTC")
    if(length(xlim) > 2) xlim <- range(xlim)
    if(length(xlim) == 1) xlim <- c(xlim, xlim[1]+24*60*60)
    
    ts_df <- ts_df[which(ts_df$datetime >= xlim[1] & ts_df$datetime <= xlim[2]),]
  }
  
  ## BUGFIX: strip "(UTC)" literally; without fixed=TRUE the parentheses
  ## formed a regex group, so only "UTC" was removed and " ()" remained:
  if(!labelsUTC) xlab <- gsub("(UTC)","",xlab,fixed = TRUE)
  
  ## convert the selected column to an xts object for dygraphs:
  dat <- ts_df[,c("datetime",y)]
  dat$datetime <- as.POSIXct(dat$datetime,tz = "UTC")
  ds <- data.frame(dat[,y]); names(ds) <- y
  dat_xts <- xts::xts(ds,order.by=dat$datetime)
  
  ## create the base dygraph:
  dg <- dygraph(dat_xts, xlab=xlab, ylab=ylab, main=main, ...)
  
  ## check availability of the twilight columns required for shading;
  ## derive them from Lon/Lat/datetime if necessary:
  if(plot_DayTimePeriods){
    dawn <- paste0("dawn.",twilight.set)
    dusk <- paste0("dusk.",twilight.set)
    shade_periods <- c("sunrise","sunset",dawn,dusk)
    if(!all(shade_periods %in% names(ts_df))) {
      if(all(c("Lon", "Lat", "datetime") %in% names(ts_df))){
        ts_df <- get_DayTimeLimits(ts_df)
      }else{
        plot_DayTimePeriods <- FALSE
        warning("no geolocation data or datetime vector provided (Lon, Lat, datetime)! plot_DayTimePeriods omitted in current call. Please revise.")
      }
    }
  }
  if(plot_DayTimePeriods){
    shades <- unique(ts_df[,shade_periods])
    
    ## one white shade per daylight period plus light grey twilight shades:
    shades_list <- list()
    j <- 1
    for(i in 1:nrow(shades)){
      shades_list[[j]] <- list(from=as.POSIXct(shades$sunrise[i],tz = "UTC"), to=as.POSIXct(shades$sunset[i],tz = "UTC"),color="white")
      j <- j+1
      ## BUGFIX: honour the selected twilight.set (was hard-coded
      ## 'dawn.ast'/'dusk.ast', breaking other twilight sets):
      shades_list[[j]] <- list(from=as.POSIXct(shades[[dawn]][i],tz = "UTC"), to=as.POSIXct(shades$sunrise[i],tz = "UTC"),color="lightgrey")
      j <- j+1
      shades_list[[j]] <- list(from=as.POSIXct(shades[[dusk]][i],tz = "UTC"), to=as.POSIXct(shades$sunset[i],tz = "UTC"),color="lightgrey")
      j <- j+1
    }
    
    ## dark grey background (night), overlaid by the day/twilight shades:
    dg <- dyShading(dg, from = ts_df$datetime[1], to = tail(ts_df$datetime,1), color = "darkgrey")
    for(period in shades_list){
      dg <- dyShading(dg, from = period$from, to = period$to, color = period$color)
      ## BUGFIX: annotate only if a label was set ('period$label' was always
      ## NULL in the original code, which breaks dyAnnotation):
      if(!is.null(period$label)) dg <- dyAnnotation(dg, x = mean(c(period$from,period$to)), text = period$label, attachAtBottom=TRUE)
    }
  }
  label <- y
  if(y == "Depth") label <- paste(y,"(m)")
  dg <- dyOptions(dg, colors=color, drawPoints = drawPoints, pointSize = pointSize, labelsUTC = labelsUTC, strokeWidth = 1)
  dg <- dyAxis(dg, "y", label = label, valueRange = ylim)
  if(doRangeSelector) dg <- dyRangeSelector(dg)
  return(dg)
}
127 changes: 118 additions & 9 deletions R/get_geopos.r
@@ -1,6 +1,7 @@
get_geopos <- function(x, xlim, ylim, date_format, lang_format="en", tz="UTC", add=FALSE, prob_lim=.75){
get_geopos <- function(x, xlim, ylim, date_format, lang_format="en", tz="UTC", proj4string, add=FALSE, prob_lim=.5){
file <- x
if(missing(date_format)) date_format <- "%d-%b-%Y %H:%M:%S"
if (missing(proj4string)) proj4string <- CRS(as.character(NA))

if(substr(file,nchar(file)-2,nchar(file)) == "csv"){
#### check for header line:
Expand All @@ -22,10 +23,9 @@ get_geopos <- function(x, xlim, ylim, date_format, lang_format="en", tz="UTC", a
pos$date <- as.Date(pos$datetime)
pos$Date <- c()
out <- pos
}else{

# source("~/Dropbox/my_R-packages/RchivalTag.build/RchivalTag/R/hidden_functions.r")
# library(oceanmap)
}

if(substr(file,nchar(file)-1,nchar(file)) == "nc"){

nc <- ncdf4::nc_open(file)
# print(nc)
Expand Down Expand Up @@ -63,9 +63,11 @@ get_geopos <- function(x, xlim, ylim, date_format, lang_format="en", tz="UTC", a
Raster.HR@data@values <- Raster.HR@data@values/sum(Raster.HR@data@values,na.rm = T) #normalize the grid values so they sum to 1

RasterVals <- sort(Raster.HR@data@values) #sort the probability values
Raster.breaks <- c(RasterVals[max(which(cumsum(RasterVals)<=prob_lim))])
Raster.breaks <- c(RasterVals[max(which(cumsum(RasterVals)<=(1-prob_lim)))])
cl <- try(rasterToContour(Raster.HR,levels = Raster.breaks),silent = T)
cl

cl0 <- cl
if(class(cl) != "try-error"){
p <- maptools::SpatialLines2PolySet(cl)
spolys <- maptools::PolySet2SpatialPolygons(p)
Expand All @@ -82,11 +84,118 @@ get_geopos <- function(x, xlim, ylim, date_format, lang_format="en", tz="UTC", a
# pols[[datetime[i]]] <- spolys
}
}

projection(pols) <- proj4string
pols_joined <- pols
# pols_joined <- SpatialPolygons(lapply(pols, function(x){x@polygons[[1]]}))
out <- pols_df <- SpatialPolygonsDataFrame(Sr=pols_joined, data=data.frame(file=file,prob_lim=prob_lim,datetime=datetime,
xmin=xlim[1],xmax=xlim[2],ymin=ylim[1],ymax=ylim[2]),FALSE)
xmin=xlim[1],xmax=xlim[2],ymin=ylim[1],ymax=ylim[2]),FALSE)
}

if(substr(file,nchar(file)-2,nchar(file)) %in% c("kml","kmz")){
pl <- .getKMLpols(kmlfile=file)
LikelihoodArea <- prob_lim*100
if(!(prob_lim %in% c(.99, .95, .5))) stop("Invalid 'porb_lim' value. Please select one of the following values: 0.99, 0.95, 0.50")
out <- .merge_pols(pl, LikelihoodArea=LikelihoodArea, date_format=date_format, lang_format=lang_format, tz=tz, proj4string = proj4string, xlim=xlim, ylim=ylim)

}
return(out)
}



# Parse polygon coordinates of the "Time Lapse" likelihood areas from a
# kml/kmz geolocation file.
#
# Args:
#   kmlfile        path to a .kml or .kmz file; a .kmz archive is unpacked
#                  first and replaced by the extracted .kml file.
#   ignoreAltitude drop the third (altitude) coordinate column if TRUE.
#
# Returns: a nested list out[["<p>% Likelihood Areas"]][[date]][[inner/outer]]
#          of coordinate data.frames, plus out$file holding the kml path.
#
# NOTE(review): the kmz-unpacking branch shells out to 'cp', 'mv' and
# 'rm -r', so it presumably works on Unix-alikes only — TODO confirm
# intended platform support.
.getKMLpols <- function(kmlfile, ignoreAltitude=TRUE){
  if (missing(kmlfile)) stop("kmlfile is missing")
  
  # unpack kmz archives (zip format) and continue with the extracted kml:
  if(substr(start = (nchar(kmlfile)-3),stop = nchar(kmlfile),kmlfile) ==".kmz"){
    ofile <- kmlfile
    if(grepl(" ",ofile)){
      ofile <- gsub(" ","\\\\ ",kmlfile) # escape spaces for the shell calls below
    }
    
    kmzfile <- kmlfile
    zipfile <- gsub(".kmz",".zip",kmzfile)
    exdir <- gsub(".kmz","",kmzfile)
    
    # copy to .zip, unzip, move the contained kml next to the kmz, clean up:
    system(paste("cp",ofile,gsub(".kmz",".zip",ofile)))
    unzip(zipfile,exdir=exdir)
    tmpfile <- Sys.glob(paste0(exdir,"/*.kml"))
    kmlfile <- gsub(".kmz",".kml",kmzfile)
    system(paste("mv",gsub(" ","\\\\ ",tmpfile),gsub(" ","\\\\ ",kmlfile)))
    system(paste("rm -r", gsub(" ","\\\\ ",exdir)))
    system(paste("rm -r", gsub(" ","\\\\ ",zipfile)))
    
    cat("extracted kml-file from provided kmz-file\n")
  }
  
  # restrict parsing to the "Time Lapse" section of the kml document:
  kml0 <- readLines(kmlfile)
  istart <- grep("Time Lapse",kml0)
  iend <- grep("Maximum Likelihood",kml0)
  iend <- iend[which(iend > istart)][1]
  kml <- kml0[istart:iend]
  idates <- grep('Data name="time"',kml)
  
  n <- length(idates)
  idates <- c(idates,length(kml)) # sentinel so the last chunk has an end index
  out <- list()
  for(ii in 1:n){
    # the contour style id encodes the likelihood level (0/1/2 -> 99%/95%/50%):
    ptype <- as.numeric(strsplit(gsub("</styleUrl>","",kml[(idates[ii]-2)]),"#contour-")[[1]][[2]])+1
    ltype <- paste(c("99%","95%","50%")[ptype], "Likelihood Areas")
    
    dd <- strsplit(gsub("</value>","",kml[idates[ii]+1]),"<value>")[[1]][2] # date string of this polygon
    sub <- kml[idates[ii]:(idates[ii+1]-1)]
    pols_start <- grep("<coordinates>",sub)
    pols_end <- grep("</coordinates>",sub)
    
    # read each coordinate block ("outer" boundary or "inner" hole):
    nj <- length(pols_start); j <- 1
    while(j <= nj){
      poltype <- c("inner","outer")[grepl("outer",sub[(pols_start[j]-2)])+1]
      
      sub2 <- sub[pols_start[j]:(pols_end[j]-1)]
      sub2[1] <- strsplit(sub2[1],"<coordinates>")[[1]][2] # strip the opening tag
      coords <- read.table(textConnection(sub2),sep = ",")
      if(ignoreAltitude) coords[[3]] <- c() # drop altitude column
      add <- coords
      out[[ltype]][[dd]][[poltype]] <- add
      j <- j+1
    }
  }
  out$file <- kmlfile
  return(out)
}
}

# Merge the nested polygon list returned by .getKMLpols() into a single
# SpatialPolygonsDataFrame for one likelihood level.
#
# Args:
#   pl             nested list from .getKMLpols() (with a 'file' element).
#   LikelihoodArea likelihood level in percent; must match one of the
#                  "<x>% Likelihood Areas" entries of 'pl'.
#   date_format, lang_format, tz  parsing options handed to .fact2datetime()
#                  for the per-polygon date names.
#   proj4string    CRS assigned to the polygons (defaults to an NA CRS).
#   xlim, ylim     optional plot limits; extended to span all coordinates.
#
# Returns: a SpatialPolygonsDataFrame with one (outer) polygon per date and
#          metadata columns file, prob_lim, datetime, xmin/xmax/ymin/ymax.
.merge_pols <- function(pl, LikelihoodArea=95, date_format = "%d-%b-%Y %H:%M:%S",lang_format="en",tz="UTC", proj4string, xlim, ylim){
  out <- pl
  file <- out$file; out$file <- c() # detach the file path from the polygon list
  ltype <- paste0(LikelihoodArea,"% Likelihood Areas")
  valid_ltypes <- gsub("% Likelihood Areas","",names(out))
  if(!ltype %in% names(out)) stop("Please select one of the following valid Likelihood Areas: ",paste(valid_ltypes,collapse=", "))
  n <- length(out[[ltype]])
  
  if (missing(proj4string)) proj4string <- CRS(as.character(NA))
  pols <- c()
  if(missing(xlim)) xlim <- c()
  if(missing(ylim)) ylim <- c()
  
  # build one SpatialPolygons object per date and collect them in 'pols':
  for(i in 1:n){
    dd <- names(out[[ltype]])[i]
    coords <- out[[ltype]][[dd]][["outer"]] # inner holes are not merged here
    spolys <- SpatialPolygons(list(Polygons(list(Polygon(coords, hole=as.logical(NA))), ID=paste("outer",dd))), proj4string=proj4string)
    
    if(is.null(pols)){
      pols <- spolys
    }else{
      # NOTE(review): direct slot assignment appends the polygon but does not
      # refresh the object's bbox/plotOrder — confirm this is acceptable here.
      pols@polygons[[i]] <- spolys@polygons[[1]]
    }
    
    # grow the axis limits so they cover all polygons:
    xlim <- range(c(xlim,coords[,1]))
    ylim <- range(c(ylim,coords[,2]))
  }
  pols_joined <- pols
  datetime <- .fact2datetime(names(out[[ltype]]),date_format = date_format, lang_format = lang_format, tz = tz)
  
  out2 <- pols_df <- SpatialPolygonsDataFrame(Sr=pols_joined, data=data.frame(file=file,prob_lim=LikelihoodArea/100,datetime=datetime,
                                                                              xmin=xlim[1],xmax=xlim[2],ymin=ylim[1],ymax=ylim[2]),FALSE)
  return(out2)
}
2 changes: 1 addition & 1 deletion R/image_TempDepthProfiles.r
@@ -1,7 +1,7 @@

image_TempDepthProfiles <- function(x, main=NULL, xlab='Date', ylab="Depth (m)",
cb.xlab=expression(paste("Temperature (",degree,"C)")), cex.cb.xlab=1, cex.cb.ticks=1,
xlim, ylim, zlim, pal="jet", only.months, month.line, mars, ...){
xlim, ylim, zlim, pal="jet", only.months, month.line=0, mars, ...){
cmap <- NULL
if(is.character(pal) & length(pal) == 1){
data("cmap", package='oceanmap', envir = environment())
Expand Down
Binary file modified inst/doc/RchivalTag.pdf
Binary file not shown.
2 changes: 1 addition & 1 deletion man/RchivalTag.Rd
Expand Up @@ -5,7 +5,7 @@
RchivalTag - Analyzing Archival Tagging Data
}
\description{
\code{RchivalTag} provides a set of functions to analyze and visualize (aquatic) archival tagging data, including:
\code{RchivalTag} provides a set of functions to analyze and visualize different data products from Archival Tags (supported models include, amongst others: MiniPAT, sPAT, mk10 and mk9 from \href{http://wildlifecomputers.com/}{Wildlife Computers}, as well as PSAT models from \href{https://www.lotek.com/products/psat-series/}{LOTEK}; models from other manufacturers might be supported as well), including:

\itemize{
\item{"(Depth) time series data"} (See \link{empty.plot_TS}, \link{plot_TS} & \link{plot_DepthTS})
Expand Down

0 comments on commit 4dec20a

Please sign in to comment.