Skip to content

Commit

Permalink
version 2.0.3
Browse files Browse the repository at this point in the history
  • Loading branch information
zhizuio authored and cran-robot committed Oct 2, 2023
0 parents commit b47a653
Show file tree
Hide file tree
Showing 30 changed files with 16,443 additions and 0 deletions.
30 changes: 30 additions & 0 deletions DESCRIPTION
@@ -0,0 +1,30 @@
Package: psbcSpeedUp
Title: Penalized Semiparametric Bayesian Survival Models
Version: 2.0.3
Date: 2023-10-01
URL: https://github.com/ocbe-uio/psbcSpeedUp
BugReports: https://github.com/ocbe-uio/psbcSpeedUp/issues
Authors@R: c(person("Zhi", "Zhao", role=c("aut","cre"), email = "zhi.zhao@medisin.uio.no"),
person("Manuela", "Zucknick", role=c("aut")),
person("Maral", "Saadati", role=c("aut")),
person("Axel", "Benner", role=c("aut")))
Description: Algorithms to speed up the Bayesian Lasso Cox model (Lee et al., Int J Biostat, 2011 <doi:10.2202/1557-4679.1301>) and the Bayesian Lasso Cox with mandatory variables (Zucknick et al. Biometrical J, 2015 <doi:10.1002/bimj.201400160>).
License: GPL-3
Copyright: The C++ files pugixml.cpp, pugixml.h and pugiconfig.h are
used with Copyright (C) 2006-2018 Arseny Kapoulkine and
Copyright (C) 2003 Kristen Wegner.
Depends: R (>= 4.0)
Encoding: UTF-8
RoxygenNote: 7.2.3
LinkingTo: Rcpp, RcppArmadillo (>= 0.9.000)
Imports: Rcpp, xml2, ggplot2, GGally, MASS, utils, stats
LazyData: true
NeedsCompilation: yes
Packaged: 2023-10-01 16:21:37 UTC; zhiz
Author: Zhi Zhao [aut, cre],
Manuela Zucknick [aut],
Maral Saadati [aut],
Axel Benner [aut]
Maintainer: Zhi Zhao <zhi.zhao@medisin.uio.no>
Repository: CRAN
Date/Publication: 2023-10-02 10:20:03 UTC
29 changes: 29 additions & 0 deletions MD5
@@ -0,0 +1,29 @@
76f79f5f373a7dde63b6121b85dcd76b *DESCRIPTION
54e2e2200e0daa66e9280c35d1b4000b *NAMESPACE
908c8d68e730796153119a7b0187fabd *NEWS.md
7ae1823500df61c6e97cd9c55c9887ee *R/RcppExports.R
21d58ff7804deb95e7bfb46620d52e32 *R/coef.psbcSpeedUp.R
c0dd2e03b926a0d3c5ee44898121cfbe *R/exampleData.R
b22f82e532fc429f2732cb5a888ca46e *R/plot.psbcSpeedUp.R
d9ca51ad31c9c6334afbb6e1cb944adb *R/psbcSpeedUp.R
a7351c4693e287661e13ee5d7114baea *README.md
f91c2ee39ced0e6e91f98b81e9344f11 *data/datalist
7b548c9bd6a373f888b1abd5f788be24 *data/exampleData.rda
2fe8a0aedb704c36b800f6724cd3569a *man/coef.psbcSpeedUp.Rd
ceb1aaadfdec68603d5cc77d5f796051 *man/exampleData.Rd
198b70412d51b344f358c6bdf480f5c7 *man/figures/README_plot_beta.png
495aaa9330ceb6cad6b1e2b596768650 *man/plot.psbcSpeedUp.Rd
a7325fb454a4bfe342813817bf61fc77 *man/psbcSpeedUp.Rd
be677fb5ddff8d3a2e91ab570258256f *src/Makevars
dabf6e18c628e0fdcaf2b8da95f8ec2c *src/Makevars.win
7cdb99eb44136e10afff8ed97e5ceaaf *src/RcppExports.cpp
fd4b9246e51dcd4952be89c2f1901796 *src/drive.cpp
c06c39ff9d69371f3f52a6fded48ce54 *src/drive.h
ca89b29337087802b658b063fcedf3b1 *src/global.cpp
fcc63ff239f7e16106ebff3b268e81a8 *src/global.h
185e7e0d65856fa49943cb9e87e28875 *src/init.c
63fb7f837f5ead3567c138071d25469d *src/pugiconfig.h
2a482565cd695663ed7426f6302af0ae *src/pugixml.cpp
654ec0ae83f4bc6dcdd26c633cb4ee08 *src/pugixml.h
e07acc941971e653481244878da70084 *src/utils.cpp
89b7b0a35fd94e7bdec8f425188492eb *src/utils.h
18 changes: 18 additions & 0 deletions NAMESPACE
@@ -0,0 +1,18 @@
# Generated by roxygen2: do not edit by hand

S3method(coef,psbcSpeedUp)
S3method(plot,psbcSpeedUp)
export(psbcSpeedUp)
importFrom(GGally,ggcoef)
importFrom(MASS,mvrnorm)
importFrom(Rcpp,evalCpp)
importFrom(ggplot2,xlab)
importFrom(ggplot2,ylab)
importFrom(stats,quantile)
importFrom(stats,rexp)
importFrom(stats,rgamma)
importFrom(stats,runif)
importFrom(utils,write.table)
importFrom(xml2,as_xml_document)
importFrom(xml2,write_xml)
useDynLib(psbcSpeedUp)
12 changes: 12 additions & 0 deletions NEWS.md
@@ -0,0 +1,12 @@
# TODO in next versions

* use Bandicoot: Armadillo C++ library for GPU
* remove Rcpp::List object, instead writing all output objects into files
* allow multiple chains and omp for parallelisation
* add R functions for feature (stability) selection
* add R functions for survival predictions
* extend C++ source code for implementing other shrinkage and group priors

# psbcSpeedUp 2.0.3

* First released version
7 changes: 7 additions & 0 deletions R/RcppExports.R
@@ -0,0 +1,7 @@
# Generated by using Rcpp::compileAttributes() -> do not edit by hand
# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393

# Internal wrapper around the compiled C++ sampler registered as
# '_psbcSpeedUp_psbcSpeedUp_internal'. Not exported; called by the R front
# end with the problem dimensions (p genomic, q clinical covariates),
# initial values (ini_beta, ini_tauSq, ini_h), group indicator, and MCMC
# settings (nIter, nChains, thin, rw = random-walk proposal flag).
# NOTE(review): 'dataFile' and 'hyperParFile' appear to be paths to files
# prepared by the R front end and 'outFilePath' a directory for chain
# output (xml2::write_xml is imported in NAMESPACE) -- confirm against
# R/psbcSpeedUp.R, which is not shown here.
psbcSpeedUp_internal <- function(dataFile, p, q, hyperParFile, outFilePath, ini_beta, ini_tauSq, ini_h, groupInd, nIter, nChains, thin, rw) {
    .Call('_psbcSpeedUp_psbcSpeedUp_internal', PACKAGE = 'psbcSpeedUp', dataFile, p, q, hyperParFile, outFilePath, ini_beta, ini_tauSq, ini_h, groupInd, nIter, nChains, thin, rw)
}

58 changes: 58 additions & 0 deletions R/coef.psbcSpeedUp.R
@@ -0,0 +1,58 @@
#' @title coef method for class \code{psbcSpeedUp}
#'
#' @description
#' Extract the point estimates of the regression coefficients
#'
#' @name coef.psbcSpeedUp
#'
#' @param object an object of class \code{psbcSpeedUp}
#' @param type type of point estimates of regressions. One of
#' \code{c("mean", "median")}. Default is \code{mean}
#' @param ... not used
#'
#' @return Estimated coefficients are from an object of class \code{psbcSpeedUp}.
#' If the \code{psbcSpeedUp} model specified data standardization, the
#' estimated coefficients are based on the standardized data.
#'
#' @examples
#'
#' # Load the example dataset
#' data("exampleData", package = "psbcSpeedUp")
#' p <- exampleData$p
#' q <- exampleData$q
#' survObj <- exampleData[1:3]
#'
#' # Set hyperparameters
#' mypriorPara <- list(
#' "groupInd" = 1:p, "eta0" = 0.02, "kappa0" = 1, "c0" = 2, "r" = 10 / 9,
#' "delta" = 1e-05, "lambdaSq" = 1, "sigmaSq" = runif(1, 0.1, 10),
#' "beta.prop.var" = 1, "beta.clin.var" = 1)
#'
#' # run Bayesian Lasso Cox
#' library(psbcSpeedUp)
#' set.seed(123)
#' fitBayesCox <- psbcSpeedUp(survObj,
#' p = p, q = q, hyperpar = mypriorPara,
#' nIter = 10, burnin = 0, outFilePath = tempdir()
#' )
#' coef(fitBayesCox)
#'
#' @export
coef.psbcSpeedUp <- function(object, type = "mean", ...) {
  # Extract posterior point estimates (mean or median) of the regression
  # coefficients from a fitted 'psbcSpeedUp' object, after discarding the
  # burn-in portion of the stored MCMC samples.
  #
  # Args:
  #   object: an object of class 'psbcSpeedUp' with components
  #           output$beta.p (samples matrix, rows = saved iterations,
  #           columns = coefficients) and input$burnin / input$thin
  #   type:   point estimate, one of "mean" or "median"
  #   ...:    not used (kept for S3 method compatibility)
  #
  # Returns: a named numeric vector of estimated coefficients.
  if (!inherits(object, "psbcSpeedUp")) {
    stop("Use only with \"psbcSpeedUp\" objects")
  }

  if (length(type) != 1 || !type %in% c("mean", "median")) {
    stop("'type' should be one of c('mean', 'median')!")
  }

  # Drop the stored burn-in rows. The saved chain is thinned, so the number
  # of burn-in rows actually stored is burnin / thin, plus one row for the
  # initial values. (This matches the handling in plot.psbcSpeedUp(); the
  # previous code used burnin + 1, over-discarding whenever thin > 1.)
  beta_p <- object$output$beta.p[-(1:(object$input$burnin / object$input$thin + 1)), ]
  beta_est <- apply(beta_p, 2, type)

  beta_est
}
98 changes: 98 additions & 0 deletions R/exampleData.R
@@ -0,0 +1,98 @@
#' @title Simulated data set
#'
#' @description
#' Simulated data set for a quick test. The data set is a list with six
#' components: survival times \code{"t"}, event status \code{"di"}, covariates
#' \code{"x"}, number of genomic variables \code{"p"}, number of clinical
#' variables \code{"q"} and true effects of covariates \code{"beta_true"}.
#' The R code for generating the simulated data is given in the Examples
#' paragraph.
#'
#' @importFrom MASS mvrnorm
#'
#' @examples
#' # Load the example dataset
#' data("exampleData", package = "psbcSpeedUp")
#' str(exampleData)
#'
#' # ===============
#' # The code below is to show how to generate the dataset "exampleData.rda"
#' # ===============
#'
#' requireNamespace("MASS", quietly = TRUE)
#'
#' ########################### Predefined Functions
#'
#' Expo <- function(times, surv) {
#' z1 <- -log(surv[1])
#' t1 <- times[1]
#' lambda <- z1 / (t1)
#' list(rate = lambda)
#' }
#'
#' Weibull <- function(times, surv) {
#' z1 <- -log(surv[1])
#' z2 <- -log(surv[2])
#' t1 <- times[1]
#' t2 <- times[2]
#' gamma <- log(z2 / z1) / log(t2 / t1)
#' lambda <- z1 / (t1^gamma)
#' list(scale = lambda, shape = gamma)
#' }
#'
#' ########################### Problem Dimensions
#' n <- 200
#' p <- 30
#' q <- 5
#' s <- 10
#'
#' ############################ Simulate a set of n x p covariates
#'
#' # effects
#' bg <- c(0.75, -0.75, 0.5, -0.5, 0.25, -0.25, rep(0, p - 6))
#' bc <- c(-1.0, 1.0, 0.3, 0, -0.3)
#' bX <- c(bg, bc)
#'
#' # covariates
#' # genomic
#' means <- rep(0, p)
#' Sigma <- diag(1, p)
#' Xg <- MASS::mvrnorm(n, means, Sigma)
#' # clinical
#' x1 <- rbinom(n = n, size = 1, prob = 0.7)
#' x2 <- rbinom(n = n, size = 1, prob = 0.3)
#' x3 <- rnorm(n = n, mean = 0, sd = 1)
#' x4 <- rnorm(n = n, mean = 0, sd = 1)
#' x5 <- rnorm(n = n, mean = 0, sd = 1)
#' Xc <- cbind(x1, x2, x3, x4, x5)
#' # all
#' X <- data.frame(Xg, Xc)
#' names(X) <- c(paste("G", 1:p, sep = ""), paste("C", 1:q, sep = ""))
#' X <- scale(X)
#'
#' # censoring function
#' # - follow-up time 36 to 72 months
#' # - administrative censoring: uniform data entry (cens1)
#' # - loss to follow-up: exponential, 20% loss in 72 months (cens2)
#' ACT <- 36
#' FUT <- 72
#' cens.start <- FUT
#' cens.end <- ACT + FUT
#' cens1 <- runif(n, cens.start, cens.end)
#' loss <- Expo(times = 72, surv = 0.8)
#' cens2 <- rexp(n, rate = loss$rate)
#' cens <- pmin(cens1, cens2)
#'
#' # survival distribution (Weibull, survival probs 0.5 and 0.9 at 12 and 36 months)
#' h0 <- round(log(2) / 36, 2)
#' surv <- Weibull(times = c(12, 36), surv = c(0.9, 0.5))
#'
#' dt <- (-log(runif(n)) * (1 / surv$scale) * exp(-as.matrix(X) %*% bX))^(1 / surv$shape)
#'
#' # survival object
#' status <- ifelse(dt <= cens, 1, 0)
#' os <- pmin(dt, cens)
#'
#' exampleData <- list("t" = os, "di" = status, "x" = X, "p" = p, "q" = q, "beta_true" = bX)
#'
"exampleData"
89 changes: 89 additions & 0 deletions R/plot.psbcSpeedUp.R
@@ -0,0 +1,89 @@
#' @title create a plot of estimated coefficients
#' @description
#' Plot point estimates of regression coefficients and 95\% credible intervals
#'
#' @name plot.psbcSpeedUp
#'
#' @importFrom GGally ggcoef
#' @importFrom ggplot2 xlab ylab
#' @importFrom stats quantile
#'
#' @param x an object of class \code{psbcSpeedUp} or a matrix. If \code{x}
#' is a matrix, use \code{psbcSpeedUp:::plot.psbcSpeedUp(x)}
#' @param type type of point estimates of regression coefficients. One of
#' \code{c("mean", "median")}. Default is \code{mean}
#' @param interval logical argument to show 95\% credible intervals. Default
#' is \code{TRUE}
#' @param ... additional arguments sent to \code{ggplot2::geom_point()}
#'
#' @return ggplot object
#'
#' @examples
#'
#' # Load the example dataset
#' data("exampleData", package = "psbcSpeedUp")
#' p <- exampleData$p
#' q <- exampleData$q
#' survObj <- exampleData[1:3]
#'
#' # Set hyperparameters
#' mypriorPara <- list(
#' "groupInd" = 1:p, "eta0" = 0.02, "kappa0" = 1, "c0" = 2, "r" = 10 / 9,
#' "delta" = 1e-05, "lambdaSq" = 1, "sigmaSq" = runif(1, 0.1, 10),
#' "beta.prop.var" = 1, "beta.clin.var" = 1)
#'
#' # run Bayesian Lasso Cox
#' library(psbcSpeedUp)
#' set.seed(123)
#' fitBayesCox <- psbcSpeedUp(survObj,
#' p = p, q = q, hyperpar = mypriorPara,
#' nIter = 10, burnin = 0, outFilePath = tempdir()
#' )
#' plot(fitBayesCox, color = "blue")
#'
#' @export
plot.psbcSpeedUp <- function(x, type = "mean", interval = TRUE, ...) {
  # Plot posterior point estimates of the regression coefficients with 95%
  # equal-tailed credible intervals, either from a fitted 'psbcSpeedUp'
  # object or directly from a matrix of MCMC samples (rows = iterations,
  # columns = coefficients).
  #
  # Args:
  #   x:        'psbcSpeedUp' object or numeric matrix of posterior samples
  #   type:     point estimate, one of "mean" or "median"
  #   interval: logical. NOTE(review): validated but not referenced below --
  #             confirm whether it was meant to toggle conf.low/conf.high
  #   ...:      forwarded to GGally::ggcoef()
  #
  # Returns: a ggplot object.
  if (!(inherits(x, "psbcSpeedUp") || is.matrix(x))) {
    stop("Use only with 'psbcSpeedUp' object or a matrix!")
  }

  if (length(type) != 1 || !type %in% c("mean", "median")) {
    stop("'type' should be one of c('mean', 'median')!")
  }

  if (!is.logical(interval)) {
    stop("Argument 'interval' must be a logical value!")
  }

  if (inherits(x, "psbcSpeedUp")) {
    samples <- x$output$beta.p
    if (is.null(colnames(samples))) {
      x_names <- paste0("x", seq_len(ncol(samples)))
    } else {
      x_names <- colnames(samples)
    }
    # Drop the stored burn-in rows; the saved chain is thinned, so the
    # number of burn-in rows is burnin / thin, plus one row for the
    # initial values.
    beta_p <- samples[-(1:(x$input$burnin / x$input$thin + 1)), ]
  } else {
    if (is.null(colnames(x))) {
      x_names <- paste0("x", seq_len(ncol(x)))
    } else {
      x_names <- colnames(x)
    }
    beta_p <- x
  }

  # Point estimate and 95% equal-tailed credible interval per coefficient
  beta_est <- apply(beta_p, 2, type)
  beta_L <- apply(beta_p, 2, quantile, 0.025)
  beta_U <- apply(beta_p, 2, quantile, 0.975)
  tbl <- data.frame(term = x_names, estimate = beta_est, conf.low = beta_L, conf.high = beta_U)
  # Fix the factor levels so coefficients are plotted in their original order
  tbl$term <- factor(tbl$term, levels = tbl$term)

  pCoef <- ggcoef(tbl, ...) + xlab(expression(Posterior ~ ~beta)) + ylab("")
  pCoef
}

0 comments on commit b47a653

Please sign in to comment.