Merge pull request #28 from Laksafoss/v0.1.1
Update v0.1.0 for CRAN submission
Laksafoss committed Feb 26, 2024
2 parents 04923b4 + f490c1d commit e8c0c87
Showing 21 changed files with 271 additions and 576 deletions.
20 changes: 11 additions & 9 deletions DESCRIPTION
@@ -1,10 +1,14 @@
 Package: EpiForsk
 Title: Code Sharing at the Department of Epidemiological Research at Statens Serum Institut
-Version: 0.1.0
+Version: 0.1.1
 Authors@R:
-    c(person("Anna", "Laksafoss", , "adls@ssi.dk", role = c("aut", "cre"),
+    c(person("Anders", "Husby", , "andh@ssi.dk", role = c("aut"),
+             comment = c(ORCID = "0000-0002-7634-8455")),
+      person("Anna", "Laksafoss", , "adls@ssi.dk", role = c("aut"),
              comment = c(ORCID = "0000-0002-9898-2924")),
-      person("Kim Daniel", "Jakobsen", , "kija@ssi.dk", role = c("aut"),
+      person("Emilia Myrup", "Thiesson", , "emth@ssi.dk", role = c("aut"),
+             comment = c(ORCID = "0000-0001-6258-4177")),
+      person("Kim Daniel", "Jakobsen", , "kija@ssi.dk", role = c("aut", "cre"),
              comment = c(ORCID = "0000-0003-0086-9980")),
       person("Mikael", "Andersson", , "aso@ssi.dk", role = c("aut"),
              comment = c(ORCID = "0000-0002-0114-2057")),
@@ -18,7 +22,7 @@ Description: This is a collection of assorted functions and examples collected
     regression functionalities a method for calculating the confidence intervals
     for functions of parameters from a GLM, Bayes equivalent for hypothesis
     testing with asymptotic Bayes factor, and several help functions for
-    generalized random forest analysis using the grf package.
+    generalized random forest analysis using 'grf'.
 License: MIT + file LICENSE
 Encoding: UTF-8
 Roxygen: list(markdown = TRUE)
@@ -39,7 +43,7 @@ Imports:
     nnet,
     patchwork,
     policytree,
-    progress,
+    progressr,
     purrr,
     rlang,
     stringr,
@@ -54,14 +58,12 @@ LazyData: true
 Suggests:
     cli,
     CVXR,
-    doParallel,
-    doSNOW,
-    foreach,
+    furrr,
+    future,
     ggsci,
     knitr,
     parallel,
     rmarkdown,
-    snow,
     testthat (>= 3.0.0)
 VignetteBuilder: knitr
 Config/testthat/edition: 3
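For orientation: the Imports hunk swaps progress for progressr, and future/furrr appear in Suggests. A rough, package-independent sketch of how that combination is typically wired together (none of this is EpiForsk code; the toy work function is made up):

    library(future)
    library(furrr)
    library(progressr)

    plan(multisession, workers = 2)     # parallel backend provided by future

    xs <- 1:8
    with_progress({
      p <- progressor(along = xs)       # one progress tick per element
      res <- future_map(xs, function(x) {
        Sys.sleep(0.1)                  # stand-in for real work
        p()                             # signal progress from the worker
        x^2
      })
    })

With progressr the caller decides how (and whether) progress is rendered, so only progressr needs to be a hard dependency while the parallel backend can stay in Suggests.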
15 changes: 5 additions & 10 deletions R/EpiForsk_package.R
@@ -10,16 +10,6 @@
 #' testing with asymptotic Bayes factor, and several help functions for
 #' generalized random forest analysis using the grf package.
 #'
-#' In the package there are contributions from
-#' \itemize{
-#'   \item{ADLS : }{Anna Damkjær Laksafoss (https://orcid.org/0000-0002-9898-2924)}
-#'   \item{ANDH : }{Anders Husby (https://orcid.org/0000-0002-7634-8455)}
-#'   \item{ASO : }{Mikael Andersson (https://orcid.org/0000-0002-0114-2057)}
-#'   \item{EMTH : }{Emilia Myrup Thiesson (https://orcid.org/0000-0001-6258-4177)}
-#'   \item{KIJA : }{Kim Daniel Jakobsen (https://orcid.org/0000-0003-0086-9980)}
-#'   \item{KLP : }{Klaus Rostgaard (https://orcid.org/0000-0001-6220-9414)}
-#' }
-#'
 #' @importFrom gridExtra arrangeGrob
 #' @importFrom methods hasArg
 #' @importFrom dplyr .data
@@ -46,5 +36,10 @@ globalVariables(c("y"))
 globalVariables(c(".SD"))
 globalVariables(c(".N"))

+#' make package data table aware
+#'
+#' This package uses data.table as a fast alternative to dplyr in cases where
+#' performance is essential.
+#'
 #' @export
 .datatable.aware = TRUE
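The hunk above adds documentation for the `.datatable.aware = TRUE` flag, which is the documented way for a package that uses data.table syntax without putting data.table in Depends to opt in to data.table's `[` semantics. A generic illustration of the idiom the flag enables in package code — not code taken from EpiForsk — using the special symbol `.N` that the `globalVariables()` calls above also declare:

    library(data.table)

    dt <- as.data.table(mtcars)

    # Grouped aggregation with data.table's [i, j, by] syntax;
    # .N is the per-group row count.
    dt[, .(mean_mpg = mean(mpg), n_cars = .N), by = cyl]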
2 changes: 2 additions & 0 deletions R/aso_odds_ratio_function.R
@@ -82,6 +82,7 @@
 #'   expvars = c("tocc", "education", "tocc:education")
 #' )
 #'
+#' \donttest{
 #' # Conditional binomial logistic regression with some extra text added:
 #' func_est2 <- odds_ratio_function(
 #'   logan2,
@@ -90,6 +91,7 @@
 #'   matchgroup = "id",
 #'   textvar = "Testing function"
 #' )
+#' }
 #'
 #' # Standard binomial logistic regression as survey data with no prepared
 #' # weights:
14 changes: 6 additions & 8 deletions R/kija_cate_surface.R
@@ -57,12 +57,12 @@
 #' @author KIJA
 #'
 #' @examples
-#' n <- 1500
-#' p <- 5
+#' n <- 1000
+#' p <- 3
 #' X <- matrix(rnorm(n * p), n, p) |> as.data.frame()
 #' X_d <- data.frame(
-#'   X_d1 = factor(sample(1:5, n, replace = TRUE)),
-#'   X_d2 = factor(sample(1:5, n, replace = TRUE))
+#'   X_d1 = factor(sample(1:3, n, replace = TRUE)),
+#'   X_d2 = factor(sample(1:3, n, replace = TRUE))
 #' )
 #' X_d <- DiscreteCovariatesToOneHot(X_d)
 #' X <- cbind(X, X_d)
@@ -72,13 +72,11 @@
 #' cf <- grf::causal_forest(X, Y, W)
 #' cate_surface <- CATESurface(
 #'   cf,
-#'   continuous_covariates = paste0("V", 1:4),
+#'   continuous_covariates = paste0("V", 1:2),
 #'   discrete_covariates = "X_d1",
 #'   grid = list(
 #'     V1 = 10,
-#'     V2 = 5,
-#'     V3 = -5:5,
-#'     V4 = 2
+#'     V2 = -5:5
 #'   ),
 #'   other_discrete = data.frame(
 #'     covs = "X_d2",
4 changes: 2 additions & 2 deletions R/kija_causal_forest_dynamic_subgroups.R
@@ -38,8 +38,8 @@
 #' @author KIJA
 #'
 #' @examples
-#' n <- 1000
-#' p <- 5
+#' n <- 800
+#' p <- 3
 #' X <- matrix(rnorm(n * p), n, p) |> as.data.frame()
 #' W <- rbinom(n, 1, 0.5)
 #' event_prob <- 1 / (1 + exp(2 * (pmax(2 * X[, 1], 0) * W - X[, 2])))
2 changes: 2 additions & 0 deletions R/kija_covariate_balance.R
@@ -89,6 +89,7 @@
 #' @author KIJA
 #'
 #' @examples
+#' \donttest{
 #' n <- 1000
 #' p <- 5
 #' X <- matrix(rnorm(n * p), n, p) |>
@@ -152,6 +153,7 @@
 #'   cd_x_scale_width = 1,
 #'   cd_bar_width = 0.3
 #' )
+#' }
 #'
 #' @export

2 changes: 1 addition & 1 deletion R/kija_discrete_covariates_to_onehot.R
@@ -26,7 +26,7 @@

 DiscreteCovariatesToOneHot <- function(df,
                                        factors = dplyr::everything()) {
-  if (!is.data.frame(df)) {
+  if (!inherits(df, "data.frame")) {
     stop("df must be a data.frame or data.frame like object.")
   }
   df_f <- df |> dplyr::select({{ factors }})
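The check above accepts anything inheriting from data.frame (tibbles and data.tables included), in line with the error message's 'data.frame like object'. A minimal usage sketch mirroring the call pattern in the R/kija_cate_surface.R example above; the exact shape of the returned columns is assumed (one indicator column per factor level), not shown in this diff:

    library(EpiForsk)

    X_d <- data.frame(
      X_d1 = factor(sample(1:3, 10, replace = TRUE)),
      X_d2 = factor(sample(1:3, 10, replace = TRUE))
    )
    X_onehot <- DiscreteCovariatesToOneHot(X_d)  # one-hot encoded covariates (assumed output)
    head(X_onehot)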