
Commit

version 0.2
Xiang Li authored and gaborcsardi committed Dec 10, 2015
1 parent bbd87c5 commit e3bb28c
Showing 17 changed files with 2,094 additions and 273 deletions.
10 changes: 5 additions & 5 deletions DESCRIPTION
@@ -1,16 +1,16 @@
Package: Coxnet
Type: Package
Title: Regularized Cox Model
-Version: 0.1-1
-Date: 2015-03-21
+Version: 0.2
+Date: 2015-12-09
Author: Xiang Li, Donglin Zeng and Yuanjia Wang
Maintainer: Xiang Li <xl2473@columbia.edu>
-Description: Cox model regularized with net (L1 and Laplacian), elastic-net (L1 and L2) or lasso (L1) penalty. In addition, it can truncate the estimate by a hard threshold, which is selected simultaneously with other tuning parameters. Moreover, it can handle the adaptive version of these regularization forms, such as adaptive lasso and net adjusting for signs of linked coefficients. The package uses one-step coordinate descent algorithm and runs extremely fast by taking into account the sparsity structure of coefficients.
+Description: Cox model regularized with net (L1 and Laplacian), elastic-net (L1 and L2) or lasso (L1) penalty, and their adaptive forms, such as adaptive lasso and net adjusting for signs of linked coefficients. Moreover, it treats the number of non-zero coefficients as another tuning parameter and selects it simultaneously with the regularization parameter \code{lambda}. In addition, it fits a varying-coefficient Cox model by kernel smoothing, combined with the aforementioned penalties. The package uses a one-step coordinate descent algorithm and runs extremely fast by taking into account the sparsity structure of the coefficients.
License: GPL (>= 2)
Imports: Rcpp (>= 0.11.5)
LinkingTo: Rcpp, RcppEigen
Depends: Matrix (>= 1.1-5)
-Packaged: 2015-03-21 23:51:42 UTC; Li Xiang
+Packaged: 2015-12-09 22:25:55 UTC; xl2473
NeedsCompilation: yes
Repository: CRAN
-Date/Publication: 2015-03-22 08:11:34
+Date/Publication: 2015-12-10 00:04:05
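For orientation, a minimal usage sketch in R against the 0.2 interface shown in R/Coxnet.R below; the simulated inputs and the two-column (time, status) outcome format are illustrative assumptions, with the authoritative input description in man/Coxnet.Rd.

# Hedged sketch of a cross-validated lasso fit with the 0.2 interface.
# The outcome format below is an assumption; see man/Coxnet.Rd for the exact specification.
library(Coxnet)
set.seed(1)
n <- 100; p <- 20
x <- matrix(rnorm(n * p), n, p)
y <- cbind(time = rexp(n), status = rbinom(n, 1, 0.7))  # illustrative survival outcome
fit <- Coxnet(x, y, penalty = "Lasso", nfolds = 5, inzero = TRUE)
fit$fit         # per-lambda results: lambda, cvm, cvse, nzero, index (temCV in R/Coxnet.R)
fit$lambda.max  # lambda maximizing the cross-validation measure cvm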
31 changes: 16 additions & 15 deletions MD5
@@ -1,16 +1,17 @@
-6ec6e31f1a832b0f1adb8522b398e0c3 *DESCRIPTION
-30a64060a2aea0b24ca9e2f12480a260 *NAMESPACE
-7be568675c98b1990019f0308136fa0e *R/Coxnet.R
-801482527ef416e89685c6cfc6ca34b0 *R/RcppExports.R
-ca0d3444cbf360057a6a6c15cfed8076 *R/coxini.R
-2f09f18f5567f805fe66af8c45e7ea27 *R/coxprep.R
-8f4c1d718d2742e799bf9b94625338ef *R/coxsplit.R
-5a69d1e57dcf9d6ccb51a9f9eb3e0809 *R/print.Coxnet.R
+bd773173575389461be5a62ae8093b44 *DESCRIPTION
+23aca1460546b750eeeacb67ca63efa4 *NAMESPACE
+f6c76b7e526ad2430ebf03b12d5f511e *R/Coxnet.R
+ccd13dddcb5ce15bddb7b6c64603e4a1 *R/RcppExports.R
+8b3b2289b92bb2fdfcc3c7b7bd7c9b73 *R/coxini.R
+1ef838204b6f92113bb21c987d5e4c1b *R/coxprep.R
+4bf909d1df7e5ef08763e7d97d6cf518 *R/coxsplit.R
+eea100bf4ab14ea03dd9b91b63b12cd2 *R/loCoxnet.R
+217292cd25c065bd5bf88ce3f7b34a85 *R/print.Coxnet.R
af501ead70399056e3279f9952155d41 *inst/include/Coxnet.h
-b0f167deb401f0fa526f3783a3e624a3 *inst/include/Coxnet_RcppExports.h
-7ae1a90cc88b4cfd77f5692756d7d528 *man/Coxnet-package.Rd
-04fc00b1a6b563362baa88ab8f5a6725 *man/Coxnet.Rd
-43b9a3db9ec1c469a28bd77e1cdb0f0f *man/coxsplit.Rd
-1920d6a1728fe12174aabf9d54f30efd *man/print.Coxnet.Rd
-574291f344995c8ce4d9bc381ea8da0e *src/Coxnet.cpp
-39f8b3017461be813aca5af77587c563 *src/RcppExports.cpp
+48e812d140a04234b633afd3ecc2e565 *inst/include/Coxnet_RcppExports.h
+8b3eca961e56804fe6cbaafe111371c1 *man/Coxnet-package.Rd
+e24caca21a1dc3fac4cfcc0358cb94a5 *man/Coxnet.Rd
+56e325f52b5c8cdf93a851463232948e *man/coxsplit.Rd
+6b50288629cef493804e10bda7b5fe15 *man/print.Coxnet.Rd
+37a8ab35d30a98c06403d6a04751060c *src/Coxnet.cpp
+f5223094b039a12bc4b96c97fe6c2d57 *src/RcppExports.cpp
4 changes: 2 additions & 2 deletions NAMESPACE
@@ -1,6 +1,6 @@
useDynLib(Coxnet)
import(Matrix)
importFrom(Rcpp, evalCpp)

-export(Coxnet, print.Coxnet, coxsplit)
+importFrom("stats", "sd", "weighted.mean")
+export(Coxnet, loCoxnet, print.Coxnet, coxsplit, coxsplity, coxsplitw)
S3method(print, Coxnet)
65 changes: 33 additions & 32 deletions R/Coxnet.R
@@ -12,7 +12,7 @@
##########################################################
### lambda1*||_1+lambda2/2*||_2 <-> lambda*(alpha_1+(1-alpha)/2_2)

-Coxnet=function(x, y, Omega=NULL, penalty=c("Lasso","Enet", "Net"), alpha=1, lambda=NULL, nlambda=50, rlambda=NULL, nfolds=1, foldid=NULL, itrunc=TRUE, adaptive=c(FALSE,TRUE), aini=NULL, alambda=NULL, nalambda=10, isd=TRUE, ifast=TRUE, keep.beta=FALSE, thresh=1e-7, maxit=1e+5) {
+Coxnet=function(x, y, Omega=NULL, penalty=c("Lasso","Enet", "Net"), alpha=1, lambda=NULL, nlambda=50, rlambda=NULL, nfolds=1, foldid=NULL, inzero=TRUE, adaptive=c(FALSE,TRUE), aini=NULL, isd=FALSE, ifast=TRUE, keep.beta=FALSE, thresh=1e-6, maxit=1e+5) {

#fcall=match.call()
penalty=match.arg(penalty)
@@ -27,8 +27,8 @@ Coxnet=function(x, y, Omega=NULL, penalty=c("Lasso","Enet", "Net"), alpha=1, lam
}

fit=switch(penalty,
-"Enet"=coxEnet(x,y,alpha,lambda,nlambda,rlambda,nfolds,foldid,itrunc,adaptive[1],aini,alambda,nalambda,isd,ifast,keep.beta,thresh,maxit),
-"Net"=coxNet(x,y,Omega,alpha,lambda,nlambda,rlambda,nfolds,foldid,itrunc,adaptive,aini,alambda,nalambda,isd,ifast,keep.beta,thresh,maxit))
+"Enet"=coxEnet(x,y,alpha,lambda,nlambda,rlambda,nfolds,foldid,inzero,adaptive[1],aini,isd,ifast,keep.beta,thresh,maxit),
+"Net"=coxNet(x,y,Omega,alpha,lambda,nlambda,rlambda,nfolds,foldid,inzero,adaptive,aini,isd,ifast,keep.beta,thresh,maxit))

#fit$call=fcall
class(fit)="Coxnet"
@@ -41,28 +41,25 @@ Coxnet=function(x, y, Omega=NULL, penalty=c("Lasso","Enet", "Net"), alpha=1, lam
##### Enet (Lasso) #####
##########################

-coxEnet=function(x, y, alpha=1, lambda=NULL, nlambda=100, rlambda=NULL, nfolds=1, foldid=NULL, itrunc=TRUE, adaptive=FALSE, aini=NULL, alambda=NULL, nalambda=10, isd=TRUE, ifast=TRUE, keep.beta=FALSE, thresh=1e-7, maxit=1e+5) {
+coxEnet=function(x, y, alpha=1, lambda=NULL, nlambda=100, rlambda=NULL, nfolds=1, foldid=NULL, itrunc=TRUE, adaptive=FALSE, aini=NULL, isd=FALSE, ifast=TRUE, keep.beta=FALSE, thresh=1e-7, maxit=1e+5) {

penalty=ifelse(alpha==1,"Lasso","Enet")

N0=nrow(x);p=ncol(x)
ifast=as.integer(ifast)

### scaleC and standardized
-xscale=rep(1, p)
-if (isd) {
-tem=scaleC(x)
-xscale=tem$sd;x=tem$x
-rm(tem)
-}
+tem=scaleC(x)
+xscale=tem$sd;x=tem$x
+rm(tem)

### Full data ###
prep0=coxprep(x, y)

### Adaptive based on Ridge (L2)
if (adaptive) {
-if (is.null(aini))
-aini=coxini(x, y, alambda, nalambda, rlambda, isd)
+if (is.null(aini))
+aini=coxini(x, y)
wbeta=aini$wbeta
rm(aini)
} else {
@@ -84,7 +81,8 @@ coxEnet=function(x, y, alpha=1, lambda=NULL, nlambda=100, rlambda=NULL, nfolds=1
if (nlambdai==0)
return(NULL)
lambdai=lambda[1:nlambdai]
-out$Beta=Matrix(out$Beta[, 1:nlambdai]/xscale, sparse=TRUE)
+#out$Beta=Matrix(out$Beta[, 1:nlambdai]/xscale, sparse=TRUE)
+if (!isd) out$Beta=matrix(out$Beta[, 1:nlambdai]/xscale, ncol=nlambdai)
out$nzero=apply(out$Beta!=0, 2, sum)
out$flag=out$flag[1:nlambdai]

@@ -125,15 +123,16 @@ coxEnet=function(x, y, alpha=1, lambda=NULL, nlambda=100, rlambda=NULL, nfolds=1
indexi=which.max(cvm)
indexij=which(cvm>=(cvm[indexi]-cvse[indexi]))[1]
temi=rep("", nlambdai)
-temi[indexi]="**";temi[indexij]=ifelse(temi[indexij]=="", "*", "***")
+temi[indexi]="max"
+#temi[indexi]="**";temi[indexij]=ifelse(temi[indexij]=="", "*", "***")
temCV=data.frame(lambda=lambdai, cvm=cvm, cvse=cvse, nzero=out$nzero, index=temi,stringsAsFactors=FALSE)

if (!itrunc) {
rm(outi)
if (!keep.beta) {
-return(list(Beta=out$Beta[, c(indexij, indexi)], fit=temCV, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta[, indexi], fit=temCV, lambda.max=lambdai[indexi], penalty=penalty, adaptive=adaptive, flag=out$flag))
} else {
-return(list(Beta=out$Beta, fit=temCV, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta, fit=temCV, lambda.max=lambdai[indexi], penalty=penalty, adaptive=adaptive, flag=out$flag))
}
}

@@ -224,9 +223,9 @@ coxEnet=function(x, y, alpha=1, lambda=NULL, nlambda=100, rlambda=NULL, nfolds=1
temCV0=data.frame(lambda=lambdai[index0],cvm=cv.max[index0],nzero=cuti)

if (!keep.beta) {
-return(list(Beta=out$Beta[, c(indexij, indexi)], Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta[, indexi], Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
} else {
-return(list(Beta=out$Beta, Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta, Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
}
}
}
@@ -239,28 +238,26 @@ coxEnet=function(x, y, alpha=1, lambda=NULL, nlambda=100, rlambda=NULL, nfolds=1
##### Net #####
#################

-coxNet=function(x, y, Omega=NULL, alpha=1, lambda=NULL, nlambda=50, rlambda=NULL, nfolds=1, foldid=NULL, itrunc=TRUE, adaptive=c(FALSE,TRUE), aini=NULL, alambda=NULL, nalambda=10, isd=TRUE, ifast=TRUE, keep.beta=FALSE, thresh=1e-7, maxit=1e+5){
+coxNet=function(x, y, Omega=NULL, alpha=1, lambda=NULL, nlambda=50, rlambda=NULL, nfolds=1, foldid=NULL, itrunc=TRUE, adaptive=c(FALSE,TRUE), aini=NULL, isd=FALSE, ifast=TRUE, keep.beta=FALSE, thresh=1e-7, maxit=1e+5){

penalty=ifelse(alpha==1,"Lasso","Net")

N0=nrow(x);p=ncol(x)
ifast=as.integer(ifast)

### scaleC and standardized
-xscale=rep(1, p)
-if (isd) {
-tem=scaleC(x)
-xscale=tem$sd;x=tem$x
-rm(tem)
-}
+tem=scaleC(x)
+xscale=tem$sd; x=tem$x
+rm(tem)


### Full data ###
prep0=coxprep(x, y)

### Adaptive based on Ridge (L2)
if (any(adaptive)>0) {
if (is.null(aini))
-aini=coxini(x, y, alambda, nalambda, rlambda, isd)
+aini=coxini(x, y)
if (adaptive[1] & !adaptive[2]) {
wbeta=aini$wbeta;sgn=rep(1, p)
} else if (!adaptive[1] & adaptive[2]) {
@@ -296,7 +293,8 @@ coxNet=function(x, y, Omega=NULL, alpha=1, lambda=NULL, nlambda=50, rlambda=NULL
if (nlambdai==0)
return(NULL)
lambdai=lambda[1:nlambdai]
-out$Beta=Matrix(out$Beta[, 1:nlambdai]/xscale, sparse=TRUE)
+#out$Beta=Matrix(out$Beta[, 1:nlambdai]/xscale, sparse=TRUE)
+if (!isd) out$Beta=matrix(out$Beta[, 1:nlambdai]/xscale, ncol=nlambdai)
out$nzero=apply(out$Beta!=0, 2, sum)
out$flag=out$flag[1:nlambdai]

@@ -337,15 +335,16 @@ coxNet=function(x, y, Omega=NULL, alpha=1, lambda=NULL, nlambda=50, rlambda=NULL
indexi=which.max(cvm)
indexij=which(cvm>=(cvm[indexi]-cvse[indexi]))[1]
temi=rep("", nlambdai)
-temi[indexi]="**";temi[indexij]=ifelse(temi[indexij]=="", "*", "***")
+temi[indexi]="max"
+#temi[indexi]="**";temi[indexij]=ifelse(temi[indexij]=="", "*", "***")
temCV=data.frame(lambda=lambdai, cvm=cvm, cvse=cvse, nzero=out$nzero, index=temi,stringsAsFactors=FALSE)

if (!itrunc) {
rm(outi)
if (!keep.beta) {
-return(list(Beta=out$Beta[, c(indexij, indexi)], fit=temCV, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta[, indexi], fit=temCV, lambda.max=lambdai[indexi], penalty=penalty, adaptive=adaptive, flag=out$flag))
} else {
-return(list(Beta=out$Beta, fit=temCV, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta, fit=temCV, lambda.max=lambdai[indexi], penalty=penalty, adaptive=adaptive, flag=out$flag))
}
}

@@ -436,11 +435,13 @@ coxNet=function(x, y, Omega=NULL, alpha=1, lambda=NULL, nlambda=50, rlambda=NULL
temCV0=data.frame(lambda=lambdai[index0],cvm=cv.max[index0],nzero=cuti)

if (!keep.beta) {
-return(list(Beta=out$Beta[, c(indexij, indexi)], Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta[, indexi], Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
} else {
-return(list(Beta=out$Beta, Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.1se=lambdai[indexij], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
+return(list(Beta=out$Beta, Beta0=Beta0, fit=temCV, fit0=temCV0, lambda.max=lambdai[indexi], lambda.opt=lambdai[index0], cv.nzero=cvm[[index0]], penalty=penalty, adaptive=adaptive, flag=out$flag))
}
}
}
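To make the interface change above concrete, a hedged before/after sketch follows; x and y are as in the earlier usage sketch, and the 0.1-1 call is reconstructed from the removed signature rather than taken from the old documentation.

# Coxnet 0.1-1 (removed signature): the adaptive initial fit was tuned through
# alambda/nalambda, hard-threshold truncation was switched by itrunc, and isd defaulted to TRUE.
# fit_old <- Coxnet(x, y, penalty = "Enet", alpha = 0.5, adaptive = c(TRUE, FALSE),
#                   nalambda = 10, itrunc = TRUE, isd = TRUE)

# Coxnet 0.2 (added signature): the initial fit comes from coxini(x, y) internally,
# inzero tunes the number of non-zero coefficients together with lambda, and isd now
# defaults to FALSE, with coefficients rescaled back to the original x scale when !isd.
fit_new <- Coxnet(x, y, penalty = "Enet", alpha = 0.5, adaptive = c(TRUE, FALSE),
                  inzero = TRUE, nfolds = 5)
fit_new$lambda.opt  # lambda selected jointly with the number of non-zero coefficients
fit_new$fit0        # lambda, cvm, nzero at the selected cut (temCV0 in R/Coxnet.R)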




40 changes: 40 additions & 0 deletions R/RcppExports.R
@@ -5,6 +5,10 @@ scaleC <- function(X) {
.Call('Coxnet_scaleC', PACKAGE = 'Coxnet', X)
}

+softC <- function(z, lambda) {
+.Call('Coxnet_softC', PACKAGE = 'Coxnet', z, lambda)
+}
+
OmegaC <- function(Omega, sgn) {
.Call('Coxnet_OmegaC', PACKAGE = 'Coxnet', Omega, sgn)
}
@@ -41,6 +45,42 @@ cvcoxnetC <- function(X, tevent, alpha, lambda, nlambda, wbeta, Omega, loc, nadj
.Call('Coxnet_cvcoxnetC', PACKAGE = 'Coxnet', X, tevent, alpha, lambda, nlambda, wbeta, Omega, loc, nadj, N, nevent, nevent1, loc1, n, p, N0, thresh, maxit, ifast, XF, NF, neventF, nevent1F, loc1F, nF)
}

+max_loclambdaC <- function(X, tevent, Kh, Kh1, N, nevent, nevent1, loc1, n, alpha, wbeta, N0) {
+.Call('Coxnet_max_loclambdaC', PACKAGE = 'Coxnet', X, tevent, Kh, Kh1, N, nevent, nevent1, loc1, n, alpha, wbeta, N0)
+}
+
+alocletaC <- function(eta, tevent, Kh, Kh1, N, nevent, nevent1, loc1, n) {
+.Call('Coxnet_alocletaC', PACKAGE = 'Coxnet', eta, tevent, Kh, Kh1, N, nevent, nevent1, loc1, n)
+}
+
+locletaC <- function(eta, Kh, nevent, nevent1, loc1, n) {
+.Call('Coxnet_locletaC', PACKAGE = 'Coxnet', eta, Kh, nevent, nevent1, loc1, n)
+}
+
+loceobjF <- function(beta, eta, lambda1, lambda2, Kh, nevent, nevent1, loc1, n, N0) {
+.Call('Coxnet_loceobjF', PACKAGE = 'Coxnet', beta, eta, lambda1, lambda2, Kh, nevent, nevent1, loc1, n, N0)
+}
+
+loclbetaC <- function(beta, X, Kh, N, nevent, nevent1, loc1, n) {
+.Call('Coxnet_loclbetaC', PACKAGE = 'Coxnet', beta, X, Kh, N, nevent, nevent1, loc1, n)
+}
+
+locoxenetC <- function(X, tevent, alpha, lambda, nlambda, wbeta, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit) {
+.Call('Coxnet_locoxenetC', PACKAGE = 'Coxnet', X, tevent, alpha, lambda, nlambda, wbeta, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit)
+}
+
+cvlocoxenetC <- function(X, tevent, alpha, lambda, nlambda, wbeta, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit, XF, KhF, NF, neventF, nevent1F, loc1F, nF) {
+.Call('Coxnet_cvlocoxenetC', PACKAGE = 'Coxnet', X, tevent, alpha, lambda, nlambda, wbeta, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit, XF, KhF, NF, neventF, nevent1F, loc1F, nF)
+}
+
+locoxnetC <- function(X, tevent, alpha, lambda, nlambda, wbeta, L, Omega, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit) {
+.Call('Coxnet_locoxnetC', PACKAGE = 'Coxnet', X, tevent, alpha, lambda, nlambda, wbeta, L, Omega, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit)
+}
+
+cvlocoxnetC <- function(X, tevent, alpha, lambda, nlambda, wbeta, L, Omega, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit, XF, KhF, NF, neventF, nevent1F, loc1F, nF) {
+.Call('Coxnet_cvlocoxnetC', PACKAGE = 'Coxnet', X, tevent, alpha, lambda, nlambda, wbeta, L, Omega, Kh, Kh1, N, nevent, nevent1, loc1, n, p, N0, thresh, thresh2, maxit, XF, KhF, NF, neventF, nevent1F, loc1F, nF)
+}
+
# Register entry points for exported C++ functions
methods::setLoadAction(function(ns) {
.Call('Coxnet_RcppExport_registerCCallable', PACKAGE = 'Coxnet')

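The newly exported softC(z, lambda) wrapper is not documented in this commit; assuming it implements the usual elementwise soft-thresholding operator of coordinate descent (an assumption, not confirmed by the diff), a pure-R reference version would be:

# Assumed semantics of soft thresholding: S(z, lambda) = sign(z) * max(|z| - lambda, 0).
soft_threshold <- function(z, lambda) {
  sign(z) * pmax(abs(z) - lambda, 0)
}
soft_threshold(c(-2, -0.5, 0.3, 1.5), lambda = 1)  # -1.0  0.0  0.0  0.5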