Skip to content

Commit

Permalink
version 1.1.0
Browse files Browse the repository at this point in the history
  • Loading branch information
Thierry Denoeux authored and cran-robot committed Jul 1, 2016
1 parent 65a9193 commit dd9a001
Show file tree
Hide file tree
Showing 18 changed files with 466 additions and 54 deletions.
10 changes: 5 additions & 5 deletions DESCRIPTION
@@ -1,8 +1,8 @@
Package: evclass
Type: Package
Title: Evidential Distance-Based Classification
Version: 1.0.2
Date: 2016-06-22
Version: 1.1.0
Date: 2016-07-01
Author: Thierry Denoeux
Maintainer: Thierry Denoeux <tdenoeux@utc.fr>
Description: Different evidential distance-based classifiers, which provide
Expand All @@ -14,8 +14,8 @@ Imports: FNN
LazyData: TRUE
RoxygenNote: 5.0.1
VignetteBuilder: knitr
Suggests: knitr,rmarkdown
Suggests: knitr,rmarkdown,datasets
NeedsCompilation: no
Packaged: 2016-06-22 08:59:07 UTC; Thierry
Packaged: 2016-07-01 08:58:55 UTC; Thierry
Repository: CRAN
Date/Publication: 2016-06-22 12:25:47
Date/Publication: 2016-07-01 13:54:48
32 changes: 17 additions & 15 deletions MD5
@@ -1,36 +1,38 @@
28f2b86366ee109c16742d202febf9db *DESCRIPTION
6dfd314ed4c8aaa318cb72b37c1bdd9d *NAMESPACE
bcd052b70a02a0e61c6457b927c334c8 *R/EkNNfit.R
a0a4669c272eff4f63a0bdcacafe84e7 *R/EkNNinit.R
cc918301931ef0dfa197a2f3d791b3d0 *R/EkNNval.R
527ced758fbb3daf73e3d5cf8bc6a476 *DESCRIPTION
a5c2724892a74189c1da78b052dd85f5 *NAMESPACE
b1177ebc5f71a8b36341eb7ed2df0baf *R/EkNNfit.R
4a492283408257cfb90af408e81dd804 *R/EkNNinit.R
4ee80bfa2fcaef4a9742f1eb81c32225 *R/EkNNval.R
52a431a233d5075e603d3b52f9c9782c *R/classds.R
115d774e5bce42f17e4c44ba20b1c5e2 *R/evclass.R
bba8fad3584d23527015bd66decb3bb5 *R/decision.R
b4b6d2da30c068ca1ebc7483aeab92c8 *R/evclass.R
6f31c165a0d7a2a3e86c41b4e697ed38 *R/foncgradr2n.R
aeaad4cb6952aa9e16d979f5b01f0950 *R/glass-data.R
2980f6f9293ffb4e0dc4d828494c1c99 *R/gradientds.R
20a8bb25740f8f2445eda7ea0460ec20 *R/harris.R
7c919b97eb312a99911bba45aa026664 *R/ionosphere-data.R
848b2e2c11ce455a332dd8b0e7306f75 *R/optimds.R
7535b780d2fd7f9fc526016f40975819 *R/proDSfit.R
4523d06f8b0b94024d8d57b9819c9aac *R/proDSinit.R
cf689fe5918edf79efcfb88395fcbd20 *R/proDSval.R
07349727a3fdec8e7adab2d787fb6bf9 *R/proDSfit.R
d290f1445c20f34e6c5f725d2088363f *R/proDSinit.R
0fd8bba09664d173f876e9336c328a12 *R/proDSval.R
a988fc277f3bfdb7f16cd8807d7bec4e *R/vehicles-data.R
66d6b013c497c3521de3206e5855209e *build/vignette.rds
988e27d0f83a7ff327e4f38f089aeefd *data/glass.RData
b7fa49999b8679c81898cc5262ee0c81 *data/ionosphere.RData
1665c19c5134e390d23a6f938557c3b5 *data/vehicles.RData
ee10a173dbc60c639538cd06f19e9b55 *inst/doc/Introduction.R
15c64d602e7264b11a8ca115a92a62f8 *inst/doc/Introduction.Rmd
b78cbd50381c968633d8e952f67adbd6 *inst/doc/Introduction.html
533bbd19c4a779c4752827530d2c8ea5 *inst/doc/Introduction.R
0dfd3d1fa0aa29cdb381bc5545958d4b *inst/doc/Introduction.Rmd
4a4cdd01ee8bf578eb467911dadf5d18 *inst/doc/Introduction.html
692b0234dd3ae533733ef4382a3109b6 *man/EkNNfit.Rd
df9e6f51bbe7a511a71c62669b0369e9 *man/EkNNinit.Rd
5c6c7e59a33b0c199ea37c15e93b6f1a *man/EkNNval.Rd
cf56f366ae4cec7a5916b3be9c7313c5 *man/evclass.Rd
1321ba273ed3af3992038c683ae4d242 *man/decision.Rd
f9877cda81848dcb74cf78427486b2e5 *man/evclass.Rd
ecf423e10f75e6dab5a7ced8a253575b *man/glass.Rd
144a8069c347be152abc7543780db1c2 *man/ionosphere.Rd
4f20dd9c17a3c66b6eda7e1c24012814 *man/proDSfit.Rd
19dc1316032dc586a3b83fd4d9b31945 *man/proDSinit.Rd
68c05de616fa459bba567ce123daebed *man/proDSval.Rd
5d2098b37a40c37c063e2f93e705331f *man/proDSval.Rd
6d00db39a18df54421981daa1b761cdc *man/vehicles.Rd
15c64d602e7264b11a8ca115a92a62f8 *vignettes/Introduction.Rmd
0dfd3d1fa0aa29cdb381bc5545958d4b *vignettes/Introduction.Rmd
7a59c365f0124c990e37d0f2e8c9768c *vignettes/tdenoeux.bib
1 change: 1 addition & 0 deletions NAMESPACE
Expand Up @@ -3,6 +3,7 @@
export(EkNNfit)
export(EkNNinit)
export(EkNNval)
export(decision)
export(proDSfit)
export(proDSinit)
export(proDSval)
Expand Down
2 changes: 1 addition & 1 deletion R/EkNNfit.R
Expand Up @@ -55,7 +55,7 @@
#' fit<-EkNNfit(x,y,K=5)
EkNNfit<-function(x,y,K,param=NULL,alpha=0.95,lambda=1/max(as.numeric(y)),optimize=TRUE,
options=list(maxiter=300,eta=0.1,gain_min=1e-6,disp=TRUE)){
y<-as.integer(y)
y<-as.numeric(y)
x<-as.matrix(x)
if(is.null(param)) param<-EkNNinit(x,y,alpha)
knn<-get.knn(x,k=K)
Expand Down
1 change: 1 addition & 0 deletions R/EkNNinit.R
Expand Up @@ -44,6 +44,7 @@
#' param
EkNNinit<-function(x,y,alpha=0.95){
y<-as.numeric(y)
x<-as.matrix(x)
M<-max(y)
gamm<-rep(0,M)
for(k in 1:M){
Expand Down
4 changes: 3 additions & 1 deletion R/EkNNval.R
Expand Up @@ -54,7 +54,9 @@
#' fit<-EkNNfit(xtrain,ytrain,K)
#' test<-EkNNval(xtrain,ytrain,xtst,K,ytst,fit$param)
EkNNval <- function(xtrain,ytrain,xtst,K,ytst=NULL,param=NULL){


xtst<-as.matrix(xtst)
xtrain<-as.matrix(xtrain)
ytrain<-as.numeric(ytrain)
if(!is.null(ytst)) ytst<-as.numeric(ytst)

Expand Down
64 changes: 64 additions & 0 deletions R/decision.R
@@ -0,0 +1,64 @@
#' Decision rules for evidential classifiers
#'
#'\code{decision} returns decisions from a loss matrix and mass functions computed
#'by an evidential classifier.
#'
#'This function implements the decision rules described in Denoeux (1997), with an
#'arbitrary loss function. The decision rules are the minimization of the lower,
#'upper or pignistic expectation, and Jaffray's decision rule based on minimizing a
#'convex combination of the lower and upper expectations. The function also handles
#'the case where there is an "unknown" class, in addition to the classes represented
#'in the training set.
#'
#' @param m Matrix of masses for n test cases. Each row is a mass function. The first M columns
#' correspond to the mass assigned to each of the M classes. The last column
#' corresponds to the mass assigned to the whole set of classes.
#' @param L The loss matrix of dimension (M,na) or (M+1,na), where na is the number
#' of actions. L[k,j] is the loss incurred if action j is chosen and the true class
#' is \eqn{\omega_k}. If L has M+1 rows, the last row corresponds to the unknown
#' class.
#' @param rule Decision rule to be used. Must be one of these: 'upper' (upper
#' expectation), 'lower' (lower expectation), 'pignistic' (pignistic expectation),
#' 'hurwicz' (weighted sum of the lower and upper expectations).
#' @param rho Parameter between 0 and 1. Used only if rule='hurwicz'.
#'
#' @return A n-vector with the decisions (integers between 1 and na).
#'
#'@references
#'T. Denoeux. Analysis of evidence-theoretic decision rules for pattern
#'classification. Pattern Recognition, 30(7):1095--1107, 1997.
#'
#'Available from \url{https://www.hds.utc.fr/~tdenoeux}.
#'
#'@author Thierry Denoeux.
#'
#' @export
#'
#' @seealso \code{\link{EkNNval}}, \code{\link{proDSval}}
#'
#' @examples ## Example with M=2 classes
#' m<-matrix(c(0.9,0.1,0,0.4,0.6,0,0.1,0.1,0.8),3,3,byrow=TRUE)
#' ## Loss matrix with na=4 acts: assignment to class 1, assignment to class2,
#' # rejection, and assignment to the unknown class.
#' L<-matrix(c(0,1,1,1,0,1,0.2,0.2,0.2,0.25,0.25,0),3,4)
#' d<-decision(m,L,'upper') ## instances 2 and 3 are rejected
#' d<-decision(m,L,'lower') ## instance 2 is rejected, instance 3 is
#' # assigned to the unknown class
#'
decision<-function(m,L=1-diag(ncol(m)-1),rule=c('upper','lower','pignistic','hurwicz'),
                   rho=0.5){
  # Validate 'rule' and resolve the default: without this, calling decision()
  # with the default leaves 'rule' a length-4 vector, which errors in
  # if(rule=='upper') on R >= 4.2, and an unrecognized string would leave L1
  # undefined further down.
  rule<-match.arg(rule)
  M<-ncol(m)-1  # number of known classes; last column of m is the mass on the whole frame
  n<-nrow(m)    # number of test cases
  # If L has a row for the "unknown" class, insert a zero-mass column for that
  # class into m (between the known classes and the frame mass) so that the
  # columns of m line up with the rows of rbind(L,L1) below.
  if(nrow(L)==(M+1)) m<-cbind(m[,1:M],rep(0,n),m[,M+1]) # unknown class
  # L1[j] is the loss attributed to the frame (total ignorance) under action j.
  if(rule=='upper'){
    L1<-apply(L,2,max)                       # worst case -> upper expectation
  }else if(rule=='lower'){
    L1<-apply(L,2,min)                       # best case -> lower expectation
  }else if(rule=='pignistic'){
    L1<-colMeans(L)                          # uniform redistribution -> pignistic expectation
  }else{                                     # rule == 'hurwicz'
    L1<-rho*apply(L,2,min)+(1-rho)*apply(L,2,max) # Jaffray/Hurwicz convex combination
  }
  # Expected loss of each action (columns) for each instance (rows);
  # pick the loss-minimizing action per instance.
  # NOTE(review): max.col() breaks exact ties at random (ties.method="random"),
  # so decisions are only deterministic when the minimum is unique.
  C<-m %*% rbind(L,L1)
  return(max.col(-C))
}
7 changes: 5 additions & 2 deletions R/evclass.R
Expand Up @@ -6,9 +6,9 @@
#' classifier quantify the uncertainty of the classification using Dempster-Shafer mass functions.
#'
#' The main functions are: \code{\link{EkNNinit}}, \code{\link{EkNNfit}} and \code{\link{EkNNval}}
#' for the initialization, training and evaluation of the EK-NN classifier, and
#' for the initialization, training and evaluation of the EK-NN classifier,
#' \code{\link{proDSinit}}, \code{\link{proDSfit}} and \code{\link{proDSval}} for the
#' evidential neural network classifier.
#' evidential neural network classifier, and \code{\link{decision}} for decision-making.
#'
#' @docType package
#' @name evclass
Expand All @@ -20,6 +20,9 @@
#'T. Denoeux. A k-nearest neighbor classification rule based on Dempster-Shafer
#'theory. IEEE Transactions on Systems, Man and Cybernetics, 25(05):804--813, 1995.
#'
#'T. Denoeux. Analysis of evidence-theoretic decision rules for pattern
#'classification. Pattern Recognition, 30(7):1095--1107, 1997.
#'
#'T. Denoeux. A neural network classifier based on Dempster-Shafer theory.
#'IEEE Trans. on Systems, Man and Cybernetics A, 30(2):131--150, 2000.
#'
Expand Down
5 changes: 3 additions & 2 deletions R/proDSfit.R
Expand Up @@ -54,8 +54,9 @@
#' fit<-proDSfit(xapp,yapp,param0)
proDSfit <- function(x,y,param,lambda=1/max(as.numeric(y)),mu=0,optimProto=TRUE,
options=list(maxiter=500,eta=0.1,gain_min=1e-4,disp=10)){

M<-max(as.numeric(y))
x<-as.matrix(x)
y<-as.numeric(y)
M<-max(y)
n<-nrow(param$W)
p<-ncol(param$W)
Id <- diag(M)
Expand Down
1 change: 1 addition & 0 deletions R/proDSinit.R
Expand Up @@ -45,6 +45,7 @@
#' param0
proDSinit<- function(x,y,nproto,nprotoPerClass=FALSE,crisp=FALSE){
y<-as.numeric(y)
x<-as.matrix(x)
M <- max(y)
N <- nrow(x)
Id <- diag(M)
Expand Down
5 changes: 3 additions & 2 deletions R/proDSval.R
Expand Up @@ -6,7 +6,7 @@
#'
#' @param x Matrix of size n x d, containing the values of the d attributes for the test data.
#' @param param Neural network parameters, as provided by \code{\link{proDSfit}}.
#' @param y Optimnal vector of class labels for the test data. May be a factor, or a vector of
#' @param y Optional vector of class labels for the test data. May be a factor, or a vector of
#' integers.
#'
#' @return A list with three elements:
Expand Down Expand Up @@ -47,6 +47,7 @@
proDSval<-function(x,param,y=NULL){
n<-nrow(param$W)
p<-ncol(param$W)
x<-as.matrix(x)
N <- nrow(x)
M <- ncol(param$beta)
x<-t(x)
Expand All @@ -67,9 +68,9 @@ proDSval<-function(x,param,y=NULL){
# normalisation
K <- colSums(mk)
mk<-t(mk/matrix(K,M+1,N,byrow=TRUE))
ypred<-max.col(mk[,1:M])

if(!is.null(y)){
ypred<-max.col(mk[,1:M])
err<-length(which(as.numeric(y)!=ypred))/N
} else err<-NULL

Expand Down
51 changes: 51 additions & 0 deletions inst/doc/Introduction.R
Expand Up @@ -57,3 +57,54 @@ val<-proDSval(xtst,fit$param,ytst)
print(val$err)
table(ytst,val$ypred)

## ---- fig.width=6, fig.height=6------------------------------------------
# Load the iris data; keep petal length/width as the two features and convert
# the species factor to integer class labels 1..3.
data("iris")
x<- iris[,3:4]
y<-as.numeric(iris[,5])
# Number of classes.
# NOTE(review): the variable name 'c' shadows base::c as a data binding; the
# c(...) calls below still resolve to the base function (R skips non-function
# bindings for calls), but a different name would be clearer.
c<-max(y)
plot(x[,1],x[,2],pch=y,xlab="Petal Length",ylab="Petal Width")

# Initialize and train the evidential neural network classifier (6 prototypes).
param0<-proDSinit(x,y,6)
fit<-proDSfit(x,y,param0)

## ------------------------------------------------------------------------
# Loss matrix: 0/1 losses for assignment to each class, plus one extra action
# (constant loss 0.3) acting as a rejection option.
L=cbind(1-diag(c),rep(0.3,c))
print(L)

## ---- fig.width=6, fig.height=6------------------------------------------
# Evaluate the three decision rules on a grid over feature space to draw
# decision boundaries.
xx<-seq(-1,9,0.01)
yy<-seq(-2,4.5,0.01)
nx<-length(xx)
ny<-length(yy)
Dlower<-matrix(0,nrow=nx,ncol=ny)
Dupper<-Dlower
Dpig<-Dlower
for(i in 1:nx){
# One column of the grid: fixed x-coordinate xx[i], all y-coordinates.
X<-matrix(c(rep(xx[i],ny),yy),ny,2)
# rep(0,ny) stands in for unknown labels; only val$m (mass functions) is used.
val<-proDSval(X,fit$param,rep(0,ny))
Dupper[i,]<-decision(val$m,L=L,rule='upper')
Dlower[i,]<-decision(val$m,L=L,rule='lower')
Dpig[i,]<-decision(val$m,L=L,rule='pignistic')
}

# Overlay the decision regions of the three rules (solid/dashed/dotted) on the data.
contour(xx,yy,Dlower,xlab="Petal.Length",ylab="Petal.Width",drawlabels=FALSE)
for(k in 1:c) points(x[y==k,1],x[y==k,2],pch=k)
contour(xx,yy,Dupper,xlab="Petal.Length",ylab="Petal.Width",drawlabels=FALSE,add=TRUE,lty=2)
contour(xx,yy,Dpig,xlab="Petal.Length",ylab="Petal.Width",drawlabels=FALSE,add=TRUE,lty=3)

## ------------------------------------------------------------------------
# Extended loss matrix: two extra actions (rejection at 0.2, assignment to the
# unknown class at 0.22), plus an extra row for the unknown class itself.
L<-cbind(1-diag(c),rep(0.2,c),rep(0.22,c))
L<-rbind(L,c(1,1,1,0.2,0))
print(L)

## ---- fig.width=6, fig.height=6------------------------------------------
# Recompute lower/pignistic decisions on the same grid with the extended losses.
for(i in 1:nx){
X<-matrix(c(rep(xx[i],ny),yy),ny,2)
val<-proDSval(X,fit$param,rep(0,ny))
Dlower[i,]<-decision(val$m,L=L,rule='lower')
Dpig[i,]<-decision(val$m,L=L,rule='pignistic')
}

contour(xx,yy,Dpig,xlab="Petal.Length",ylab="Petal.Width",drawlabels=FALSE)
for(k in 1:c) points(x[y==k,1],x[y==k,2],pch=k)

0 comments on commit dd9a001

Please sign in to comment.