/
proDSinit.R
87 lines (82 loc) · 2.84 KB
/
proDSinit.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
#' Initialization of parameters for the evidential neural network classifier
#'
#'\code{proDSinit} returns initial parameter values for the evidential neural network classifier.
#'
#'The prototypes are initialized by the k-means algorithms. The initial membership values \eqn{u_{ik}} of
#'each prototype \eqn{p_i} to class \eqn{\omega_k} are normally defined as the proportion of training samples
#'from class \eqn{\omega_k} in the neighborhood of prototype \eqn{p_i}. If arguments \code{crisp} and
#'\code{nprotoPerClass} are set to TRUE, the prototypes are assigned to one and only one class.
#'
#' @param x Input matrix of size n x d, where n is the number of objects and d the number of
#' attributes.
#' @param y Vector of class labels (of length n). May be a factor, or a vector of
#' integers.
#' @param nproto Number of prototypes.
#' @param nprotoPerClass Boolean. If TRUE, there are \code{nproto} prototypes per class. If
#' FALSE (default), the total number of prototypes is equal to \code{nproto}.
#' @param crisp Boolean. If TRUE, the prototypes have full membership to only one class. (Available only if
#' nprotoPerClass=TRUE).
#'
#' @return A list with four elements containing the initialized network parameters
#' \describe{
#' \item{alpha}{Vector of length r, where r is the number of prototypes.}
#' \item{gamma}{Vector of length r}
#' \item{beta}{Matrix of size (r,M), where M is the number of classes.}
#' \item{W}{Matrix of size (r,d), containing the prototype coordinates.}
#' }
#'
#'@references T. Denoeux. A neural network classifier based on Dempster-Shafer theory.
#'IEEE Trans. on Systems, Man and Cybernetics A, 30(2):131--150, 2000.
#'
#'Available from \url{https://www.hds.utc.fr/~tdenoeux}.
#'
#'@author Thierry Denoeux.
#'
#' @export
#' @importFrom stats runif kmeans
#'
#' @seealso \code{\link{proDSfit}}, \code{\link{proDSval}}
#'
#' @examples ## Glass dataset
#' data(glass)
#' xapp<-glass$x[1:89,]
#' yapp<-glass$y[1:89]
#' param0<-proDSinit(xapp,yapp,nproto=7)
#' param0
proDSinit <- function(x, y, nproto, nprotoPerClass = FALSE, crisp = FALSE) {
  # Initialize the evidential neural network parameters (Denoeux 2000).
  # Prototypes are placed by k-means; initial class memberships come from
  # the class proportions inside each prototype's cluster.
  y <- as.numeric(y)
  x <- as.matrix(x)
  M <- max(y)      # number of classes (labels assumed coded 1..M)
  Id <- diag(M)
  tt <- Id[y, ]    # one-hot class-indicator matrix (n x M);
                   # renamed from `t` to avoid shadowing base::t
  if (nprotoPerClass) {
    # nproto prototypes per class: run k-means separately within each class.
    W0 <- NULL
    BETA0 <- NULL
    for (i in seq_len(M)) {
      ii <- which(y == i)
      # drop = FALSE keeps x[ii, ] a matrix even for a single-sample class
      clus <- kmeans(x[ii, , drop = FALSE], nproto)
      W0 <- rbind(W0, clus$centers)
      # Every prototype created in this iteration belongs to class i, so
      # build the membership rows directly. (The original indexing
      # tt[ii[1:nproto], ] produced NA rows when a class had fewer than
      # nproto samples.)
      BETA0 <- rbind(BETA0, matrix(Id[i, ], nproto, M, byrow = TRUE))
    }
    n <- nproto * M
    if (!crisp) BETA0 <- BETA0 + 0.1  # soften memberships unless crisp
  } else {
    # nproto prototypes in total, pooled over all classes.
    n <- nproto
    BETA0 <- matrix(0, n, M)
    clus <- kmeans(x, n)
    class <- clus$cluster
    W0 <- clus$centers
    for (i in seq_len(n)) {
      ii <- which(class == i)
      if (length(ii) == 1) {
        BETA0[i, ] <- tt[ii, ]
      } else if (length(ii) > 1) {
        # square root of the class proportions within the cluster
        BETA0[i, ] <- sqrt(colMeans(tt[ii, ]))
      } else {
        BETA0[i, ] <- runif(M)  # empty cluster: random memberships
      }
    }
  }
  alpha0 <- rep(0, n)
  gamma0 <- rep(0.1, n)
  return(list(alpha = alpha0, gamma = gamma0, beta = BETA0, W = W0))
}