version 1.7.2
Clement Calenge authored and gaborcsardi committed Dec 14, 2007
1 parent 97d0de3 commit 1a0fc51
Showing 19 changed files with 400 additions and 59 deletions.
7 changes: 7 additions & 0 deletions CONTENTS
@@ -396,6 +396,13 @@ Keywords: multivariate
Description: The MADIFA: a Factorial Decomposition of the Mahalanobis Distances
URL: ../../../library/adehabitat/html/madifa.html

Entry: dunnfa
Aliases: dunnfa, print.dunnfa
Keywords: multivariate
Description: Factorial Analysis of the Specialization in Habitat Selection
Studies. Unpublished Work of James Dunn (University of Arkansas)
URL: ../../../library/adehabitat/html/dunnfa.html

Entry: mahasuhab
Aliases: mahasuhab
Keywords: spatial
8 changes: 4 additions & 4 deletions DESCRIPTION
@@ -1,12 +1,12 @@
Package: adehabitat
Version: 1.7.1
Version: 1.7.2
Date: 2007/12/14
Title: Analysis of habitat selection by animals
Author: Clement Calenge, contributions from Mathieu Basille, Stephane Dray and Scott Fortmann-Roe
Maintainer: Clement Calenge <calenge@biomserv.univ-lyon1.fr>
Depends: R (>= 1.8.0), ade4
Depends: R (>= 2.7.0), ade4
Suggests: gpclib, sp, spatstat, MASS, tkrplot, shapefiles, CircStats
Description: A collection of tools for the analysis of habitat selection by animals
Encoding: latin1
License: GPL version 2 or newer
Packaged: Fri Dec 14 17:31:10 2007; calenge
License: GPL (>= 2)
Packaged: Mon Jul 28 21:28:20 2008; calenge
3 changes: 3 additions & 0 deletions INDEX
@@ -43,6 +43,9 @@ distfacmap          Compute distances to the different levels of a
factor map
domain Estimation of the Potential Distribution of a
Species
dunnfa Factorial Analysis of the Specialization in
Habitat Selection Studies. Unpublished Work of
James Dunn (University of Arkansas)
eisera Eigenanalysis of Selection Ratios
enfa Ecological-Niche Factor Analysis
explore.kasc Interactive Exploration of Maps of Class kasc
1 change: 0 additions & 1 deletion R/biv.plot.r
@@ -23,7 +23,6 @@
on.exit(par(old.par))
lay <- layout(matrix(c(2,4,1,3),2,2, byrow = TRUE), c(3,1),
c(1,3), TRUE)
layout.show(lay)

## preparation of the data
x <- dfxy[, 1]
4 changes: 4 additions & 0 deletions R/compana.r
@@ -5,6 +5,10 @@
test<-match.arg(test)
used<-as.matrix(used)
avail<-as.matrix(avail)
if (any(apply(avail,2, function(x) {
sum(x > .Machine$double.eps)
})<2))
stop("Availability different from zero\n for less than two animals for some habitat types. \n At least 2 animals are required for this analysis")
if ((any(avail==0))&(test=="parametric")) {
warning("parametric tests not suitable with 0 in avail; test has been set to \"randomisation\"")
test<-"randomisation"
121 changes: 121 additions & 0 deletions R/dunnfa.r
@@ -0,0 +1,121 @@
dunnfa <- function(dudi, pr, scannf = TRUE, nf = 2) #(Z, pr)
{
## Verifications
if (!inherits(dudi, "pca"))
stop("Currently only implemented for objects of class \"pca\"")
    call <- match.call()
if (any(is.na(dudi$tab)))
stop("na entries in table")
if (!is.vector(pr))
stop("pr should be a vector")

## Bases for the analysis
prb <- pr
pr <- pr/sum(pr)
Z <- as.matrix(dudi$tab)
n <- nrow(Z)


## centering and scaling ("used" weighting)
f1 <- function(v) sum(v * pr)
center <- apply(Z, 2, f1)
Zu1 <- sweep(Z, 2, center)
f2 <- function(v) sum((v^2) * pr)
sdu <- apply(Zu1, 2, f2)
Zu <- sweep(Zu1, 2, sqrt(sdu), "/")

## centering for "available" weighting
center <- apply(Zu, 2, mean)
Za <- sweep(Zu, 2, center)

## correlation matrices
Su <- t(apply(Zu, 2, function(x) x*pr))%*%Zu
Sa <- t(Za)%*%Za/nrow(Za)

## inverse of Su
Sumo <- solve(Su)

## Cholesky factorization
Th <- chol(Sa)

## The core of the analysis:
mat <- Th%*%Sumo%*%t(Th)
res <- eigen(mat)


## Number of eigenvalues
if (scannf) {
        barplot(res$values)
cat("Select the number of axes: ")
nf <- as.integer(readLines(n = 1))
}
if (nf <= 0 | nf > ncol(Zu))
nf <- 1


## The results
B <- res$vec
A <- solve(Th)%*%B

## scale the vectors so that their length is 1
A <- apply(A,2, function(x) x/sqrt(sum(x^2)))

## The results:
sor <- list()
sor$eig <- res$val
sor$pr <- prb
sor$co <- A[,1:nf]
sor$liU <- as.data.frame(Zu %*% A)[,1:nf]
sor$liA <- as.data.frame(Za %*% A)[,1:nf]
sor$corA <- cor(as.data.frame(Za), as.data.frame(sor$liA))
if (length(dim(sor$liU))>1) {
sor$mahasu <- apply(sor$liU, 1, function(x) sum(x^2/sor$eig[1:nf]))
} else {
sor$mahasu <- sor$liU^2/sor$eig[1]
}
sor$call <- call
sor$tab <- as.data.frame(Za)
sor$nf <- nf
class(sor) <- "dunnfa"
return(sor)
}



print.dunnfa <- function (x, ...)
{
if (!inherits(x, "dunnfa"))
stop("Object of class 'dunnfa' expected")
cat("Factorial analysis of the specialization of James Dunn")
cat("\n$call: ")
print(x$call)
cat("\neigen values: ")
l0 <- length(x$eig)
cat(signif(x$eig, 4)[1:(min(5, l0))])
if (l0 > 5)
cat(" ...")
cat("\n$nf:", x$nf, "axes saved")
cat("\n")
cat("\n")
sumry <- array("", c(3, 4), list(1:3, c("vector", "length",
"mode", "content")))
sumry[1, ] <- c("$pr", length(x$pr), mode(x$pr), "vector of presence")
sumry[2, ] <- c("$mahasu", length(x$mahasu), mode(x$mahasu), "squared Mahalanobis distances")
sumry[3, ] <- c("$eig", length(x$eig), mode(x$eig), "eigen values")
class(sumry) <- "table"
print(sumry)
cat("\n")
sumry <- array("", c(5, 4), list(1:5, c("data.frame", "nrow",
"ncol", "content")))
sumry[1, ] <- c("$tab", nrow(x$tab), ncol(x$tab), "modified array (centering on available points)")
sumry[2, ] <- c("$liA", nrow(x$liA), ncol(x$liA), "row coordinates (centering on available points)")
sumry[3, ] <- c("$liU", nrow(x$liU), ncol(x$liU), "row coordinates (centering on used points")
sumry[4, ] <- c("$co", nrow(x$co), ncol(x$co), "column coordinates")
sumry[5, ] <- c("$cor", nrow(x$cor), ncol(x$cor), "cor(habitat var., scores) for available points")
class(sumry) <- "table"
print(sumry)
if (length(names(x)) > 15) { cat("\nother elements: ")
cat(names(x)[16:(length(x))], "\n")
}
}
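
The new dunnfa() expects a duality diagram of class "pca" (from ade4's dudi.pca) built on the table of habitat variables, plus a vector of utilization weights pr, in the same spirit as enfa() and madifa(). A hedged usage sketch, where tab is assumed to be a data frame of habitat variables measured on the available pixels and pr a vector of utilization weights of the same length:

    library(ade4)
    pc <- dudi.pca(tab, scannf = FALSE)          ## PCA of the habitat table
    fa <- dunnfa(pc, pr, scannf = FALSE, nf = 2) ## Dunn's factorial analysis
    fa                                           ## summary via print.dunnfa
    barplot(fa$eig)                              ## eigenvalues of the analysis
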
5 changes: 3 additions & 2 deletions R/hr.rast.r
@@ -1,4 +1,4 @@
"hr.rast" <- function(mcp, w)
"hr.rast" <- function(mcp, w, border=c("include", "exclude"))
{
## Verifications
if (inherits(w, "asc"))
@@ -7,14 +7,15 @@
stop("Non convenient data")
if (!inherits(mcp, "area"))
stop("mcp should be of class \"area\"")
bord <- match.arg(border)

## a list with one element = one polygon
lpc<-split(mcp[,2:3], mcp[,1])
output<-list()

## use of the function mcp.rast for each polygon
for (i in 1:length(lpc))
output[[names(lpc)[i]]]<-mcp.rast(lpc[[i]], w)
output[[names(lpc)[i]]]<-mcp.rast(lpc[[i]], w, bord)

## the output:
output<-as.kasc(output)
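
hr.rast() itself only forwards the new border argument to mcp.rast() for each home-range polygon. Assuming hr is an object of class "area" (for instance the output of mcp()) and map is a raster map of class "asc" or "kasc", the two calls below should differ only for pixels whose center falls exactly on a polygon boundary:

    ras.in  <- hr.rast(hr, map, border = "include")
    ras.out <- hr.rast(hr, map, border = "exclude")
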
1 change: 1 addition & 0 deletions R/kernelbb.r
@@ -6,6 +6,7 @@
if (!inherits(x, "traj"))
stop("tr should be of class \"ltraj\"")


## Bases
sorties <- list()
gr <- grid
22 changes: 21 additions & 1 deletion R/mcp.rast.r
@@ -1,4 +1,4 @@
"mcp.rast" <- function(poly, w)
"mcp.rast" <- function(poly, w, border=c("include", "exclude"))
{
## Verifications
if (inherits(w, "asc"))
@@ -9,6 +9,26 @@
## The first and last relocations should be the same (closed polygon)
if (!all(poly[1,]==poly[nrow(poly),]))
poly<-rbind(poly, poly[1,])
border <- match.arg(border)

## Slightly move the borders of the polygon
slightlymove <- function(mcp, w, bor)
{
if (bor=="include") {
oo <- 1
} else {
oo <- -1
}
amo <- oo*runif(1)*attr(w, "cellsize")/100
me <- apply(mcp,2,mean)
mcp[mcp[,1]<=me[1],1] <- mcp[mcp[,1]<=me[1],1] - amo
mcp[mcp[,1]>me[1],1] <- mcp[mcp[,1]>me[1],1] + amo
        mcp[mcp[,2]<=me[2],2] <- mcp[mcp[,2]<=me[2],2] - amo
        mcp[mcp[,2]>me[2],2] <- mcp[mcp[,2]>me[2],2] + amo
return(mcp)
}

poly <- slightlymove(poly, w, border)

## prepares the data
xy<-getXYcoords(w)
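
The helper slightlymove() nudges every vertex away from (border = "include") or toward (border = "exclude") the centre of the polygon by a random amount smaller than one hundredth of the grid cell size, so that a pixel centre lying exactly on a boundary is classified consistently rather than left to floating-point comparisons. A small numeric sketch of the size of that shift, assuming a hypothetical 100 m cell size:

    set.seed(1)
    cellsize <- 100                        ## hypothetical cell size in metres
    oo <- 1                                ## border = "include"
    amo <- oo * runif(1) * cellsize / 100
    amo                                    ## about 0.27 m here, always < cellsize/100
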
65 changes: 27 additions & 38 deletions R/spixdf2kasc.r
@@ -1,53 +1,44 @@
"spixdf2kasc" <- function(sg)
spixdf2kasc <- function (sg)
{
## Verifications
if (!require(sp))
stop("the package sp is required for this function")
if (inherits(sg, "SpatialPixelsDataFrame"))
sg <- as(sg, "SpatialGridDataFrame")
if (!inherits(sg, "SpatialGridDataFrame"))
stop(paste("sg should be of class \"SpatialPixelsDataFrame\"",
"\nor \"SpatialGridDataFrame\""))
"\nor \"SpatialGridDataFrame\""))
gr <- gridparameters(sg)
if (nrow(gr)>2)
if (nrow(gr) > 2)
stop("sg should be defined in two dimensions")
if (gr[1,2]!=gr[2,2])
if (gr[1, 2] != gr[2, 2])
stop("the cellsize should be the same in x and y directions")


## gets the coordinates
fullgrid(sg) <- TRUE
xy <- coordinates(sg)

## gets the data and prepare them for the conversion toward kasc
uu <- colnames(summary(sg)$data)
lll <- lapply(1:length(uu), function(i) c(as.matrix(sg[i])))
ka <- do.call("data.frame", lll)
names(ka) <- uu
ka <- data.frame(ka[order(xy[,1]),])
xy <- xy[order(xy[,1]),]
ka <- data.frame(ka[order(xy[,2]),])
xy <- xy[order(xy[,2]),]
nxy <- colnames(xy)

## Adds the attributes
attr(ka, "cellsize") <- gr[2,2]
attr(ka, "xll") <- gr[1,1]
attr(ka, "yll") <- gr[2,1]
attr(ka,"ncol") <- gr[1,3]
attr(ka,"nrow") <- gr[2,3]
nRow <- gr[2, 3]
nRows <- nRow:1
nCol <- gr[1, 3]
nCols <- 1:nCol
lo <- integer(length=nRow*nCol)
for (i in 1:nRow) {
r <- ((i-1)*(nCol)+1):(i*nCol)
lo[r] <- (nRows[i]-1)*(nCol) + nCols
}
uu <- names(sg)
if (length(uu) ==1) {
ka <- data.frame(slot(sg, "data")[lo,])
names(ka) <- uu
} else ka <- slot(sg, "data")[lo,]
attr(ka, "cellsize") <- gr[2, 2]
attr(ka, "xll") <- gr[1, 1]
attr(ka, "yll") <- gr[2, 1]
attr(ka, "ncol") <- gr[1, 3]
attr(ka, "nrow") <- gr[2, 3]
class(ka) <- c("kasc", "data.frame")

## if there is only one variable
## in the spixdf -> conversion to "asc"

if (ncol(ka)==1) {

v <- ka[,1]
if (ncol(ka) == 1) {
v <- ka[, 1]
if ((is.numeric(v)) | (is.logical(v))) {
e <- matrix(v, ncol = attr(ka, "nrow"))
attr(e, "type") <- "numeric"
} else {
}
else {
tc2 <- levels(v)
v <- as.numeric(v)
e <- matrix(v, ncol = attr(ka, "nrow"))
@@ -60,8 +51,6 @@
class(e) <- "asc"
ka <- e
}

## Output
return(ka)
}
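
The rewritten spixdf2kasc() reorders the rows of the data slot of the sp object directly instead of rebuilding one matrix per variable, and still degrades to an object of class "asc" when the grid carries a single variable. A hedged round-trip sketch, assuming ka is an existing object of class "kasc" and that kasc2spixdf() is available as the companion converter to the sp classes:

    library(sp)
    sg  <- kasc2spixdf(ka)     ## kasc -> SpatialPixelsDataFrame
    ka2 <- spixdf2kasc(sg)     ## back to class "kasc" ("asc" if only one variable)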

6 changes: 5 additions & 1 deletion man/area2asc.Rd
@@ -6,12 +6,16 @@
\code{asc}. It is an alias for \code{mcp.rast}.
}
\usage{
area2asc(poly, w)
area2asc(poly, w, border=c("include", "exclude"))
}
\arguments{
\item{poly}{a data frame with 2 columns giving the coordinates
of a polygon object}
\item{w}{an object of class \code{kasc}, or of class \code{asc}}
\item{border}{a character string indicating what happens when the
center of the pixel is located exactly on the limit of the polygon
(\code{"include"} indicates that the pixel is considered to be
inside the polygon). }
}
\details{
The raster map is needed to pass the format for the output raster
