diff --git a/DESCRIPTION b/DESCRIPTION
index 29593462..4e8a8718 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,7 +1,7 @@
Package: netZooR
Type: Package
Title: Unified methods for the inference and analysis of gene regulatory networks
-Version: 1.1.15
+Version: 1.2.1
Date: 2022-07-07
Authors@R: c(person("Marouen", "Ben Guebila",
email = "benguebila@hsph.harvard.edu", role = c("aut","cre"), comment = c(ORCID = "0000-0001-5934-966X")),
diff --git a/NAMESPACE b/NAMESPACE
index b73dc999..29f527a0 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -31,6 +31,7 @@ export(monsterTransitionNetworkPlot)
export(monsterTransitionPCAPlot)
export(monsterdTFIPlot)
export(otter)
+export(pandaDiffEdges)
export(pandaPy)
export(pandaToAlpaca)
export(pandaToCondorObject)
diff --git a/R/PANDA.R b/R/PANDA.R
index 40f9c3f3..976a32fe 100644
--- a/R/PANDA.R
+++ b/R/PANDA.R
@@ -65,7 +65,7 @@
#'
-pandaPy <- function(expr_file, motif_file=NULL, ppi_file=NULL, computing="cpu", precision="double",save_memory=FALSE, save_tmp=TRUE, keep_expression_matrix=FALSE, modeProcess="union", remove_missing=FALSE){
+pandaPy <- function(expr_file, motif_file=NULL, ppi_file=NULL, computing="cpu", precision="double",save_memory=FALSE, save_tmp=TRUE, keep_expression_matrix=FALSE, modeProcess="union", remove_missing=FALSE, with_header=FALSE){
if(missing(expr_file)){
stop("Please provide the path of gene expression data file to 'expr_file' variable") }
@@ -106,6 +106,13 @@ pandaPy <- function(expr_file, motif_file=NULL, ppi_file=NULL, computing="cpu",
keepexpression.str <- "keep_expression_matrix=True"
} else{ keepexpression.str <- "keep_expression_matrix=False" }
+ # with header option
+  if(isTRUE(with_header)){
+    withheader.str <- "with_header=True"
+  }else{
+    withheader.str <- "with_header=False"
+  }
+
# when pre-processing mode is legacy
if(modeProcess == "legacy"){
@@ -130,7 +137,9 @@ pandaPy <- function(expr_file, motif_file=NULL, ppi_file=NULL, computing="cpu",
reticulate::source_python(pandapath,convert = TRUE)
# invoke Python script to create a Panda object
- obj.str <- paste("panda_obj=Panda(", expr.str, ",", motif.str,",", ppi.str, ",", computing.str, ",", precision.str, ",", savememory.str, ",", savetmp.str, "," , keepexpression.str, ",", mode.str, ")", sep ='')
+ obj.str <- paste("panda_obj=Panda(", expr.str, ",", motif.str,",", ppi.str, ",",
+ computing.str, ",", precision.str, ",", savememory.str, ",", savetmp.str, "," ,
+ keepexpression.str, ",", mode.str, "," , withheader.str, ")", sep ='')
# run Python code
py_run_string(obj.str)
diff --git a/R/SPIDER.R b/R/SPIDER.R
new file mode 100644
index 00000000..7ec15aed
--- /dev/null
+++ b/R/SPIDER.R
@@ -0,0 +1,308 @@
+#' Seeding PANDA Interactions to Derive Epigenetic Regulation
+#'
+#' This function runs the SPIDER algorithm
+#'
+#' @param motif A motif dataset, a data.frame, matrix or exprSet containing 3 columns.
+#' Each row describes a motif associated with a transcription factor (column 1), a
+#' gene (column 2) and a score (column 3) for the motif.
+#' @param epifilter A binary matrix that is of the same size as motif that will be used as a mask to filter motif
+#' for open chromatin region. Motif interactions that fall in open chromatin region will be kept and the others are removed.
+#' @param expr An expression dataset, as a genes (rows) by samples (columns) data.frame
+#' @param ppi A Protein-Protein interaction dataset, a data.frame containing 3 columns.
+#' Each row describes a protein-protein interaction between transcription factor 1(column 1),
+#' transcription factor 2 (column 2) and a score (column 3) for the interaction.
+#' @param alpha value to be used for update variable, alpha (default=0.1)
+#' @param hamming value at which to terminate the process based on hamming distance (default 10^-3)
+#' @param iter sets the maximum number of iterations SPIDER can run before exiting.
+#' @param progress Boolean to indicate printing of output for algorithm progress.
+#' @param output a vector containing which networks to return. Options include "regulatory",
+#' "coregulatory", "cooperative".
+#' @param zScale Boolean to indicate use of z-scores in output. False will use [0,1] scale.
+#' @param randomize method by which to randomize gene expression matrix. Default "None". Must
+#' be one of "None", "within.gene", "by.genes". "within.gene" randomization scrambles each row
+#' of the gene expression matrix, "by.gene" scrambles gene labels.
+#' @param cor.method Correlation method, default is "pearson".
+#' @param scale.by.present Boolean to indicate scaling of correlations by percentage of positive samples.
+#' @param remove.missing.ppi Boolean to indicate whether TFs in the PPI but not in the motif data should be
+#' removed. Only when mode=='legacy'.
+#' @param remove.missing.motif Boolean to indicate whether genes targeted in the motif data but not the
+#' expression data should be removed. Only when mode=='legacy'.
+#' @param remove.missing.genes Boolean to indicate whether genes in the expression data but lacking
+#' information from the motif prior should be removed. Only when mode=='legacy'.
+#' @param edgelist Boolean to indicate if edge lists instead of matrices should be returned.
+#' @param mode The data alignment mode. The mode 'union' takes the union of the genes in the expression matrix and the motif
+#' and the union of TFs in the ppi and motif and fills the matrices with zeros for nonintersecting TFs and genes, 'intersection'
+#' takes the intersection of genes and TFs and removes nonintersecting sets, 'legacy' is the old behavior with version 1.19.3.
+#' Parameters remove.missing.ppi, remove.missing.motif, remove.missing.genes work only with mode=='legacy'.
+#' @keywords keywords
+#' @importFrom matrixStats rowSds
+#' @importFrom matrixStats colSds
+#' @importFrom Biobase assayData
+#' @importFrom reshape melt.array
+#' @export
+#' @return An object of class "panda" containing matrices describing networks achieved by convergence
+#' with SPIDER algorithm.\cr
+#' "regNet" is the regulatory network\cr
+#' "coregNet" is the coregulatory network\cr
+#' "coopNet" is the cooperative network
+#' @examples
+#' data(pandaToyData)
+#' spiderRes <- spider(pandaToyData$motif, pandaToyData$expression,
+#'            pandaToyData$epifilter, pandaToyData$ppi, hamming=.1, progress=TRUE)
+#' @references
+#' Sonawane, Abhijeet Rajendra, et al. "Constructing gene regulatory networks using epigenetic data." npj Systems Biology and Applications 7.1 (2021): 1-13.
+spider <- function(motif,expr=NULL,epifilter=NULL,ppi=NULL,alpha=0.1,hamming=0.001,
+ iter=NA,output=c('regulatory','coexpression','cooperative'),
+ zScale=TRUE,progress=FALSE,randomize=c("None", "within.gene", "by.gene"),cor.method="pearson",
+ scale.by.present=FALSE,edgelist=FALSE,remove.missing.ppi=FALSE,
+ remove.missing.motif=FALSE,remove.missing.genes=FALSE,mode="union"){
+
+ randomize <- match.arg(randomize)
+ if(progress)
+ print('Initializing and validating')
+
+  if(!all(epifilter[,c(1,2)] == motif[,c(1,2)])){
+ stop('Chromatin accessibility data does not match motif data size and order.')
+ }
+
+ if(class(expr)=="ExpressionSet")
+ expr <- assayData(expr)[["exprs"]]
+
+ if (is.null(expr)){
+ # Use only the motif data here for the gene list
+ num.conditions <- 0
+ if (randomize!="None"){
+ warning("Randomization ignored because gene expression is not used.")
+ randomize <- "None"
+ }
+ } else {
+ if(mode=='legacy'){
+ if(remove.missing.genes){
+ # remove genes from expression data that are not in the motif data
+ n <- nrow(expr)
+ expr <- expr[which(rownames(expr)%in%motif[,2]),]
+ message(sprintf("%s genes removed that were not present in motif", n-nrow(expr)))
+ }
+ if(remove.missing.motif){
+ # remove genes from motif data that are not in the expression data
+        n <- nrow(motif); keep <- which(motif[,2]%in%rownames(expr))
+        motif <- motif[keep,]
+        epifilter <- epifilter[keep,]
+ message(sprintf("%s motif edges removed that targeted genes missing in expression data", n-nrow(motif)))
+ }
+ # Use the motif data AND the expr data (if provided) for the gene list
+ # Keep everything sorted alphabetically
+ expr <- expr[order(rownames(expr)),]
+ }else if(mode=='union'){
+ gene.names=unique(union(rownames(expr),unique(motif[,2])))
+ tf.names =unique(union(unique(ppi[,1]),unique(motif[,1])))
+ num.TFs <- length(tf.names)
+ num.genes <- length(gene.names)
+ # gene expression matrix
+ expr1=as.data.frame(matrix(0,num.genes,ncol(expr)))
+ rownames(expr1)=gene.names
+ expr1[which(gene.names%in%rownames(expr)),]=expr[]
+ expr=expr1
+ #PPI matrix
+ tfCoopNetwork <- matrix(0,num.TFs,num.TFs)
+ colnames(tfCoopNetwork)=tf.names
+ rownames(tfCoopNetwork)=tf.names
+ Idx1 <- match(ppi[,1], tf.names);
+ Idx2 <- match(ppi[,2], tf.names);
+ Idx <- (Idx2-1)*num.TFs+Idx1;
+ tfCoopNetwork[Idx] <- ppi[,3];
+ Idx <- (Idx1-1)*num.TFs+Idx2;
+ tfCoopNetwork[Idx] <- ppi[,3];
+ #Motif matrix
+ regulatoryNetwork=matrix(0,num.TFs,num.genes)
+ colnames(regulatoryNetwork)=gene.names
+ rownames(regulatoryNetwork)=tf.names
+ Idx1=match(motif[,1], tf.names);
+ Idx2=match(motif[,2], gene.names);
+ Idx=(Idx2-1)*num.TFs+Idx1;
+ regulatoryNetwork[Idx]=motif[,3]*epifilter[,3]
+ }else if(mode=='intersection'){
+ gene.names=unique(intersect(rownames(expr),unique(motif[,2])))
+ tf.names =unique(intersect(unique(ppi[,1]),unique(motif[,1])))
+ num.TFs <- length(tf.names)
+ num.genes <- length(gene.names)
+ # gene expression matrix
+ expr1=as.data.frame(matrix(0,num.genes,ncol(expr)))
+ rownames(expr1)=gene.names
+ interGeneNames=gene.names[which(gene.names%in%rownames(expr))]
+ expr1[interGeneNames,]=expr[interGeneNames,]
+ expr=expr1
+ #PPI matrix
+ tfCoopNetwork <- matrix(0,num.TFs,num.TFs)
+ colnames(tfCoopNetwork)=tf.names
+ rownames(tfCoopNetwork)=tf.names
+ Idx1 <- match(ppi[,1], tf.names);
+ Idx2 <- match(ppi[,2], tf.names);
+ Idx <- (Idx2-1)*num.TFs+Idx1;
+ indIdx=!is.na(Idx)
+ Idx=Idx[indIdx] #remove missing TFs
+ tfCoopNetwork[Idx] <- ppi[indIdx,3];
+ Idx <- (Idx1-1)*num.TFs+Idx2;
+ indIdx=!is.na(Idx)
+ Idx=Idx[indIdx] #remove missing TFs
+ tfCoopNetwork[Idx] <- ppi[indIdx,3];
+ #Motif matrix
+ regulatoryNetwork=matrix(0,num.TFs,num.genes)
+ colnames(regulatoryNetwork)=gene.names
+ rownames(regulatoryNetwork)=tf.names
+ Idx1=match(motif[,1], tf.names);
+ Idx2=match(motif[,2], gene.names);
+ Idx=(Idx2-1)*num.TFs+Idx1;
+ indIdx=!is.na(Idx)
+ Idx=Idx[indIdx] #remove missing genes
+ regulatoryNetwork[Idx]=motif[indIdx,3]*epifilter[indIdx,3];
+ }
+ num.conditions <- ncol(expr)
+ if (randomize=='within.gene'){
+ expr <- t(apply(expr, 1, sample))
+ if(progress)
+ print("Randomizing by reordering each gene's expression")
+ } else if (randomize=='by.gene'){
+ rownames(expr) <- sample(rownames(expr))
+ expr <- expr[order(rownames(expr)),]
+ if(progress)
+ print("Randomizing by reordering each gene labels")
+ }
+ }
+
+ if (mode=='legacy'){
+ # Create vectors for TF names and Gene names from motif dataset
+ tf.names <- sort(unique(motif[,1]))
+ gene.names <- sort(unique(rownames(expr)))
+ num.TFs <- length(tf.names)
+ num.genes <- length(gene.names)
+ }
+
+ # Bad data checking
+ if (num.genes==0){
+ stop("Error validating data. No matched genes.\n Please ensure that gene names in expression data match gene names in motif data")
+ }
+
+ if(num.conditions==0) {
+ warning('No expression data given. SPIDER will run based on an identity co-regulation matrix')
+ geneCoreg <- diag(num.genes)
+ } else if(num.conditions<3) {
+ warning('Not enough expression conditions detected to calculate correlation. Co-regulation network will be initialized to an identity matrix.')
+ geneCoreg <- diag(num.genes)
+ } else {
+
+ if(scale.by.present){
+ num.positive=(expr>0)%*%t((expr>0))
+ geneCoreg <- cor(t(expr), method=cor.method, use="pairwise.complete.obs")*(num.positive/num.conditions)
+ } else {
+ geneCoreg <- cor(t(expr), method=cor.method, use="pairwise.complete.obs")
+ }
+ if(progress)
+ print('Verified sufficient samples')
+ }
+ if (any(is.na(geneCoreg))){ #check for NA and replace them by zero
+ diag(geneCoreg)=1
+ geneCoreg[is.na(geneCoreg)]=0
+ }
+
+ if (any(duplicated(motif))) {
+ warning("Duplicate edges have been found in the motif data. Weights will be summed.")
+ motif <- aggregate(motif[,3], by=list(motif[,1], motif[,2]), FUN=sum)
+ }
+
+ # Prior Regulatory Network
+ if(mode=='legacy'){
+ Idx1=match(motif[,1], tf.names);
+ Idx2=match(motif[,2], gene.names);
+ Idx=(Idx2-1)*num.TFs+Idx1;
+ regulatoryNetwork=matrix(data=0, num.TFs, num.genes);
+ regulatoryNetwork[Idx]=motif[,3]
+ colnames(regulatoryNetwork) <- gene.names
+ rownames(regulatoryNetwork) <- tf.names
+ # PPI data
+ # If no ppi data is given, we use the identity matrix
+ tfCoopNetwork <- diag(num.TFs)
+ # Else we convert our two-column data.frame to a matrix
+ if (!is.null(ppi)){
+ if(any(duplicated(ppi))){
+ warning("Duplicate edges have been found in the PPI data. Weights will be summed.")
+ ppi <- aggregate(ppi[,3], by=list(ppi[,1], ppi[,2]), FUN=sum)
+ }
+ if(remove.missing.ppi){
+ # remove edges in the PPI data that target TFs not in the motif
+ n <- nrow(ppi)
+ ppi <- ppi[which(ppi[,1]%in%tf.names & ppi[,2]%in%tf.names),]
+ message(sprintf("%s PPI edges removed that were not present in motif", n-nrow(ppi)))
+ }
+ Idx1 <- match(ppi[,1], tf.names);
+ Idx2 <- match(ppi[,2], tf.names);
+ Idx <- (Idx2-1)*num.TFs+Idx1;
+ tfCoopNetwork[Idx] <- ppi[,3];
+ Idx <- (Idx1-1)*num.TFs+Idx2;
+ tfCoopNetwork[Idx] <- ppi[,3];
+ }
+ colnames(tfCoopNetwork) <- tf.names
+ rownames(tfCoopNetwork) <- tf.names
+ }
+
+ ## Run SPIDER ##
+ tic=proc.time()[3]
+
+ # adjusting degree distribution
+ regulatoryNetwork = degreeAdjust(regulatoryNetwork)
+
+ if(progress)
+ print('Normalizing networks...')
+ regulatoryNetwork = normalizeNetwork(regulatoryNetwork)
+ tfCoopNetwork = normalizeNetwork(tfCoopNetwork)
+ geneCoreg = normalizeNetwork(geneCoreg)
+
+ if(progress)
+ print('Learning Network...')
+
+ minusAlpha = 1-alpha
+ step=0
+ hamming_cur=1
+ if(progress)
+ print("Using tanimoto similarity")
+ while(hamming_cur>hamming){
+ if ((!is.na(iter))&&step>=iter){
+      print(paste("Reached maximum iterations, iter =", iter))
+ break
+ }
+ Responsibility=tanimoto(tfCoopNetwork, regulatoryNetwork)
+ Availability=tanimoto(regulatoryNetwork, geneCoreg)
+ RA = 0.5*(Responsibility+Availability)
+
+ hamming_cur=sum(abs(regulatoryNetwork-RA))/(num.TFs*num.genes)
+ regulatoryNetwork=minusAlpha*regulatoryNetwork + alpha*RA
+
+ ppi=tanimoto(regulatoryNetwork, t(regulatoryNetwork))
+ ppi=update.diagonal(ppi, num.TFs, alpha, step)
+ tfCoopNetwork=minusAlpha*tfCoopNetwork + alpha*ppi
+
+ CoReg2=tanimoto(t(regulatoryNetwork), regulatoryNetwork)
+ CoReg2=update.diagonal(CoReg2, num.genes, alpha, step)
+ geneCoreg=minusAlpha*geneCoreg + alpha*CoReg2
+
+ if(progress)
+ message("Iteration", step,": hamming distance =", round(hamming_cur,5))
+ step=step+1
+ }
+
+ toc=proc.time()[3] - tic
+ if(progress)
+ message("Successfully ran SPIDER on ", num.genes, " Genes and ", num.TFs, " TFs.\nTime elapsed:", round(toc,2), "seconds.")
+ prepResult(zScale, output, regulatoryNetwork, geneCoreg, tfCoopNetwork, edgelist, motif)
+}
+
+#' Function to adjust the degree so that the hub nodes are not penalized in z-score transformation
+#'
+#' @param A Input adjacency matrix
+degreeAdjust <- function(A){
+  k1 <- colSums(A)/dim(A)[1]
+  k2 <- rowSums(A)/dim(A)[2]
+  B <- (matrix(replicate(dim(A)[2],k1),nrow=dim(A)[1]))^2
+  B <- B + (matrix(t(replicate(dim(A)[2],k2)),nrow=dim(A)[1]))^2
+  A <- A * sqrt(B);
+}
\ No newline at end of file
diff --git a/README.md b/README.md
index 0fe8904c..c141a136 100644
--- a/README.md
+++ b/README.md
@@ -17,27 +17,57 @@ netZooR is an R package to reconstruct, analyse, and plot biological networks.
netZooR currently integrates:
+
+PANDA
* **PANDA** (Passing Attributes between Networks for Data Assimilation) [[Glass et al. 2013]](http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0064832): PANDA is a method for estimating bipartite gene regulatory networks (GRNs) consisting of two types of nodes: transcription factors (TFs) and genes. An edge between TF $i$ and gene $j$ indicates that gene $j$ is regulated by TF $i$. The edge weight represents the strength of evidence for this regulatory relationship obtained by integrating three types of biological data: gene expression data, protein-protein interaction (PPI) data, and transcription factor binding motif (TFBM) data. PANDA is an iterative approach that begins with a seed GRN estimated from TFBMs and uses message passing between data types to refine the seed network to a final GRN that is consistent with the information contained in gene expression, PPI, and TFBM data.
+
+
+CONDOR
* **CONDOR** (COmplex Network Description Of Regulators) [[Platig et al. 2016]](http://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1005033): CONDOR is a tool for community detection in bipartite networks. Many community detection methods for unipartite networks are based on the concept of maximizing a modularity metric that compares the weight of edges within communities to the weight of edges between communities, prioritizing community assignments with higher values of the former relative to the latter. CONDOR extends this concept to bipartite networks by optimizing a bipartite version of modularity defined by [[Barber (2007)]](https://pubmed.ncbi.nlm.nih.gov/18233893/). To enable bipartite community detection on large networks such gene regulatory networks, CONDOR uses a fast unipartite modularity maximization method on one of the two unipartite projections of the bipartite network. In Platig et al. (2016), CONDOR is applied to bipartite networks of single nucleotide polymorphisms (SNPs) and gene expression, where a network edge from a SNP node to a gene node is indicative of an association between the SNP and the gene expression level, commonly known as an expression quantitative trait locus (eQTL). Communities detected with CONDOR contained local hub nodes ("core SNPs") enriched for association with disease, suggesting that functional eQTL relationships are encoded at the community level.
+
+
+LIONESS
* **LIONESS** (Linear Interpolation to Obtain Network Estimates for Single Samples) [[Kuijjer et al. 2019]](https://doi.org/10.1016/j.isci.2019.03.021): LIONESS is a flexible method for single-sample network integration. The machinery behind LIONESS is a leave-one-out approach. To construct a single-sample network for sample $i$, a first network is estimated on the full dataset and a second network is estimated on the dataset with sample $i$ withheld. The single-sample network is then estimated based on the difference between these two networks. Any method that can be used to estimate a network can be used with LIONESS to estimate single-sample networks. Two common use cases are the use of LIONESS to generate single-sample GRNs based on PANDA and the use of LIONESS to generate single-sample Pearson correlation networks.
+
+
+ALPACA
* **ALPACA** (ALtered Partitions Across Community Architectures) [[Padi and Quackenbush 2018]](https://www.nature.com/articles/s41540-018-0052-5): ALPACA is a method for differential network analysis that is based on a novel approach to comparison of network community structures. Comparisons of community structure have typically been accomplished by assessing which nodes switch community membership between networks ("community comparison") or by computing the edge weight differences by subtracting the adjacency matrices of two networks and then performing community detection on the resulting differential network ("edge subtraction"). Both these approaches have important limitations. Community comparison is subject to a resolution limit and cannot detect differences smaller than the average community size in a network. Edge subtraction transfers noise from both of the original networks to the differential network, leading to an imprecise estimator. Moreover, positive and negative edge differences cannot be distinguished in the subsequent community detection performed on the differential network.
In contrast to community comparison and edge subtraction, ALPACA compares the community structure of two networks by optimizing a new metric: "differential modularity". In the ALPACA algorithm, one network is defined as the reference network and the second is defined as the perturbed network. The differential modularity metric measures the extent to which edges in a community in the perturbed network differ from those that would be expected by random chance according to a null distribution based on the reference network. Community structure of the perturbed network is determined by maximizing this differential modularity. The resulting communities are "differential modules" that show how the perturbed network differs from the reference network at the community level.
+
+
+SAMBAR
* **SAMBAR** (Subtyping Agglomerated Mutations By Annotation Relations) [[Kuijjer et al.]](https://www.nature.com/articles/s41416-018-0109-7): SAMBAR is a tool for studying cancer subtypes based on patterns of somatic mutations in curated biological pathways. Rather than characterize cancer according to mutations at the gene level, SAMBAR agglomerates mutations within pathways to define a pathway mutation score. To avoid bias based on pathway representation, these pathway mutation scores correct for the number of genes in each pathway as well as the number of times each gene is represented in the universe of pathways. By taking a pathway rather than gene-by-gene lens, SAMBAR both de-sparsifies somatic mutation data and incorporates important prior biological knowledge. Kuijjer et al. (2018) demonstrate that SAMBAR is capable of outperforming other methods for cancer subtyping, producing subtypes with greater between-subtype distances; the authors use SAMBAR for a pan-cancer subtyping analysis that identifies four diverse pan-cancer subtypes linked to distinct molecular processes.
+
+
+MONSTER
* **MONSTER** (Modeling Network State Transitions from Expression and Regulatory data) [[Schlauch et al.]](https://doi.org/10.1186/s12918-017-0517-y): MONSTER is a method for estimating transitions between network states by modeling the adjacency matrix of one state as a linear transformation of the adjacency matrix of another. Like LIONESS, MONSTER is a flexible method that does not require a particular type of network structure. MONSTER models the perturbation of an initial network A into a perturbed network B according to a matrix product B = AT. T is a transition matrix encoding the changes that map A to B. When A and B are gene regulatory networks, i.e., bipartite networks between TFs and genes, the MONSTER framework leads naturally to the definition of TF involvement as the sum of the off-diagonal weights for a transcription factor $i$ in the transition matrix T. This perspective enables MONSTER to identify differentially involved TFs that contribute to network transitions differently between different conditions. This dimension cannot be captured from a traditional differential expression analysis of TFs, which will not detect TFs that have the same concentration between conditions.
+
+
+OTTER
* **OTTER** (Optimization to Estimate Regulation) [[Weighill et al.]](https://www.biorxiv.org/content/10.1101/2020.06.23.167999v2.abstract): OTTER is a GRN inference method based on the idea that observed biological data (PPI data and gene co-expression data) are projections of a bipartite GRN between TFs and genes. Specifically, PPI data represent the projection of the GRN onto the TF-TF space and gene co-expression data represent the projection of the GRN onto the gene-gene space. OTTER reframes the problem of GRN inference as a problem of relaxed graph matching and finds a GRN that has optimal agreement with the observed PPI and coexpression data. The OTTER objective function is tunable in two ways: first, one can prioritize matching the PPI data or the coexpression data more heavily depending on one's confidence in the data source; second, there is a regularization parameter that can be applied to induce sparsity on the estimated GRN. The OTTER objective function can be solved using spectral decomposition techniques and gradient descent; the latter is shown to be closely related to the PANDA message-passing approach (Glass et al. 2013).
+
+
+CRANE
* **CRANE** (Constrained Random Alteration of Network Edges) [[Lim et al.]](https://doi.org/10.3389/fgene.2020.603264): CRANE is a method for determining statistical significance of structural differences between networks. Analysis with CRANE is a four-phase process. The first step of CRANE is to estimate two networks: a reference network and a perturbed network. In the same spirit as LIONESS, CRANE is flexible: any network inference method (e.g., correlation, partial correlation, PANDA) can be used at this stage. In the second step, differential features are determined by comparing the reference and perturbed networks. Here, CRANE is again flexible: such differential features could arise from simple measures such as a comparison of node degree or centrality, or from more nuanced techniques such as differential module detection with ALPACA. Third, a large number of constrained random networks are developed based on the network structure of the reference network. By comparing each random network with the original reference network, a set of null differential measures is obtained. Fourth, the observed differential features from step two can be compared with the null distribution from step three to generate empirical p-values. A typical workflow for applying CRANE in NetZooR would involve fitting PANDA networks in step one and using ALPACA to estimate differential modules in step two.
+
+
+EGRET
* **EGRET** (Estimating the Genetic Regulatory effects on TFs) [[Weighill et al.]](https://www.genome.org/cgi/doi/10.1101/gr.275107.120): EGRET incorporates genetic variants as a fourth data type in the PANDA message-passing framework, enabling the estimation of genotype-specific GRNs. Genetic variants can alter transcription factor binding by affecting the composition of motif sites on the DNA. Not every genetic variant has such an affect; EGRET incorporates only genetic variants which have (1) been shown to be associated with gene expression (expression quantitative trait loci, or eQTL), and (2) are predicted to affect transcription factor binding based on a tool called QBiC (Martin et al. 2019). This information is used in combination with TFBM predictions as input to the PANDA message-passing framework. The resulting EGRET network is a genotype-specific bipartite GRN that is similar to a PANDA network but incorporates the information contained by individual genetic variation.
+
+
+YARN
* **YARN** (Yet Another RNa-seq package) [[Paulsson et al.]](https://bmcbioinformatics.biomedcentral.com/articles/10.1186/s12859-017-1847-x): YARN is a package that combines quality control, gene filtering, and normalization steps to streamline the preprocessing of large-scale, multi-tissue gene expression data from resources such as the Genotype-Tissue Expression (GTEx) project. Among other steps, YARN uses principal coordinate analysis (PCoA) to determine if samples collected from different sites on the same tissue (for example, transverse and sigmoid colon) can be treated as "transcriptionally indistinguishable" and grouped together to increase power for downstream analyses. Paulsson et al. (2017) demonstrate the use of YARN to develop a pan-cancer RNA-seq dataset for 30,333 genes from 9435 samples across 38 tissues from the GTEx dataset.
+
netZooR also integrates additional functions to:
@@ -103,6 +133,14 @@ BiocManager::install("netZooR")
For more details please refer to the [documentation website](https://netzoo.github.io/netZooR/).
+#### Using bioconda
+
+netZooR is also available through [Bioconda](https://bioconda.github.io/recipes/bioconductor-netzoor/README.html#package-bioconductor-netzoor)
+
+```bash
+conda install bioconductor-netzoor
+```
+
### Python binding
This package will invoke Python programming language in R environment through [reticulate](https://rstudio.github.io/reticulate/) package, by default setting there is no additional configuration needed.
diff --git a/docs/404.html b/docs/404.html
index 40cee443..8ddfd96c 100644
--- a/docs/404.html
+++ b/docs/404.html
@@ -71,7 +71,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -130,6 +130,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -171,7 +174,7 @@ Contents
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/CONDUCT.html b/docs/CONDUCT.html
index 0f415f9b..eb45d5a2 100644
--- a/docs/CONDUCT.html
+++ b/docs/CONDUCT.html
@@ -71,7 +71,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -130,6 +130,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -178,7 +181,7 @@ Contents
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/LICENSE-text.html b/docs/LICENSE-text.html
index ce7c4f0f..8fb6c2ed 100644
--- a/docs/LICENSE-text.html
+++ b/docs/LICENSE-text.html
@@ -71,7 +71,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -130,6 +130,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -845,7 +848,7 @@ Contents
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/ALPACA.html b/docs/articles/ALPACA.html
index 6c44b490..684cdfad 100644
--- a/docs/articles/ALPACA.html
+++ b/docs/articles/ALPACA.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -131,7 +134,7 @@ 2017-10-19
# install netZooR pkg with vignettes, otherwise remove the "build_vignettes = TRUE" argument.
devtools :: install_github ( "netZoo/netZooR" , build_vignettes = TRUE )
+library ( netZooR )
This vignettes can be accessed in R by using below line. when netZoooR was installed with arguments “build_vignettes = TRUE” .
@@ -152,6 +155,7 @@ 2017-10-19
## [1] 1
## [1] 2
## [1] 3
+## [1] 4
## [1] "Merging 2 communities"
## [1] 1
## [1] "Computing node scores..."
@@ -190,7 +194,7 @@ 2017-10-19
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/ApplicationinGTExData.html b/docs/articles/ApplicationinGTExData.html
index e0149ae3..23cbd5e8 100644
--- a/docs/articles/ApplicationinGTExData.html
+++ b/docs/articles/ApplicationinGTExData.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -143,12 +146,12 @@
Running a single PANDA analysis
Load some libraries. We use the data.table library for reading in large datasets as it is more efficient.
-library ( netZooR )
+library ( netZooR )
library ( data.table )
install.packages ( "visNetwork" ,repos = "http://cran.us.r-project.org" )
#>
#> The downloaded binary packages are in
-#> /var/folders/jh/345y9vmx0l7d63r5b05dwp080000gn/T//RtmpMy31Fu/downloaded_packages
+#> /var/folders/jh/345y9vmx0l7d63r5b05dwp080000gn/T//RtmpHZoDhZ/downloaded_packages
library ( visNetwork ) # to visualize the networks
# point R to your python 3 installation. Make sure that this is the installation that has all the required python libraries (numpy, scipy, etc) installed. netZooR uses a python implementation of PANDA under the hood.
@@ -341,7 +344,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/ApplicationwithTBdataset.html b/docs/articles/ApplicationwithTBdataset.html
index 529acd97..3b0e8457 100644
--- a/docs/articles/ApplicationwithTBdataset.html
+++ b/docs/articles/ApplicationwithTBdataset.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -186,7 +189,7 @@
Running the sample TB datasets
-library ( netZooR )
+library ( netZooR )
#> Loading required package: igraph
#>
#> Attaching package: 'igraph'
@@ -379,7 +382,7 @@
#> [8] base
#>
#> other attached packages:
-#> [1] viridisLite_0.4.0 netZooR_1.0.4 yarn_1.18.0
+#> [1] viridisLite_0.4.0 netZooR_1.1.16 yarn_1.18.0
#> [4] pandaR_1.19.5 Biobase_2.52.0 BiocGenerics_0.38.0
#> [7] reticulate_1.22 igraph_1.2.6
#>
@@ -499,7 +502,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/ApplicationwithTBdataset_files/figure-html/unnamed-chunk-8-1.png b/docs/articles/ApplicationwithTBdataset_files/figure-html/unnamed-chunk-8-1.png
index 1b004574..6eddffd7 100644
Binary files a/docs/articles/ApplicationwithTBdataset_files/figure-html/unnamed-chunk-8-1.png and b/docs/articles/ApplicationwithTBdataset_files/figure-html/unnamed-chunk-8-1.png differ
diff --git a/docs/articles/CONDOR.html b/docs/articles/CONDOR.html
index 9b62474b..9278d21d 100644
--- a/docs/articles/CONDOR.html
+++ b/docs/articles/CONDOR.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
condor works with an edgelist (elist in the code below) as its input.
r = c ( 1 ,1 ,1 ,2 ,2 ,2 ,3 ,3 ,3 ,4 ,4 ) ;
@@ -168,30 +171,30 @@
condor.object <- condorCluster ( condor.object )
## [1] "modularity of projected graph 0"
## [1] "Q = 0"
-## [1] "Q = 0.198347107438017"
-## [1] "Q = 0.231404958677686"
-## [1] "Q = 0.231404958677686"
+## [1] "Q = 0.132231404958678"
+## [1] "Q = 0.148760330578512"
+## [1] "Q = 0.148760330578512"
print ( condor.object $ red.memb )
## red.names com
## 1 Alice 1
-## 2 Janine 2
-## 3 Mary 2
-## 4 Sue 1
+## 2 Janine 1
+## 3 Mary 1
+## 4 Sue 2
print ( condor.object $ blue.memb )
## blue.names com
-## 1 Bob 1
-## 2 Ed 2
+## 1 Bob 2
+## 2 Ed 1
## 3 Hank 2
-## 4 John 1
+## 4 John 2
Nodes in first community are {Alice, John, Bob, Sue}, nodes in second community are {Ed, Janine, Hank, Mary} based on the modularity maximization. Here’s a picture:
gtoy = graph.edgelist ( as.matrix ( elist ) ,directed= FALSE )
set.graph.attribute ( gtoy , "layout" , layout.kamada.kawai ( gtoy ) )
-
## IGRAPH 694fdca UN-- 8 11 --
+## IGRAPH d53f3f0 UN-- 8 11 --
## + attr: layout (g/n), name (v/c)
-## + edges from 694fdca (vertex names):
+## + edges from d53f3f0 (vertex names):
## [1] Alice--Bob Alice--John Alice--Ed Bob --Sue John --Sue
## [6] Sue --Hank John --Janine Ed --Janine Hank --Janine Ed --Mary
## [11] Hank --Mary
@@ -242,7 +245,7 @@
## [8] base
##
## other attached packages:
-## [1] netZooR_1.0.4 yarn_1.18.0 pandaR_1.19.5
+## [1] netZooR_1.1.16 yarn_1.18.0 pandaR_1.19.5
## [4] Biobase_2.52.0 BiocGenerics_0.38.0 reticulate_1.22
## [7] igraph_1.2.6
##
@@ -356,7 +359,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/CONDOR_files/figure-html/unnamed-chunk-10-1.png b/docs/articles/CONDOR_files/figure-html/unnamed-chunk-10-1.png
index 71702b65..c5459d1b 100644
Binary files a/docs/articles/CONDOR_files/figure-html/unnamed-chunk-10-1.png and b/docs/articles/CONDOR_files/figure-html/unnamed-chunk-10-1.png differ
diff --git a/docs/articles/CONDOR_files/figure-html/unnamed-chunk-8-1.png b/docs/articles/CONDOR_files/figure-html/unnamed-chunk-8-1.png
index 37ca825b..899d29cf 100644
Binary files a/docs/articles/CONDOR_files/figure-html/unnamed-chunk-8-1.png and b/docs/articles/CONDOR_files/figure-html/unnamed-chunk-8-1.png differ
diff --git a/docs/articles/EGRET_toy_example.html b/docs/articles/EGRET_toy_example.html
index 84141aaa..1ec64d2e 100644
--- a/docs/articles/EGRET_toy_example.html
+++ b/docs/articles/EGRET_toy_example.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -143,7 +146,7 @@
#devtools::install_github("netZoo/netZooR@devel")
Load the netZooR package:
+library ( netZooR )
+library ( netZooR )
@@ -203,12 +206,12 @@
#> [1] "Running iteration 1"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 6.29425e-05 secs
+#> Time difference of 6.008148e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.605553e-05 secs
+#> Time difference of 9.10759e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -216,12 +219,12 @@
#> [1] "Running iteration 2"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.796288e-05 secs
+#> Time difference of 8.201599e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.201599e-05 secs
+#> Time difference of 9.584427e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -229,12 +232,12 @@
#> [1] "Running iteration 3"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.605553e-05 secs
+#> Time difference of 8.511543e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.106232e-05 secs
+#> Time difference of 7.390976e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -242,12 +245,12 @@
#> [1] "Running iteration 4"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.414818e-05 secs
+#> Time difference of 7.796288e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.104874e-05 secs
+#> Time difference of 7.796288e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -255,12 +258,12 @@
#> [1] "Running iteration 5"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.296967e-05 secs
+#> Time difference of 8.416176e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.104874e-05 secs
+#> Time difference of 8.487701e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -268,12 +271,12 @@
#> [1] "Running iteration 6"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.915497e-05 secs
+#> Time difference of 7.486343e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.605553e-05 secs
+#> Time difference of 8.106232e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -281,27 +284,25 @@
#> [1] "Running iteration 7"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.796288e-05 secs
+#> Time difference of 8.487701e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.606911e-05 secs
+#> Time difference of 8.487701e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Using OLS method"
+#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 7"
#> [1] "Running iteration 8"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.392334e-05 secs
+#> Time difference of 7.31945e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.392334e-05 secs
+#> Time difference of 7.915497e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -309,12 +310,14 @@
#> [1] "Running iteration 9"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.605553e-05 secs
+#> Time difference of 8.106232e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Initializing and validating"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.31945e-05 secs
+#> Time difference of 0.0003321171 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -322,12 +325,12 @@
#> [1] "Running iteration 10"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.012222e-05 secs
+#> Time difference of 0.0001831055 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.487701e-05 secs
+#> Time difference of 0.0001070499 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -335,25 +338,27 @@
#> [1] "Running iteration 11"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.820129e-05 secs
+#> Time difference of 0.0002679825 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.201599e-05 secs
+#> Time difference of 7.987022e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 11"
#> [1] "Running iteration 12"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.987022e-05 secs
+#> Time difference of 8.893013e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.296967e-05 secs
+#> Time difference of 8.821487e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -361,12 +366,12 @@
#> [1] "Running iteration 13"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 7.915497e-05 secs
+#> Time difference of 7.009506e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.010864e-05 secs
+#> Time difference of 7.414818e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -374,12 +379,12 @@
#> [1] "Running iteration 14"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.201599e-05 secs
+#> Time difference of 7.009506e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.010864e-05 secs
+#> Time difference of 7.31945e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -387,12 +392,12 @@
#> [1] "Running iteration 15"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.106232e-05 secs
+#> Time difference of 0.0001008511 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.606911e-05 secs
+#> Time difference of 0.0001218319 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -400,27 +405,25 @@
#> [1] "Running iteration 16"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.799004e-05 secs
+#> Time difference of 7.796288e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.083748e-05 secs
+#> Time difference of 7.915497e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Using OLS method"
+#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 16"
#> [1] "Running iteration 17"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 9.489059e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 0.0002009869 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -428,12 +431,12 @@
#> [1] "Running iteration 18"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.000125885 secs
+#> Time difference of 9.799004e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001049042 secs
+#> Time difference of 8.702278e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -441,12 +444,12 @@
#> [1] "Running iteration 19"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001070499 secs
+#> Time difference of 0.0001039505 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001060963 secs
+#> Time difference of 9.799004e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -454,12 +457,12 @@
#> [1] "Running iteration 20"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001089573 secs
+#> Time difference of 8.583069e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 0.0001370907 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -467,12 +470,12 @@
#> [1] "Running iteration 21"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001089573 secs
+#> Time difference of 8.702278e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001130104 secs
+#> Time difference of 8.201599e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -480,12 +483,12 @@
#> [1] "Running iteration 22"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001089573 secs
+#> Time difference of 8.08239e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001070499 secs
+#> Time difference of 8.201599e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -493,14 +496,12 @@
#> [1] "Running iteration 23"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 7.414818e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Initializing and validating"
+#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.918213e-05 secs
+#> Time difference of 9.512901e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -508,12 +509,12 @@
#> [1] "Running iteration 24"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 8.201599e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001029968 secs
+#> Time difference of 8.08239e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -521,12 +522,14 @@
#> [1] "Running iteration 25"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001029968 secs
+#> Time difference of 9.10759e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Initializing and validating"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001461506 secs
+#> Time difference of 8.606911e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -534,12 +537,12 @@
#> [1] "Running iteration 26"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001029968 secs
+#> Time difference of 8.201599e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001008511 secs
+#> Time difference of 9.393692e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -547,25 +550,27 @@
#> [1] "Running iteration 27"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 8.702278e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001060963 secs
+#> Time difference of 9.393692e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 27"
#> [1] "Running iteration 28"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001051426 secs
+#> Time difference of 7.796288e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001060963 secs
+#> Time difference of 8.583069e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -573,12 +578,12 @@
#> [1] "Running iteration 29"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.989738e-05 secs
+#> Time difference of 8.106232e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001001358 secs
+#> Time difference of 0.0001380444 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -586,14 +591,12 @@
#> [1] "Running iteration 30"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.393692e-05 secs
+#> Time difference of 9.202957e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Initializing and validating"
+#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.512901e-05 secs
+#> Time difference of 8.606911e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -601,12 +604,12 @@
#> [1] "Running iteration 31"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.393692e-05 secs
+#> Time difference of 7.987022e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.012222e-05 secs
+#> Time difference of 8.916855e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -614,12 +617,12 @@
#> [1] "Running iteration 32"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.393692e-05 secs
+#> Time difference of 9.799004e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.703636e-05 secs
+#> Time difference of 0.0001268387 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -627,12 +630,12 @@
#> [1] "Running iteration 33"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.703636e-05 secs
+#> Time difference of 9.894371e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.202957e-05 secs
+#> Time difference of 8.487701e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -640,12 +643,12 @@
#> [1] "Running iteration 34"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.98838e-05 secs
+#> Time difference of 7.510185e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 8.392334e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -653,12 +656,12 @@
#> [1] "Running iteration 35"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.893013e-05 secs
+#> Time difference of 0.000166893 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -666,14 +669,12 @@
#> [1] "Running iteration 36"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.489059e-05 secs
+#> Time difference of 9.298325e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Initializing and validating"
+#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001008511 secs
+#> Time difference of 9.298325e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -681,12 +682,12 @@
#> [1] "Running iteration 37"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.012222e-05 secs
+#> Time difference of 8.106232e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.98838e-05 secs
+#> Time difference of 9.10759e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -699,20 +700,22 @@
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 8.320808e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 38"
#> [1] "Running iteration 39"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001058578 secs
+#> Time difference of 9.202957e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.202957e-05 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -720,12 +723,12 @@
#> [1] "Running iteration 40"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 8.583069e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.584427e-05 secs
+#> Time difference of 0.0001020432 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -733,12 +736,12 @@
#> [1] "Running iteration 41"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.202957e-05 secs
+#> Time difference of 0.0001020432 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.202957e-05 secs
+#> Time difference of 9.417534e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -746,12 +749,12 @@
#> [1] "Running iteration 42"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.703636e-05 secs
+#> Time difference of 9.512901e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001058578 secs
+#> Time difference of 8.98838e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -759,12 +762,12 @@
#> [1] "Running iteration 43"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.10759e-05 secs
+#> Time difference of 8.893013e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.202957e-05 secs
+#> Time difference of 8.606911e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -772,12 +775,12 @@
#> [1] "Running iteration 44"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.083748e-05 secs
+#> Time difference of 0.0001151562 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.894371e-05 secs
+#> Time difference of 8.416176e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -785,12 +788,12 @@
#> [1] "Running iteration 45"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.298325e-05 secs
+#> Time difference of 8.893013e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.083748e-05 secs
+#> Time difference of 9.393692e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -798,12 +801,12 @@
#> [1] "Running iteration 46"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.393692e-05 secs
+#> Time difference of 8.797646e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.10759e-05 secs
+#> Time difference of 0.0001378059 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -811,7 +814,7 @@
#> [1] "Running iteration 47"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.10759e-05 secs
+#> Time difference of 9.608269e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
@@ -824,25 +827,27 @@
#> [1] "Running iteration 48"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.393692e-05 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.918213e-05 secs
+#> Time difference of 0.0001111031 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 48"
#> [1] "Running iteration 49"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 9.894371e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.511543e-05 secs
+#> Time difference of 8.98838e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -850,27 +855,25 @@
#> [1] "Running iteration 50"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.393692e-05 secs
+#> Time difference of 8.98838e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.393692e-05 secs
+#> Time difference of 0.0004348755 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Using OLS method"
+#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 50"
#> [1] "Running iteration 51"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.703636e-05 secs
+#> Time difference of 0.0001049042 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.703636e-05 secs
+#> Time difference of 9.10759e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -878,12 +881,12 @@
#> [1] "Running iteration 52"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 0.0001058578 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.799004e-05 secs
+#> Time difference of 8.106232e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -891,12 +894,12 @@
#> [1] "Running iteration 53"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.202957e-05 secs
+#> Time difference of 0.0001139641 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.918213e-05 secs
+#> Time difference of 0.000123024 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -904,14 +907,12 @@
#> [1] "Running iteration 54"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001060963 secs
+#> Time difference of 0.000109911 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Initializing and validating"
+#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001029968 secs
+#> Time difference of 9.989738e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -919,12 +920,12 @@
#> [1] "Running iteration 55"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.894371e-05 secs
+#> Time difference of 0.0001420975 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001111031 secs
+#> Time difference of 9.799004e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -932,12 +933,12 @@
#> [1] "Running iteration 56"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.202957e-05 secs
+#> Time difference of 8.702278e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001008511 secs
+#> Time difference of 0.0001089573 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -945,12 +946,12 @@
#> [1] "Running iteration 57"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001070499 secs
+#> Time difference of 8.702278e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001070499 secs
+#> Time difference of 8.583069e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -958,12 +959,12 @@
#> [1] "Running iteration 58"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001108646 secs
+#> Time difference of 0.0001289845 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001151562 secs
+#> Time difference of 9.989738e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -971,12 +972,12 @@
#> [1] "Running iteration 59"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001189709 secs
+#> Time difference of 9.393692e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.000123024 secs
+#> Time difference of 0.0001499653 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -984,40 +985,40 @@
#> [1] "Running iteration 60"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001199245 secs
+#> Time difference of 8.702278e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001239777 secs
+#> Time difference of 9.298325e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Using OLS method"
+#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 60"
#> [1] "Running iteration 61"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001828671 secs
+#> Time difference of 0.0001070499 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001261234 secs
+#> Time difference of 9.393692e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 61"
#> [1] "Running iteration 62"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001330376 secs
+#> Time difference of 8.606911e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001358986 secs
+#> Time difference of 9.918213e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1025,12 +1026,12 @@
#> [1] "Running iteration 63"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001280308 secs
+#> Time difference of 9.10759e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001370907 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1038,12 +1039,12 @@
#> [1] "Running iteration 64"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001311302 secs
+#> Time difference of 9.012222e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001280308 secs
+#> Time difference of 9.512901e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1051,25 +1052,27 @@
#> [1] "Running iteration 65"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001399517 secs
+#> Time difference of 0.0001041889 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001280308 secs
+#> Time difference of 0.0003099442 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 65"
#> [1] "Running iteration 66"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001280308 secs
+#> Time difference of 0.0001001358 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.000138998 secs
+#> Time difference of 9.202957e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1077,25 +1080,27 @@
#> [1] "Running iteration 67"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001351833 secs
+#> Time difference of 0.0001471043 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001339912 secs
+#> Time difference of 0.0001029968 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 67"
#> [1] "Running iteration 68"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001349449 secs
+#> Time difference of 0.000164032 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001490116 secs
+#> Time difference of 0.0001120567 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1103,12 +1108,14 @@
#> [1] "Running iteration 69"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001349449 secs
+#> Time difference of 0.0001139641 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Initializing and validating"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001339912 secs
+#> Time difference of 0.0001449585 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1116,12 +1123,12 @@
#> [1] "Running iteration 70"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001301765 secs
+#> Time difference of 0.0001249313 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001289845 secs
+#> Time difference of 0.0001649857 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1129,7 +1136,7 @@
#> [1] "Running iteration 71"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001199245 secs
+#> Time difference of 0.0001308918 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
@@ -1142,27 +1149,25 @@
#> [1] "Running iteration 72"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001149178 secs
+#> Time difference of 0.0001058578 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001189709 secs
+#> Time difference of 9.894371e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Using OLS method"
+#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 72"
#> [1] "Running iteration 73"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001249313 secs
+#> Time difference of 0.0001049042 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001130104 secs
+#> Time difference of 0.0001010895 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1170,12 +1175,12 @@
#> [1] "Running iteration 74"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001080036 secs
+#> Time difference of 0.0001039505 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001101494 secs
+#> Time difference of 0.0001220703 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1183,12 +1188,12 @@
#> [1] "Running iteration 75"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001111031 secs
+#> Time difference of 8.893013e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1196,12 +1201,12 @@
#> [1] "Running iteration 76"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001051426 secs
+#> Time difference of 9.512901e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001039505 secs
+#> Time difference of 9.894371e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1209,12 +1214,12 @@
#> [1] "Running iteration 77"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001001358 secs
+#> Time difference of 0.0001518726 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001111031 secs
+#> Time difference of 9.608269e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1222,12 +1227,12 @@
#> [1] "Running iteration 78"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.989738e-05 secs
+#> Time difference of 0.0001149178 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001049042 secs
+#> Time difference of 0.0001130104 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1235,14 +1240,12 @@
#> [1] "Running iteration 79"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.703636e-05 secs
+#> Time difference of 9.10759e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Initializing and validating"
+#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.894371e-05 secs
+#> Time difference of 9.489059e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1250,7 +1253,7 @@
#> [1] "Running iteration 80"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.894371e-05 secs
+#> Time difference of 0.0001239777 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
@@ -1258,19 +1261,17 @@
#> Time difference of 0.0001051426 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Using OLS method"
+#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 80"
#> [1] "Running iteration 81"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.608269e-05 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001149178 secs
+#> Time difference of 9.608269e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1278,12 +1279,12 @@
#> [1] "Running iteration 82"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 0.0001208782 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.512901e-05 secs
+#> Time difference of 0.0001010895 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1291,25 +1292,27 @@
#> [1] "Running iteration 83"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001039505 secs
+#> Time difference of 0.0001080036 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001039505 secs
+#> Time difference of 9.799004e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .....................................................[1] "Using OLS method"
+#> .......................
+#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
+#> ..............................[1] "Using OLS method"
#> [1] "Finished running iteration 83"
#> [1] "Running iteration 84"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001029968 secs
+#> Time difference of 0.0002040863 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001001358 secs
+#> Time difference of 0.0001449585 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1317,12 +1320,12 @@
#> [1] "Running iteration 85"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001041889 secs
+#> Time difference of 0.0001251698 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.703636e-05 secs
+#> Time difference of 0.0001070499 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1330,12 +1333,12 @@
#> [1] "Running iteration 86"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001010895 secs
+#> Time difference of 9.489059e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.799004e-05 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1343,12 +1346,12 @@
#> [1] "Running iteration 87"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001080036 secs
+#> Time difference of 0.0001089573 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.298325e-05 secs
+#> Time difference of 0.0001068115 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1361,7 +1364,7 @@
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.989738e-05 secs
+#> Time difference of 9.298325e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1369,12 +1372,12 @@
#> [1] "Running iteration 89"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001039505 secs
+#> Time difference of 0.0001049042 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001010895 secs
+#> Time difference of 0.0001041889 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1382,27 +1385,25 @@
#> [1] "Running iteration 90"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 9.799004e-05 secs
+#> Time difference of 0.0001149178 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 8.606911e-05 secs
+#> Time difference of 0.0001068115 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
-#> .......................
-#> Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
-#> ..............................[1] "Using OLS method"
+#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 90"
#> [1] "Running iteration 91"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 0.00020504 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001039505 secs
+#> Time difference of 0.0001139641 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1410,12 +1411,12 @@
#> [1] "Running iteration 92"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001010895 secs
+#> Time difference of 0.0001029968 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001142025 secs
+#> Time difference of 0.0001111031 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1423,12 +1424,12 @@
#> [1] "Running iteration 93"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001111031 secs
+#> Time difference of 0.0001208782 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001120567 secs
+#> Time difference of 9.298325e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1436,12 +1437,12 @@
#> [1] "Running iteration 94"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001199245 secs
+#> Time difference of 0.0001001358 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001111031 secs
+#> Time difference of 0.0001189709 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1449,12 +1450,12 @@
#> [1] "Running iteration 95"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001111031 secs
+#> Time difference of 9.608269e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001170635 secs
+#> Time difference of 9.799004e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1462,12 +1463,12 @@
#> [1] "Running iteration 96"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001149178 secs
+#> Time difference of 9.584427e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001111031 secs
+#> Time difference of 9.608269e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1475,12 +1476,12 @@
#> [1] "Running iteration 97"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001029968 secs
+#> Time difference of 0.0001308918 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001060963 secs
+#> Time difference of 0.0001080036 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1488,12 +1489,12 @@
#> [1] "Running iteration 98"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001020432 secs
+#> Time difference of 9.703636e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001409054 secs
+#> Time difference of 0.0001239777 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1501,12 +1502,12 @@
#> [1] "Running iteration 99"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001130104 secs
+#> Time difference of 0.0001268387 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.000109911 secs
+#> Time difference of 0.0001060963 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1514,12 +1515,12 @@
#> [1] "Running iteration 100"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.000109911 secs
+#> Time difference of 9.918213e-05 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001091957 secs
+#> Time difference of 0.0001018047 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
@@ -1527,17 +1528,17 @@
#> [1] "Running iteration 101"
#> [1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001158714 secs
+#> Time difference of 0.0001211166 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Initializing and validating"
#> [1] "Verified adequate samples, calculating correlation matrix"
-#> Time difference of 0.0001120567 secs
+#> Time difference of 0.0001218319 secs
#> [1] "More data cleaning"
#> [1] "Main calculation"
#> .....................................................[1] "Using OLS method"
#> [1] "Finished running iteration 101"
-#> Time difference of 5.257792 mins
+#> Time difference of 4.959143 mins
We can print the details of the analysis result
monsterRes
@@ -1621,7 +1622,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/MONSTER_files/figure-html/unnamed-chunk-10-1.png b/docs/articles/MONSTER_files/figure-html/unnamed-chunk-10-1.png
index 13483032..086c4b44 100644
Binary files a/docs/articles/MONSTER_files/figure-html/unnamed-chunk-10-1.png and b/docs/articles/MONSTER_files/figure-html/unnamed-chunk-10-1.png differ
diff --git a/docs/articles/MONSTER_files/figure-html/unnamed-chunk-11-1.png b/docs/articles/MONSTER_files/figure-html/unnamed-chunk-11-1.png
index d9b7e992..fdbbd0c4 100644
Binary files a/docs/articles/MONSTER_files/figure-html/unnamed-chunk-11-1.png and b/docs/articles/MONSTER_files/figure-html/unnamed-chunk-11-1.png differ
diff --git a/docs/articles/MONSTER_files/figure-html/unnamed-chunk-13-1.png b/docs/articles/MONSTER_files/figure-html/unnamed-chunk-13-1.png
index b69a6498..15671012 100644
Binary files a/docs/articles/MONSTER_files/figure-html/unnamed-chunk-13-1.png and b/docs/articles/MONSTER_files/figure-html/unnamed-chunk-13-1.png differ
diff --git a/docs/articles/SAMBAR.html b/docs/articles/SAMBAR.html
index c7b0925b..b63caa0d 100644
--- a/docs/articles/SAMBAR.html
+++ b/docs/articles/SAMBAR.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -157,7 +160,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/TutorialOTTER.html b/docs/articles/TutorialOTTER.html
index 60100999..f98373fa 100644
--- a/docs/articles/TutorialOTTER.html
+++ b/docs/articles/TutorialOTTER.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -148,7 +151,7 @@
Load packages
-library ( netZooR )
+library ( netZooR )
library ( fgsea )
library ( ggplot2 )
library ( reshape2 )
@@ -357,34 +360,34 @@
fgseaRes <- fgsea ( pathways , degreeDiff_all , minSize= 15 , maxSize= 500 , nperm= 1000 )
head ( fgseaRes )
#> pathway pval padj
-#> 1: KEGG_GLYCOLYSIS_GLUCONEOGENESIS 0.002132196 0.01534327
-#> 2: KEGG_CITRATE_CYCLE_TCA_CYCLE 0.248175182 0.40972222
-#> 3: KEGG_PENTOSE_PHOSPHATE_PATHWAY 0.069387755 0.15950172
-#> 4: KEGG_PENTOSE_AND_GLUCURONATE_INTERCONVERSIONS 0.433609959 0.61020175
-#> 5: KEGG_FRUCTOSE_AND_MANNOSE_METABOLISM 0.050772627 0.13024283
-#> 6: KEGG_GALACTOSE_METABOLISM 0.347368421 0.52105263
+#> 1: KEGG_GLYCOLYSIS_GLUCONEOGENESIS 0.002028398 0.01741098
+#> 2: KEGG_CITRATE_CYCLE_TCA_CYCLE 0.217131474 0.36256859
+#> 3: KEGG_PENTOSE_PHOSPHATE_PATHWAY 0.083168317 0.17735894
+#> 4: KEGG_PENTOSE_AND_GLUCURONATE_INTERCONVERSIONS 0.390873016 0.56708626
+#> 5: KEGG_FRUCTOSE_AND_MANNOSE_METABOLISM 0.036734694 0.10659083
+#> 6: KEGG_GALACTOSE_METABOLISM 0.329243354 0.49808610
#> ES NES nMoreExtreme size leadingEdge
-#> 1: 0.4360719 1.786218 0 58 HK1,ENO2,ENO1,ALDOC,BPGM,PFKM,...
-#> 2: -0.3428560 -1.153405 135 29 DLST,PCK2,SUCLG2P2,MDH2,FH,IDH2,...
-#> 3: 0.4435710 1.430352 33 21 ALDOC,PFKM,PFKP,ALDOA,PFKL,GPI,...
-#> 4: 0.3180477 1.029746 208 22 UGT1A1,UGT1A10,UGT1A6,CRYL1,UGT2B7
-#> 5: 0.4118882 1.445861 22 31 HK1,ALDOC,PFKFB4,PFKM,MTMR2,PFKP,...
-#> 6: 0.3247973 1.082019 164 25 GLB1,HK1,PFKM,PFKP,PFKL,GAA,...
+#> 1: 0.4360719 1.779675 0 58 HK1,ENO2,ENO1,ALDOC,BPGM,PFKM,...
+#> 2: -0.3428560 -1.172256 108 29 DLST,PCK2,SUCLG2P2,MDH2,FH,IDH2,...
+#> 3: 0.4435710 1.414328 41 21 ALDOC,PFKM,PFKP,ALDOA,PFKL,GPI,...
+#> 4: 0.3180477 1.031980 196 22 UGT1A1,UGT1A10,UGT1A6,CRYL1,UGT2B7
+#> 5: 0.4118882 1.458885 17 31 HK1,ALDOC,PFKFB4,PFKM,MTMR2,PFKP,...
+#> 6: 0.3247973 1.088178 160 25 GLB1,HK1,PFKM,PFKP,PFKL,GAA,...
# Subset to pathways with FDR < 0.05
sig <- fgseaRes [ fgseaRes $ padj < 0.05 ,]
# Get the top 10 significant pathways enriched for genes having lower targeting in LCLs
sig [ order ( sig $ NES ) [ 1 : 10 ] ,]
#> pathway pval padj ES NES
-#> 1: KEGG_DNA_REPLICATION 0.001841621 0.01534327 -0.7725018 -2.724198
-#> 2: KEGG_CELL_CYCLE 0.001751313 0.01534327 -0.5948628 -2.683607
-#> 3: KEGG_SPLICEOSOME 0.001776199 0.01534327 -0.5766563 -2.582334
-#> 4: KEGG_PROTEIN_EXPORT 0.001897533 0.01534327 -0.6851677 -2.181782
-#> 5: KEGG_NUCLEOTIDE_EXCISION_REPAIR 0.001838235 0.01534327 -0.5785505 -2.114989
-#> 6: KEGG_BASE_EXCISION_REPAIR 0.001818182 0.01534327 -0.5965607 -2.087925
-#> 7: KEGG_PYRIMIDINE_METABOLISM 0.001811594 0.01534327 -0.4606911 -1.957338
-#> 8: KEGG_RNA_POLYMERASE 0.001862197 0.01534327 -0.5731001 -1.906608
-#> 9: KEGG_MISMATCH_REPAIR 0.005769231 0.03094406 -0.6107380 -1.904153
-#> 10: KEGG_N_GLYCAN_BIOSYNTHESIS 0.003703704 0.02260536 -0.4815544 -1.793979
+#> 1: KEGG_DNA_REPLICATION 0.001901141 0.01741098 -0.7725018 -2.758721
+#> 2: KEGG_CELL_CYCLE 0.001773050 0.01741098 -0.5948628 -2.744328
+#> 3: KEGG_SPLICEOSOME 0.001795332 0.01741098 -0.5766563 -2.635589
+#> 4: KEGG_PROTEIN_EXPORT 0.001980198 0.01741098 -0.6851677 -2.242257
+#> 5: KEGG_NUCLEOTIDE_EXCISION_REPAIR 0.001919386 0.01741098 -0.5785505 -2.157588
+#> 6: KEGG_BASE_EXCISION_REPAIR 0.001934236 0.01741098 -0.5965607 -2.107505
+#> 7: KEGG_PYRIMIDINE_METABOLISM 0.001841621 0.01741098 -0.4606911 -2.012965
+#> 8: KEGG_MISMATCH_REPAIR 0.002008032 0.01741098 -0.6107380 -1.970065
+#> 9: KEGG_RNA_POLYMERASE 0.001992032 0.01741098 -0.5731001 -1.955036
+#> 10: KEGG_N_GLYCAN_BIOSYNTHESIS 0.003891051 0.02463808 -0.4815544 -1.828574
#> nMoreExtreme size leadingEdge
#> 1: 0 34 RFC5,RFC3,FEN1,MCM7,POLE2,DNA2,...
#> 2: 0 121 ORC6,CDC7,CDC20,PKMYT1,CCNE1,ORC1,...
@@ -393,8 +396,8 @@
#> 5: 0 41 RFC5,RFC3,POLE2,CCNH,RFC4,PCNA,...
#> 6: 0 33 FEN1,HMGB1,MUTYH,UNG,POLE2,NEIL3,...
#> 7: 0 90 POLR2D,DUT,POLE2,PRIM1,DTYMK,DHODH,...
-#> 8: 0 28 POLR2D,POLR3B,POLR2H,POLR2L,POLR3GL,POLR2J,...
-#> 9: 2 22 RFC5,RFC3,RFC4,PCNA,EXO1,RFC2,...
+#> 8: 0 22 RFC5,RFC3,RFC4,PCNA,EXO1,RFC2,...
+#> 9: 0 28 POLR2D,POLR3B,POLR2H,POLR2L,POLR3GL,POLR2J,...
#> 10: 1 45 B4GALT2,DPM3,ALG14,FUT8,ALG6,MGAT2,...
@@ -460,7 +463,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/TutorialOTTER_files/figure-html/unnamed-chunk-15-1.png b/docs/articles/TutorialOTTER_files/figure-html/unnamed-chunk-15-1.png
index 1a50f734..e41fc90a 100644
Binary files a/docs/articles/TutorialOTTER_files/figure-html/unnamed-chunk-15-1.png and b/docs/articles/TutorialOTTER_files/figure-html/unnamed-chunk-15-1.png differ
diff --git a/docs/articles/index.html b/docs/articles/index.html
index 5083476f..a9cfd584 100644
--- a/docs/articles/index.html
+++ b/docs/articles/index.html
@@ -71,7 +71,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -130,6 +130,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -192,7 +195,7 @@ All vignettes
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/pandaR.html b/docs/articles/pandaR.html
index c6a45631..b2d6b04f 100644
--- a/docs/articles/pandaR.html
+++ b/docs/articles/pandaR.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
## Loading required package: igraph
## Warning: package 'igraph' was built under R version 4.1.2
##
@@ -265,7 +268,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/pandaRApplicationinGTExData.html b/docs/articles/pandaRApplicationinGTExData.html
index b208f7d6..908c5778 100644
--- a/docs/articles/pandaRApplicationinGTExData.html
+++ b/docs/articles/pandaRApplicationinGTExData.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -147,7 +150,7 @@
Load packages
@@ -288,7 +291,7 @@
Plot the top differential edges betwen LCL and WB
# Select the top differential edge weights betweeen LCL and whole blood
-diffRes <- pandaDiffEdges ( lcl_vis , wb_vis , condition_name= "LCL" )
+diffRes <- pandaDiffEdges ( lcl_vis , wb_vis , condition_name= "LCL" )
head ( diffRes )
# Number of differential edges is:
nrow ( diffRes )
@@ -343,19 +346,19 @@
fgseaRes <- fgsea ( pathways , degreeDiff_all , minSize= 15 , maxSize= 500 , nperm= 1000 )
head ( fgseaRes )
#> pathway pval padj
-#> 1: KEGG_GLYCOLYSIS_GLUCONEOGENESIS 0.036407767 0.10558252
-#> 2: KEGG_CITRATE_CYCLE_TCA_CYCLE 0.001821494 0.01648352
-#> 3: KEGG_PENTOSE_PHOSPHATE_PATHWAY 0.110132159 0.22544701
-#> 4: KEGG_PENTOSE_AND_GLUCURONATE_INTERCONVERSIONS 0.639198218 0.72693131
-#> 5: KEGG_FRUCTOSE_AND_MANNOSE_METABOLISM 0.042889391 0.11660553
-#> 6: KEGG_GALACTOSE_METABOLISM 0.039130435 0.10950283
+#> 1: KEGG_GLYCOLYSIS_GLUCONEOGENESIS 0.029850746 0.08991319
+#> 2: KEGG_CITRATE_CYCLE_TCA_CYCLE 0.001831502 0.01749271
+#> 3: KEGG_PENTOSE_PHOSPHATE_PATHWAY 0.083885210 0.17792202
+#> 4: KEGG_PENTOSE_AND_GLUCURONATE_INTERCONVERSIONS 0.648230088 0.71807684
+#> 5: KEGG_FRUCTOSE_AND_MANNOSE_METABOLISM 0.037444934 0.10402654
+#> 6: KEGG_GALACTOSE_METABOLISM 0.025641026 0.08391204
#> ES NES nMoreExtreme size
-#> 1: 0.3355458 1.4089240 14 58
-#> 2: -0.5923935 -2.0223405 0 29
-#> 3: 0.4235984 1.3503798 49 21
-#> 4: 0.2675875 0.8668447 286 22
-#> 5: 0.4168076 1.4779398 18 30
-#> 6: 0.4491757 1.5179036 17 25
+#> 1: 0.3355458 1.4197869 11 58
+#> 2: -0.5923935 -2.0056899 0 29
+#> 3: 0.4235984 1.3970008 37 21
+#> 4: 0.2675875 0.8884628 292 22
+#> 5: 0.4168076 1.5152490 16 30
+#> 6: 0.4491757 1.5737014 11 25
#> leadingEdge
#> 1: ALDH3B1,LDHAL6A,PGM1,PKM,ALDOA,ENO1,...
#> 2: MDH2,CS,DLST,SDHD,IDH3B,SUCLG2,...
@@ -368,27 +371,27 @@
# Get the top 10 significant pathways enriched for genes having lower targeting in LCLs
sig [ order ( sig $ NES ) [ 1 : 10 ] ,]
#> pathway pval padj ES
-#> 1: KEGG_SPLICEOSOME 0.001574803 0.01648352 -0.6155980
-#> 2: KEGG_RIBOSOME 0.001633987 0.01648352 -0.6438056
-#> 3: KEGG_DNA_REPLICATION 0.001779359 0.01648352 -0.7301502
-#> 4: KEGG_CELL_CYCLE 0.001582278 0.01648352 -0.5352336
-#> 5: KEGG_NUCLEOTIDE_EXCISION_REPAIR 0.001757469 0.01648352 -0.5896497
-#> 6: KEGG_PROTEIN_EXPORT 0.001808318 0.01648352 -0.6735362
-#> 7: KEGG_AMINOACYL_TRNA_BIOSYNTHESIS 0.001718213 0.01648352 -0.5489421
-#> 8: KEGG_CITRATE_CYCLE_TCA_CYCLE 0.001821494 0.01648352 -0.5923935
-#> 9: KEGG_BASAL_TRANSCRIPTION_FACTORS 0.001773050 0.01648352 -0.5641948
-#> 10: KEGG_MISMATCH_REPAIR 0.007233273 0.03595970 -0.5977217
+#> 1: KEGG_SPLICEOSOME 0.001647446 0.01749271 -0.6155980
+#> 2: KEGG_RIBOSOME 0.001663894 0.01749271 -0.6438056
+#> 3: KEGG_DNA_REPLICATION 0.001798561 0.01749271 -0.7301502
+#> 4: KEGG_CELL_CYCLE 0.001626016 0.01749271 -0.5352336
+#> 5: KEGG_NUCLEOTIDE_EXCISION_REPAIR 0.001712329 0.01749271 -0.5896497
+#> 6: KEGG_PROTEIN_EXPORT 0.001818182 0.01749271 -0.6735362
+#> 7: KEGG_AMINOACYL_TRNA_BIOSYNTHESIS 0.001733102 0.01749271 -0.5489421
+#> 8: KEGG_CITRATE_CYCLE_TCA_CYCLE 0.001831502 0.01749271 -0.5923935
+#> 9: KEGG_BASAL_TRANSCRIPTION_FACTORS 0.001795332 0.01749271 -0.5641948
+#> 10: KEGG_MISMATCH_REPAIR 0.001818182 0.01749271 -0.5977217
#> NES nMoreExtreme size leadingEdge
-#> 1: -2.808508 0 116 SRSF6,SRSF3,SRSF7,HNRNPA1L2,PRPF38A,PPIL1,...
-#> 2: -2.724798 0 80 RPS2,RPL23A,RPL19,RPS12,RPS17,RPL10A,...
-#> 3: -2.613218 0 34 RFC3,RFC5,RPA2,POLE3,PRIM1,PCNA,...
-#> 4: -2.454800 0 121 YWHAQ,ORC6,DBF4,ANAPC10,BUB3,WEE1,...
-#> 5: -2.182026 0 41 XPA,RFC3,RFC5,RPA2,POLE3,ERCC8,...
-#> 6: -2.136133 0 22 SRP9,IMMP1L,HSPA5,SEC11C,OXA1L,SPCS2,...
-#> 7: -2.028489 0 40 MARS2,NARS,YARS,FARS2,YARS2,NARS2,...
-#> 8: -2.022340 0 29 MDH2,CS,DLST,SDHD,IDH3B,SUCLG2,...
-#> 9: -1.960503 0 31 TAF4,TAF7,TAF9,GTF2E1,GTF2F2,TBP,...
-#> 10: -1.895686 3 22 RFC3,RFC5,RPA2,PCNA,MSH2,RPA1,...
+#> 1: -2.762220 0 116 SRSF6,SRSF3,SRSF7,HNRNPA1L2,PRPF38A,PPIL1,...
+#> 2: -2.721909 0 80 RPS2,RPL23A,RPL19,RPS12,RPS17,RPL10A,...
+#> 3: -2.594473 0 34 RFC3,RFC5,RPA2,POLE3,PRIM1,PCNA,...
+#> 4: -2.422663 0 121 YWHAQ,ORC6,DBF4,ANAPC10,BUB3,WEE1,...
+#> 5: -2.212853 0 41 XPA,RFC3,RFC5,RPA2,POLE3,ERCC8,...
+#> 6: -2.146583 0 22 SRP9,IMMP1L,HSPA5,SEC11C,OXA1L,SPCS2,...
+#> 7: -2.036029 0 40 MARS2,NARS,YARS,FARS2,YARS2,NARS2,...
+#> 8: -2.005690 0 29 MDH2,CS,DLST,SDHD,IDH3B,SUCLG2,...
+#> 9: -1.965811 0 31 TAF4,TAF7,TAF9,GTF2E1,GTF2F2,TBP,...
+#> 10: -1.904960 0 22 RFC3,RFC5,RPA2,PCNA,MSH2,RPA1,...
@@ -451,7 +454,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/articles/pandaRApplicationinGTExData_files/figure-html/unnamed-chunk-14-1.png b/docs/articles/pandaRApplicationinGTExData_files/figure-html/unnamed-chunk-14-1.png
index 7e51e1db..7feac827 100644
Binary files a/docs/articles/pandaRApplicationinGTExData_files/figure-html/unnamed-chunk-14-1.png and b/docs/articles/pandaRApplicationinGTExData_files/figure-html/unnamed-chunk-14-1.png differ
diff --git a/docs/articles/yarn.html b/docs/articles/yarn.html
index 3e1bdfb3..6aba91f8 100644
--- a/docs/articles/yarn.html
+++ b/docs/articles/yarn.html
@@ -31,7 +31,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -90,6 +90,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -115,7 +118,7 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
Joseph N. Paulson & John Quackenbush
- 2022-06-27
+ 2022-08-16
Source: vignettes/yarn.Rmd
yarn.Rmd
@@ -176,13 +179,13 @@
First always remember to have the library loaded.
+library ( netZooR )
Download the GTEx gene count data as an ExpressionSet object or load the sample skin dataset.
For computational reasons we load the sample skin data instead of having the user download the
Check mis-annotation of gender or other phenotypes using group-specific genes
@@ -270,9 +273,9 @@
## [8] base
##
## other attached packages:
-## [1] RColorBrewer_1.1-2 netZooR_1.0.4 yarn_1.18.0
+## [1] RColorBrewer_1.1-2 netZooR_1.1.16 yarn_1.18.0
## [4] pandaR_1.19.5 Biobase_2.52.0 BiocGenerics_0.38.0
-## [7] reticulate_1.22 igraph_1.3.2
+## [7] reticulate_1.22 igraph_1.3.4
##
## loaded via a namespace (and not attached):
## [1] utf8_1.2.2 proto_1.0.0
@@ -383,7 +386,7 @@
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/authors.html b/docs/authors.html
index 66aacea7..db9b337b 100644
--- a/docs/authors.html
+++ b/docs/authors.html
@@ -6,7 +6,7 @@
-
Authors • netZooR
+
Citation and Authors • netZooR
@@ -39,7 +39,7 @@
-
+
@@ -58,7 +58,7 @@
-
+
@@ -71,7 +71,7 @@
netZooR
- 1.0.4
+ 1.1.16
@@ -130,6 +130,9 @@
YARN: Robust Multi-Tissue RNA-Seq Preprocessing and Normalization
+
+
+ Changelog
@@ -151,17 +154,31 @@
+
+
+
Ben Guebila, M, Wang, T., Quackenbush, J. The Network Zoo: a multilingual package for the inference and analysis of biological networks bioRxiv, 2022
+
@Article{,
+ title = {The Network Zoo: a multilingual package for the inference and analysis of biological networks},
+ author = {Marouen Ben Guebila and Tian Wang and John Quackenbush},
+ year = {2022},
+ journal = {bioRxiv},
+ doi = {10.1101/2022.05.30.494077},
+}
+
- Marouen Ben Guebila . Author, maintainer.
+
Marouen Ben Guebila . Author, maintainer.
- Tian Wang . Author.
+
Tian Wang . Author.
@@ -169,11 +186,11 @@ Authors
- Marieke Kuijjer . Author.
+
Marieke Kuijjer . Author.
- Megha Padi . Author.
+
Megha Padi . Author.
@@ -181,7 +198,11 @@ Authors
- Deborah Weighill . Author.
+
Des Weighill . Author.
+
+
+
+ Kate Shutta . Contributor.
@@ -194,7 +215,7 @@
Authors
-
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Deborah Weighill.
+
Developed by Marouen Ben Guebila, Tian Wang, John Platig, Marieke Kuijjer, Megha Padi, Rebekka Burkholz, Des Weighill.
diff --git a/docs/index.html b/docs/index.html
index d1aad454..b04ac34e 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -5,14 +5,14 @@
-
netZooR integrates tools for the inference and analysis of gene regulatory networks. • netZooR
+Unified methods for the inference and analysis of gene regulatory networks • netZooR
-
-
+
+