
nrows/ncols => nrow/ncol
haifengl committed Dec 16, 2020
1 parent 5383568 commit abfd4eb
Showing 126 changed files with 1,436 additions and 1,436 deletions.
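The commit appears to be a purely mechanical rename of the dimension accessors — additions and deletions balance exactly, and every hunk below follows the same pattern. A minimal before/after sketch of the caller-side effect (the RenameDemo class is illustrative, not part of the commit; nrow()/ncol() are the post-commit names shown in the hunks):

    import smile.data.DataFrame;

    public class RenameDemo {
        // Reports the dimensions of a DataFrame under the renamed API.
        static void dims(DataFrame df) {
            // Before this commit: df.nrows() and df.ncols()
            int n = df.nrow();   // number of rows (samples)
            int p = df.ncol();   // number of columns (features)
            System.out.printf("%d x %d%n", n, p);
        }
    }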
6 changes: 3 additions & 3 deletions core/src/main/java/smile/base/cart/CART.java
@@ -152,14 +152,14 @@ public CART(DataFrame x, StructField y, int maxDepth, int maxNodes, int nodeSize
this.x = x;
this.response = y;
this.schema = x.schema();
- this.importance = new double[x.ncols()];
+ this.importance = new double[x.ncol()];
this.maxDepth = maxDepth;
this.maxNodes = maxNodes;
this.nodeSize = nodeSize;
this.mtry = mtry;

int n = x.size();
- int p = x.ncols();
+ int p = x.ncol();

if (mtry < 1 || mtry > p) {
logger.debug("Invalid mtry. Use all features.");
@@ -225,7 +225,7 @@ private int size(Node node) {
*/
public static int[][] order(DataFrame x) {
int n = x.size();
- int p = x.ncols();
+ int p = x.ncol();
StructType schema = x.schema();

double[] a = new double[n];
4 changes: 2 additions & 2 deletions core/src/main/java/smile/base/mlp/Layer.java
@@ -95,8 +95,8 @@ public Layer(int n, int p) {
* @param bias the bias vector.
*/
public Layer(Matrix weight, double[] bias) {
- this.n = weight.nrows();
- this.p = weight.ncols();
+ this.n = weight.nrow();
+ this.p = weight.ncol();
this.weight = weight;
this.bias = bias;

2 changes: 1 addition & 1 deletion core/src/main/java/smile/classification/AdaBoost.java
@@ -252,7 +252,7 @@ public static AdaBoost fit(Formula formula, DataFrame data, int ntrees, int maxD
}
}

- double[] importance = new double[x.ncols()];
+ double[] importance = new double[x.ncol()];
for (DecisionTree tree : trees) {
double[] imp = tree.importance();
for (int i = 0; i < imp.length; i++) {
2 changes: 1 addition & 1 deletion core/src/main/java/smile/classification/DecisionTree.java
@@ -272,7 +272,7 @@ public DecisionTree(DataFrame x, int[] y, StructField response, int k, SplitRule
LeafNode node = new DecisionNode(count);
this.root = node;

- Optional<Split> split = findBestSplit(node, 0, index.length, new boolean[x.ncols()]);
+ Optional<Split> split = findBestSplit(node, 0, index.length, new boolean[x.ncol()]);

if (maxNodes == Integer.MAX_VALUE) {
// deep-first split
4 changes: 2 additions & 2 deletions core/src/main/java/smile/classification/FLD.java
@@ -115,7 +115,7 @@ public FLD(double[] mean, double[][] mu, Matrix scaling, IntSet labels) {
this.scaling = scaling;
this.labels = labels;

- int L = scaling.ncols();
+ int L = scaling.ncol();
this.mean = new double[L];
scaling.tv(mean, this.mean);

@@ -357,7 +357,7 @@ public double[] project(double[] x) {

@Override
public double[][] project(double[][] x) {
- double[][] y = new double[x.length][scaling.ncols()];
+ double[][] y = new double[x.length][scaling.ncol()];

for (int i = 0; i < x.length; i++) {
if (x[i].length != p) {
14 changes: 7 additions & 7 deletions core/src/main/java/smile/classification/GradientTreeBoost.java
@@ -312,7 +312,7 @@ public double[] importance() {
* Train L2 tree boost.
*/
private static GradientTreeBoost train2(Formula formula, DataFrame x, ClassLabels codec, int[][] order, int ntrees, int maxDepth, int maxNodes, int nodeSize, double shrinkage, double subsample) {
- int n = x.nrows();
+ int n = x.nrow();
int k = codec.k;
int[] y = codec.y;

@@ -333,15 +333,15 @@ private static GradientTreeBoost train2(Formula formula, DataFrame x, ClassLabel
sampling(samples, permutation, nc, y, subsample);

logger.info("Training {} tree", Strings.ordinal(t+1));
- RegressionTree tree = new RegressionTree(x, loss, field, maxDepth, maxNodes, nodeSize, x.ncols(), samples, order);
+ RegressionTree tree = new RegressionTree(x, loss, field, maxDepth, maxNodes, nodeSize, x.ncol(), samples, order);
trees[t] = tree;

for (int i = 0; i < n; i++) {
h[i] += shrinkage * tree.predict(x.get(i));
}
}

- double[] importance = new double[x.ncols()];
+ double[] importance = new double[x.ncol()];
for (RegressionTree tree : trees) {
double[] imp = tree.importance();
for (int i = 0; i < imp.length; i++) {
@@ -358,7 +358,7 @@ private static GradientTreeBoost train2(Formula formula, DataFrame x, ClassLabel
private static GradientTreeBoost traink(Formula formula, DataFrame x, ClassLabels codec, int[][] order,
int ntrees, int maxDepth, int maxNodes, int nodeSize,
double shrinkage, double subsample) {
- int n = x.nrows();
+ int n = x.nrow();
int k = codec.k;
int[] y = codec.y;

@@ -391,7 +391,7 @@ private static GradientTreeBoost traink(Formula formula, DataFrame x, ClassLabel
for (int j = 0; j < k; j++) {
sampling(samples, permutation, nc, y, subsample);

- RegressionTree tree = new RegressionTree(x, loss[j], field, maxDepth, maxNodes, nodeSize, x.ncols(), samples, order);
+ RegressionTree tree = new RegressionTree(x, loss[j], field, maxDepth, maxNodes, nodeSize, x.ncol(), samples, order);
forest[j][t] = tree;

double[] hj = h[j];
@@ -401,7 +401,7 @@ private static GradientTreeBoost traink(Formula formula, DataFrame x, ClassLabel
}
}

- double[] importance = new double[x.ncols()];
+ double[] importance = new double[x.ncol()];
for (RegressionTree[] grove : forest) {
for (RegressionTree tree : grove) {
double[] imp = tree.importance();
@@ -575,7 +575,7 @@ public int predict(Tuple x, double[] posteriori) {
public int[][] test(DataFrame data) {
DataFrame x = formula.x(data);

- int n = x.nrows();
+ int n = x.nrow();
int ntrees = trees != null ? trees.length : forest[0].length;
int[][] prediction = new int[ntrees][n];

2 changes: 1 addition & 1 deletion core/src/main/java/smile/classification/QDA.java
@@ -189,7 +189,7 @@ public static QDA fit(double[][] x, int[] y, double[] priori, double tol) {
Matrix[] cov = DiscriminantAnalysis.cov(x, y, da.mu, da.ni);

int k = cov.length;
- int p = cov[0].nrows();
+ int p = cov[0].nrow();
double[][] eigen = new double[k][];
Matrix[] scaling = new Matrix[k];

6 changes: 3 additions & 3 deletions core/src/main/java/smile/classification/RandomForest.java
@@ -299,15 +299,15 @@ public static RandomForest fit(Formula formula, DataFrame data, int ntrees, int
DataFrame x = formula.x(data);
BaseVector y = formula.y(data);

- if (mtry > x.ncols()) {
+ if (mtry > x.ncol()) {
throw new IllegalArgumentException("Invalid number of variables to split on at a node of the tree: " + mtry);
}

- int mtryFinal = mtry > 0 ? mtry : (int) Math.sqrt(x.ncols());
+ int mtryFinal = mtry > 0 ? mtry : (int) Math.sqrt(x.ncol());

ClassLabels codec = ClassLabels.fit(y);
final int k = codec.k;
- final int n = x.nrows();
+ final int n = x.nrow();

final int[] weight = classWeight != null ? classWeight : Collections.nCopies(k, 1).stream().mapToInt(i -> i).toArray();

@@ -295,7 +295,7 @@ public static Binomial binomial(SparseDataset x, int[] y, double lambda, double
throw new IllegalArgumentException("Invalid maximum number of iterations: " + maxIter);
}

- int p = x.ncols();
+ int p = x.ncol();
ClassLabels codec = ClassLabels.fit(y);
int k = codec.k;
y = codec.y;
@@ -366,7 +366,7 @@ public static Multinomial multinomial(SparseDataset x, int[] y, double lambda, d
throw new IllegalArgumentException("Invalid maximum number of iterations: " + maxIter);
}

- int p = x.ncols();
+ int p = x.ncol();
ClassLabels codec = ClassLabels.fit(y);
int k = codec.k;
y = codec.y;
@@ -475,7 +475,7 @@ static class BinomialObjective implements DifferentiableMultivariateFunction {
this.x = x;
this.y = y;
this.lambda = lambda;
- this.p = x.ncols();
+ this.p = x.ncol();

partitionSize = Integer.parseInt(System.getProperty("smile.data.partition.size", "1000"));
partitions = x.size() / partitionSize + (x.size() % partitionSize == 0 ? 0 : 1);
@@ -593,7 +593,7 @@ static class MultinomialObjective implements DifferentiableMultivariateFunction
this.y = y;
this.k = k;
this.lambda = lambda;
- this.p = x.ncols();
+ this.p = x.ncol();

partitionSize = Integer.parseInt(System.getProperty("smile.data.partition.size", "1000"));
partitions = x.size() / partitionSize + (x.size() % partitionSize == 0 ? 0 : 1);
@@ -105,7 +105,7 @@ public static DeterministicAnnealing fit(double[][] data, int Kmax, double alpha
Matrix cov = new Matrix(MathEx.cov(data, centroids[0]));
double[] ev = new double[d];
Arrays.fill(ev, 1.0);
- double lambda = PowerIteration.eigen(cov, ev, 0.0f, 1E-4, Math.max(20, 2 * cov.nrows()));
+ double lambda = PowerIteration.eigen(cov, ev, 0.0f, 1E-4, Math.max(20, 2 * cov.nrow()));
double T = 2.0 * lambda + 0.01;

int k = 2;
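Context for the hunk above: PowerIteration.eigen estimates the largest eigenvalue of the cluster covariance, and the next line sets the initial temperature just above twice that eigenvalue — the critical temperature at which the first cluster split occurs in deterministic annealing. A self-contained sketch of plain power iteration for a symmetric positive semi-definite matrix such as a covariance (this is a sketch, not Smile's implementation):

    // Returns the dominant eigenvalue of the symmetric PSD matrix a,
    // updating the nonzero starting vector v in place toward the
    // dominant eigenvector.
    static double powerIteration(double[][] a, double[] v, double tol, int maxIter) {
        int n = a.length;
        double lambda = 0.0;
        for (int iter = 0; iter < maxIter; iter++) {
            double[] av = new double[n];             // av = A * v
            for (int i = 0; i < n; i++)
                for (int j = 0; j < n; j++) av[i] += a[i][j] * v[j];
            double next = 0.0;                       // for unit v, ||A v|| -> lambda_max
            for (double x : av) next += x * x;
            next = Math.sqrt(next);
            for (int i = 0; i < n; i++) v[i] = av[i] / next;
            if (Math.abs(next - lambda) < tol * next) return next;
            lambda = next;
        }
        return lambda;
    }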
@@ -88,7 +88,7 @@ public static SpectralClustering fit(Matrix W, int k, int maxIter, double tol) {
throw new IllegalArgumentException("Invalid number of clusters: " + k);
}

- int n = W.nrows();
+ int n = W.nrow();
double[] D = W.colSums();
for (int i = 0; i < n; i++) {
if (D[i] == 0.0) {
2 changes: 1 addition & 1 deletion core/src/main/java/smile/feature/Standardizer.java
@@ -89,7 +89,7 @@ public static Standardizer fit(DataFrame data) {
double[] mu = new double[schema.length()];
double[] std = new double[schema.length()];

- int n = data.nrows();
+ int n = data.nrow();
for (int i = 0; i < mu.length; i++) {
if (schema.field(i).isNumeric()) {
final int col = i;
6 changes: 3 additions & 3 deletions core/src/main/java/smile/glm/GLM.java
@@ -341,11 +341,11 @@ public static GLM fit(Formula formula, DataFrame data, Model model, double tol,
}

Matrix X = formula.matrix(data, true);
- Matrix XW = new Matrix(X.nrows(), X.ncols());
+ Matrix XW = new Matrix(X.nrow(), X.ncol());
double[] y = formula.y(data).toDoubleArray();

- int n = X.nrows();
- int p = X.ncols();
+ int n = X.nrow();
+ int p = X.ncol();

if (n <= p) {
throw new IllegalArgumentException(String.format("The input matrix is not over determined: %d rows, %d columns", n, p));
2 changes: 1 addition & 1 deletion core/src/main/java/smile/imputation/LLSImputation.java
@@ -124,7 +124,7 @@ public void impute(double[][] data) throws MissingValueImputationException {
}

boolean sufficient = true;
- for (int m = 0; m < A.nrows(); m++) {
+ for (int m = 0; m < A.nrow(); m++) {
for (int n = 0; n < k; n++) {
if (Double.isNaN(A.get(m, n))) {
sufficient = false;
18 changes: 9 additions & 9 deletions core/src/main/java/smile/manifold/LLE.java
@@ -190,7 +190,7 @@ public static LLE of(double[][] data, int k, int d) {
int offset = eigen.wr[eigen.wr.length - 1] < 1E-12 ? 2 : 1;
double[][] coordinates = new double[n][d];
for (int j = d; --j >= 0; ) {
- int c = V.ncols() - j - offset;
+ int c = V.ncol() - j - offset;
for (int i = 0; i < n; i++) {
coordinates[i][j] = V.get(i, c);
}
@@ -215,20 +215,20 @@ private static class M extends DMatrix {
public M(SparseMatrix Wt) {
this.Wt = Wt;

- x = new double[Wt.nrows()];
- Wx = new double[Wt.nrows()];
- Wtx = new double[Wt.ncols()];
- WtWx = new double[Wt.nrows()];
+ x = new double[Wt.nrow()];
+ Wx = new double[Wt.nrow()];
+ Wtx = new double[Wt.ncol()];
+ WtWx = new double[Wt.nrow()];
}

@Override
- public int nrows() {
-     return Wt.nrows();
+ public int nrow() {
+     return Wt.nrow();
}

@Override
- public int ncols() {
-     return nrows();
+ public int ncol() {
+     return nrow();
}

@Override
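The M class above shows how the rename reaches matrix-free operators: DMatrix subclasses now override nrow()/ncol(), and M returns nrow() from ncol() because the operator it represents is square. A self-contained sketch of that pattern under the renamed accessors (MatrixLike is a stand-in interface, not Smile's DMatrix, and GramOperator illustrates W'W rather than LLE's exact operator):

    // Stand-in interface; Smile's DMatrix declares more methods than this.
    interface MatrixLike {
        int nrow();
        int ncol();
        double[] mv(double[] x);  // matrix-vector product
    }

    // Represents A = W'W without ever forming A: A x = W'(W x).
    class GramOperator implements MatrixLike {
        private final double[][] w;  // dense stand-in for the sparse W

        GramOperator(double[][] w) { this.w = w; }

        @Override public int nrow() { return w[0].length; }
        @Override public int ncol() { return nrow(); }  // W'W is square

        @Override public double[] mv(double[] x) {
            int m = w.length, n = w[0].length;
            double[] wx = new double[m];             // wx = W x
            for (int i = 0; i < m; i++)
                for (int j = 0; j < n; j++) wx[i] += w[i][j] * x[j];
            double[] y = new double[n];              // y = W' wx
            for (int j = 0; j < n; j++)
                for (int i = 0; i < m; i++) y[j] += w[i][j] * wx[i];
            return y;
        }
    }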
2 changes: 1 addition & 1 deletion core/src/main/java/smile/manifold/LaplacianEigenmap.java
@@ -190,7 +190,7 @@ public static <T> LaplacianEigenmap of(T[] data, Distance<T> distance, int k, in
double[][] coordinates = new double[n][d];
for (int j = d; --j >= 0; ) {
double norm = 0.0;
- int c = V.ncols() - j - 2;
+ int c = V.ncol() - j - 2;
for (int i = 0; i < n; i++) {
double xi = V.get(i, c) * D[i];
coordinates[i][j] = xi;
2 changes: 1 addition & 1 deletion core/src/main/java/smile/manifold/UMAP.java
@@ -464,7 +464,7 @@ private static double[][] spectralLayout(AdjacencyList nng, int d) {
Matrix V = eigen.Vr;
double[][] coordinates = new double[n][d];
for (int j = d; --j >= 0; ) {
- int c = V.ncols() - j - 2;
+ int c = V.ncol() - j - 2;
for (int i = 0; i < n; i++) {
double x = V.get(i, c);
coordinates[i][j] = x;
4 changes: 2 additions & 2 deletions core/src/main/java/smile/projection/ICA.java
@@ -230,8 +230,8 @@ private static Matrix whiten(double[][] data) {
// covariance matrix on centered data.
double[] mean = MathEx.rowMeans(data);
Matrix X = new Matrix(data);
- int n = X.nrows();
- int m = X.ncols();
+ int n = X.nrow();
+ int m = X.ncol();
for (int j = 0; j < m; j++) {
for (int i = 0; i < n; i++) {
X.sub(i, j, mean[i]);
2 changes: 1 addition & 1 deletion core/src/main/java/smile/projection/KPCA.java
@@ -112,7 +112,7 @@ public KPCA(T[] data, MercerKernel<T> kernel, double[] mean, double mu, double[]
this.coordinates = coordinates;
this.latent = latent;
this.projection = projection;
- this.p = projection.nrows();
+ this.p = projection.nrow();
}

/**
8 changes: 4 additions & 4 deletions core/src/main/java/smile/projection/LinearProjection.java
@@ -35,8 +35,8 @@ public interface LinearProjection extends Projection<double[]> {
@Override
default double[] project(double[] x) {
Matrix A = getProjection();
- int p = A.nrows();
- int n = A.ncols();
+ int p = A.nrow();
+ int n = A.ncol();

if (x.length != n) {
throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x.length, n));
Expand All @@ -50,8 +50,8 @@ default double[] project(double[] x) {
@Override
default double[][] project(double[][] x) {
Matrix A = getProjection();
- int p = A.nrows();
- int n = A.ncols();
+ int p = A.nrow();
+ int n = A.ncol();

if (x[0].length != n) {
throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x[0].length, n));
6 changes: 3 additions & 3 deletions core/src/main/java/smile/projection/ProbabilisticPCA.java
@@ -83,7 +83,7 @@ public ProbabilisticPCA(double noise, double[] mu, Matrix loading, Matrix projec
this.loading = loading;
this.projection = projection;

- pmu = new double[projection.nrows()];
+ pmu = new double[projection.nrow()];
projection.mv(mu, pmu);
}

@@ -124,7 +124,7 @@ public double[] project(double[] x) {
throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x.length, mu.length));
}

- double[] y = new double[projection.nrows()];
+ double[] y = new double[projection.nrow()];
projection.mv(x, y);
MathEx.sub(y, pmu);
return y;
@@ -136,7 +136,7 @@ public double[][] project(double[][] x) {
throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x[0].length, mu.length));
}

- double[][] y = new double[x.length][projection.nrows()];
+ double[][] y = new double[x.length][projection.nrow()];
for (int i = 0; i < x.length; i++) {
projection.mv(x[i], y[i]);
MathEx.sub(y[i], pmu);
6 changes: 3 additions & 3 deletions core/src/main/java/smile/regression/ElasticNet.java
@@ -114,8 +114,8 @@ public static LinearModel fit(Formula formula, DataFrame data, double lambda1, d
Matrix X = formula.matrix(data, false);
double[] y = formula.y(data).toDoubleArray();

- int n = X.nrows();
- int p = X.ncols();
+ int n = X.nrow();
+ int p = X.ncol();
double[] center = X.colMeans();
double[] scale = X.colSds();

@@ -124,7 +124,7 @@ public static LinearModel fit(Formula formula, DataFrame data, double lambda1, d
System.arraycopy(y, 0, y2, 0, y.length);

// Scales the original data array and pads a weighted identity matrix
- Matrix X2 = new Matrix(X.nrows()+ p, p);
+ Matrix X2 = new Matrix(X.nrow()+ p, p);
double padding = c * Math.sqrt(lambda2);
for (int j = 0; j < p; j++) {
for (int i = 0; i < n; i++) {
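Context for the hunk above: the padded rows implement the standard reduction of the ridge part of the elastic net to ordinary least squares on augmented data. Assuming the padded block is a multiple of sqrt(lambda2) times the identity, as `double padding = c * Math.sqrt(lambda2)` suggests, the identity being exploited is (up to the scaling constant c):

    \left\| \begin{pmatrix} y \\ 0 \end{pmatrix}
          - \begin{pmatrix} X \\ \sqrt{\lambda_2}\, I_p \end{pmatrix} \beta \right\|_2^2
    = \left\| y - X\beta \right\|_2^2 + \lambda_2 \left\| \beta \right\|_2^2

The l1 penalty lambda1 is then applied to the augmented system, so the elastic net is solved as a lasso on an (n + p) x p design matrix — which is why X2 is allocated with X.nrow() + p rows.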
