Add Javadoc for all remaining undocumented public methods and fields. #175

Merged · 5 commits · Sep 17, 2021
```diff
@@ -55,15 +55,25 @@ public enum EnsembleType {
         RF
     }
 
-    @Option(longName="ensemble-type",usage="Ensemble method, options are {ADABOOST, BAGGING, RF}.")
+    /**
+     * Ensemble method, options are {ADABOOST, BAGGING, RF}.
+     */
+    @Option(longName = "ensemble-type", usage = "Ensemble method, options are {ADABOOST, BAGGING, RF}.")
     public EnsembleType type = EnsembleType.BAGGING;
-    @Option(longName="ensemble-size",usage="Number of base learners in the ensemble.")
+    /**
+     * Number of base learners in the ensemble.
+     */
+    @Option(longName = "ensemble-size", usage = "Number of base learners in the ensemble.")
     public int ensembleSize = -1;
-    @Option(longName="ensemble-seed",usage="RNG seed.")
+    /**
+     * RNG seed.
+     */
+    @Option(longName = "ensemble-seed", usage = "RNG seed.")
     public long seed = Trainer.DEFAULT_SEED;
 
+    /**
+     * Wraps the supplied trainer using the ensemble trainer described by these options.
+     *
+     * @param trainer The trainer to wrap.
+     * @return An ensemble trainer.
+     */
@@ -72,21 +82,21 @@ public Trainer<Label> wrapTrainer(Trainer<Label> trainer) {
         switch (type) {
             case ADABOOST:
                 logger.info("Using Adaboost with " + ensembleSize + " members.");
-                return new AdaBoostTrainer(trainer,ensembleSize,seed);
+                return new AdaBoostTrainer(trainer, ensembleSize, seed);
             case BAGGING:
                 logger.info("Using Bagging with " + ensembleSize + " members.");
-                return new BaggingTrainer<>(trainer,new VotingCombiner(),ensembleSize,seed);
+                return new BaggingTrainer<>(trainer, new VotingCombiner(), ensembleSize, seed);
             case EXTRA_TREES:
                 if (trainer instanceof DecisionTreeTrainer) {
                     logger.info("Using Extra Trees with " + ensembleSize + " members.");
-                    return new ExtraTreesTrainer<>((DecisionTreeTrainer<Label>)trainer,new VotingCombiner(),ensembleSize,seed);
+                    return new ExtraTreesTrainer<>((DecisionTreeTrainer<Label>) trainer, new VotingCombiner(), ensembleSize, seed);
                 } else {
                     throw new IllegalArgumentException("ExtraTreesTrainer requires a DecisionTreeTrainer");
                 }
             case RF:
                 if (trainer instanceof DecisionTreeTrainer) {
                     logger.info("Using Random Forests with " + ensembleSize + " members.");
-                    return new RandomForestTrainer<>((DecisionTreeTrainer<Label>)trainer,new VotingCombiner(),ensembleSize,seed);
+                    return new RandomForestTrainer<>((DecisionTreeTrainer<Label>) trainer, new VotingCombiner(), ensembleSize, seed);
                 } else {
                     throw new IllegalArgumentException("RandomForestTrainer requires a DecisionTreeTrainer");
                 }
```
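For orientation, here is a hedged sketch of how these options are driven. The enclosing class name (`ClassificationEnsembleOptions`, with its package) and the CART base learner are assumptions; this diff does not show the file name.

```java
import org.tribuo.Trainer;
import org.tribuo.classification.Label;
import org.tribuo.classification.dtree.CARTClassificationTrainer;
// Assumed location of the options class shown in the diff above.
import org.tribuo.classification.ensemble.ClassificationEnsembleOptions;

public class EnsembleOptionsDemo {
    public static void main(String[] args) {
        ClassificationEnsembleOptions options = new ClassificationEnsembleOptions();
        options.type = ClassificationEnsembleOptions.EnsembleType.BAGGING;
        options.ensembleSize = 10; // ten base learners
        options.seed = 42L;
        // wrapTrainer selects the ensemble trainer via the switch above.
        Trainer<Label> ensemble = options.wrapTrainer(new CARTClassificationTrainer());
        System.out.println(ensemble);
    }
}
```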
```diff
@@ -207,8 +207,17 @@ public TPFP(List<Integer> falsePos, List<Integer> truePos, List<Double> threshol
      * Not yet a record, but it will be one day.
      */
     public static class ROC {
+        /**
+         * The false positive rate at the corresponding threshold.
+         */
         public final double[] fpr;
+        /**
+         * The true positive rate at the corresponding threshold.
+         */
         public final double[] tpr;
+        /**
+         * The threshold values.
+         */
         public final double[] thresholds;
 
         /**
@@ -231,8 +240,17 @@ public ROC(double[] fpr, double[] tpr, double[] thresholds) {
      * Not yet a record, but it will be one day.
      */
     public static class PRCurve {
+        /**
+         * The precision at the corresponding threshold.
+         */
         public final double[] precision;
+        /**
+         * The recall at the corresponding threshold.
+         */
         public final double[] recall;
+        /**
+         * The threshold values.
+         */
         public final double[] thresholds;
 
         /**
```
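The new field docs make the parallel-array contract explicit: index i of each array describes the same operating point on the curve. A small sketch using the `ROC` constructor visible in this hunk; treating `LabelEvaluationUtil` as the enclosing class is an assumption.

```java
import org.tribuo.classification.evaluation.LabelEvaluationUtil; // assumed location

public class RocDemo {
    public static void main(String[] args) {
        // Hand-built curve; real values come from an evaluator.
        double[] fpr        = {0.0, 0.1, 0.4, 1.0};
        double[] tpr        = {0.0, 0.6, 0.9, 1.0};
        double[] thresholds = {1.0, 0.8, 0.5, 0.0};
        LabelEvaluationUtil.ROC roc = new LabelEvaluationUtil.ROC(fpr, tpr, thresholds);
        // Each index is one operating point across all three arrays.
        for (int i = 0; i < roc.thresholds.length; i++) {
            System.out.printf("threshold=%.2f fpr=%.2f tpr=%.2f%n",
                    roc.thresholds[i], roc.fpr[i], roc.tpr[i]);
        }
    }
}
```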
```diff
@@ -102,16 +102,30 @@ public static final class Context extends MetricContext<Label> {
 
         private final ConfusionMatrix<Label> cm;
 
+        /**
+         * Constructs a context and computes the confusion matrix using the specified model and predictions.
+         * @param model The model.
+         * @param predictions The predictions.
+         */
         public Context(Model<Label> model, List<Prediction<Label>> predictions) {
             super(model, predictions);
             this.cm = new LabelConfusionMatrix(model.getOutputIDInfo(), predictions);
         }
 
+        /**
+         * Constructs a context and computes the confusion matrix using the specified sequence model and predictions.
+         * @param model The model.
+         * @param predictions The predictions.
+         */
         public Context(SequenceModel<Label> model, List<Prediction<Label>> predictions) {
             super(model, predictions);
             this.cm = new LabelConfusionMatrix(model.getOutputIDInfo(), predictions);
         }
 
+        /**
+         * Gets the confusion matrix.
+         * @return The confusion matrix.
+         */
         public ConfusionMatrix<Label> getCM() {
             return cm;
         }
```
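Both constructors funnel into the same confusion matrix, so callers differ only in whether they hold a `Model` or a `SequenceModel`. A hedged sketch of the non-sequence path; `LabelMetric` as the enclosing class and the package paths are assumptions.

```java
import java.util.List;

import org.tribuo.Dataset;
import org.tribuo.Model;
import org.tribuo.Prediction;
import org.tribuo.classification.Label;
// Assumed location for the Context type in this hunk.
import org.tribuo.classification.evaluation.LabelMetric;

public class ContextDemo {
    // Builds the metric context and prints its confusion matrix.
    static void printConfusion(Model<Label> model, Dataset<Label> test) {
        List<Prediction<Label>> predictions = model.predict(test);
        LabelMetric.Context ctx = new LabelMetric.Context(model, predictions);
        System.out.println(ctx.getCM());
    }
}
```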
```diff
@@ -33,10 +33,10 @@
 public final class GaussianLabelDataSource extends DemoLabelDataSource {
 
     @Config(mandatory = true, description = "2d mean of the first Gaussian.")
-    public double[] firstMean;
+    private double[] firstMean;
 
     @Config(mandatory = true, description = "4 element covariance matrix of the first Gaussian.")
-    public double[] firstCovarianceMatrix;
+    private double[] firstCovarianceMatrix;
 
     @Config(mandatory = true, description = "2d mean of the second Gaussian.")
     private double[] secondMean;
```
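This hunk also tightens two `@Config` fields from public to private, matching `secondMean` below them; the configuration system can still inject them by name. A self-contained illustration (not Tribuo code) of why public mutable array fields are worth hiding:

```java
public class VisibilityDemo {
    static class PointSource {
        public double[] mean = {0.0, 0.0}; // leaked mutable state
    }

    public static void main(String[] args) {
        PointSource src = new PointSource();
        double[] alias = src.mean; // any caller can keep a reference...
        alias[0] = 42.0;           // ...and silently corrupt the source
        System.out.println(src.mean[0]); // prints 42.0
    }
}
```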
```diff
@@ -40,6 +40,12 @@ private LabelledDataGenerator() {}
 
     private static final LabelFactory labelFactory = new LabelFactory();
 
+    /**
+     * Generates a train/test dataset pair which is dense in the features;
+     * each example has 4 features, {A,B,C,D}, and there are 4 classes,
+     * {Foo,Bar,Baz,Quux}.
+     * @return A pair of datasets.
+     */
     public static Pair<Dataset<Label>,Dataset<Label>> denseTrainTest() {
         return denseTrainTest(-1.0);
     }
@@ -99,6 +105,12 @@ public static Pair<Dataset<Label>,Dataset<Label>> denseTrainTest(double negate)
         return new Pair<>(train,test);
     }
 
+    /**
+     * Generates a pair of datasets where the features are sparse
+     * and unknown features appear in the test data. It has the same
+     * 4 classes, {Foo,Bar,Baz,Quux}.
+     * @return A pair of train and test datasets.
+     */
     public static Pair<Dataset<Label>,Dataset<Label>> sparseTrainTest() {
         return sparseTrainTest(-1.0);
     }
@@ -173,6 +185,11 @@ public static Pair<Dataset<Label>,Dataset<Label>> sparseTrainTest(double negate)
         return new Pair<>(train,test);
     }
 
+    /**
+     * Generates a pair of datasets with sparse features and unknown features
+     * in the test data. Has binary labels, {Foo,Bar}.
+     * @return A pair of train and test datasets.
+     */
     public static Pair<Dataset<Label>,Dataset<Label>> binarySparseTrainTest() {
         return binarySparseTrainTest(-1.0);
     }
```
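A hedged usage sketch for the newly documented generators; the package path for `LabelledDataGenerator` is an assumption.

```java
import com.oracle.labs.mlrg.olcut.util.Pair;
import org.tribuo.Dataset;
import org.tribuo.classification.Label;
import org.tribuo.classification.example.LabelledDataGenerator; // assumed location

public class GeneratorDemo {
    public static void main(String[] args) {
        // The dense 4-feature, 4-class train/test pair described in the new Javadoc.
        Pair<Dataset<Label>,Dataset<Label>> pair = LabelledDataGenerator.denseTrainTest();
        Dataset<Label> train = pair.getA();
        Dataset<Label> test = pair.getB();
        System.out.println("train=" + train.size() + " test=" + test.size());
    }
}
```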
```diff
@@ -39,6 +39,13 @@
 public abstract class ConfidencePredictingSequenceModel extends SequenceModel<Label> {
     private static final long serialVersionUID = 1L;
 
+    /**
+     * Constructs a ConfidencePredictingSequenceModel with the supplied parameters.
+     * @param name The model name.
+     * @param description The model provenance.
+     * @param featureIDMap The feature domain.
+     * @param labelIDMap The output domain.
+     */
     protected ConfidencePredictingSequenceModel(String name, ModelProvenance description, ImmutableFeatureMap featureIDMap, ImmutableOutputInfo<Label> labelIDMap) {
         super(name,description,featureIDMap,labelIDMap);
     }
@@ -81,7 +88,13 @@ private static <SUB extends Subsequence> Double multiplyWeights(List<Prediction<
      */
     public static class Subsequence implements Serializable {
         private static final long serialVersionUID = 1L;
+        /**
+         * The subsequence start index.
+         */
         public final int begin;
+        /**
+         * The subsequence end index.
+         */
         public final int end;
 
         /**
```
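The `begin`/`end` docs leave the interval convention implicit. In the sketch below it is assumed half-open, [begin, end), which matches the usual Java convention, and the `(begin, end)` constructor is assumed from the surrounding source; the import path is also an assumption.

```java
import org.tribuo.classification.sequence.ConfidencePredictingSequenceModel; // assumed location

public class SubsequenceDemo {
    public static void main(String[] args) {
        // Assumed (begin, end) constructor; covers tokens 2..4 if half-open.
        ConfidencePredictingSequenceModel.Subsequence sub =
                new ConfidencePredictingSequenceModel.Subsequence(2, 5);
        System.out.println("begin=" + sub.begin + " end=" + sub.end);
    }
}
```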