Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Deprecate Backprop and Pretrain options when configuring ComputationGraph and MultiLayer #6296

Merged
merged 2 commits on Aug 28, 2018
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -28,10 +28,11 @@
import org.deeplearning4j.nn.conf.layers.samediff.SameDiffVertex;
import org.deeplearning4j.nn.conf.memory.MemoryReport;
import org.deeplearning4j.nn.conf.memory.NetworkMemoryReport;
import org.deeplearning4j.nn.layers.samediff.SameDiffGraphVertex;
import org.nd4j.base.Preconditions;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.dataset.api.MultiDataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.shade.jackson.databind.JsonNode;
import org.nd4j.shade.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
Expand Down Expand Up @@ -597,7 +598,9 @@ public static class GraphBuilder {
protected List<InputType> networkInputTypes = new ArrayList<>();
protected List<String> networkOutputs = new ArrayList<>();

@Deprecated
protected boolean pretrain = false;
@Deprecated
protected boolean backprop = true;
protected BackpropType backpropType = BackpropType.Standard;
protected int tbpttFwdLength = DEFAULT_TBPTT_LENGTH;
Expand Down Expand Up @@ -649,8 +652,11 @@ public GraphBuilder inputPreProcessor(String layer, InputPreProcessor processor)
/**
* Whether to do back prop (standard supervised learning) or not
*
* DEPRECATED: doesn't affect training any more. Use {@link org.deeplearning4j.nn.graph.ComputationGraph#fit(MultiDataSet)} when training for backprop.
*
* @param backprop whether to do back prop or not
*/
@Deprecated
public GraphBuilder backprop(boolean backprop) {
this.backprop = backprop;
return this;
Expand All @@ -659,8 +665,11 @@ public GraphBuilder backprop(boolean backprop) {
/**
* Whether to do layerwise pre training or not
*
* DEPRECATED: doesn't affect training any more. Use {@link org.deeplearning4j.nn.graph.ComputationGraph#pretrain(MultiDataSetIterator)} when training for layerwise pretraining.
*
* @param pretrain whether to do pre train or not
*/
@Deprecated
public GraphBuilder pretrain(boolean pretrain) {
this.pretrain = pretrain;
return this;
Expand Down
Expand Up @@ -25,7 +25,6 @@
import org.deeplearning4j.nn.conf.memory.LayerMemoryReport;
import org.deeplearning4j.nn.conf.memory.MemoryReport;
import org.deeplearning4j.nn.conf.memory.NetworkMemoryReport;
import org.deeplearning4j.nn.layers.AbstractLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.factory.Nd4j;
Expand Down Expand Up @@ -389,7 +388,9 @@ public static class Builder {
protected List<NeuralNetConfiguration> confs = new ArrayList<>();
protected double dampingFactor = 100;
protected Map<Integer, InputPreProcessor> inputPreProcessors = new HashMap<>();
@Deprecated
protected boolean pretrain = false;
@Deprecated
protected boolean backprop = true;
protected BackpropType backpropType = BackpropType.Standard;
protected int tbpttFwdLength = DEFAULT_TBPTT_LENGTH;
Expand Down Expand Up @@ -419,6 +420,9 @@ public Builder inputPreProcessors(Map<Integer, InputPreProcessor> processors) {

/**
* Whether to do back prop or not
*
* DEPRECATED: doesn't affect training any more. Use {@link org.deeplearning4j.nn.multilayer.MultiLayerNetwork#fit(DataSetIterator)} when training for backprop.
*
* @param backprop whether to do back prop or not
* @return
*/
Expand Down Expand Up @@ -506,6 +510,9 @@ public Builder tBPTTBackwardLength(int backwardLength) {

/**
* Whether to do pre train or not
*
* DEPRECATED: doesn't affect training any more. Use {@link org.deeplearning4j.nn.multilayer.MultiLayerNetwork#pretrain(DataSetIterator)} when training for layerwise pretraining.
*
* @param pretrain whether to do pre train or not
* @return builder pattern
*/
Expand Down