lstm: Replace NULL by nullptr (#1415)
Signed-off-by: Stefan Weil <sw@weilnetz.de>
stweil authored and zdenop committed Mar 25, 2018
1 parent 3fa25d9 commit 8f7be2e
Showing 20 changed files with 196 additions and 196 deletions.
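For context (not part of this commit's diff): NULL in C++ is just an integer constant, while nullptr, introduced in C++11, has its own type (std::nullptr_t) that converts only to pointer types, so it cannot be silently taken for an int. Below is a minimal sketch of the overload pitfall this avoids; the report() overloads are hypothetical and not from the Tesseract codebase.

#include <cstddef>
#include <iostream>

static void report(int)         { std::cout << "int overload\n"; }
static void report(const char*) { std::cout << "pointer overload\n"; }

int main() {
  // report(NULL);  // NULL expands to 0 or 0L: picks the int overload or is ambiguous
  report(0);        // the int overload wins even where a null pointer was intended
  report(nullptr);  // std::nullptr_t converts only to pointers: the pointer overload wins
  return 0;
}

The snippet requires -std=c++11 or later.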
24 changes: 12 additions & 12 deletions lstm/fullyconnected.cpp
@@ -38,7 +38,7 @@ namespace tesseract {

FullyConnected::FullyConnected(const STRING& name, int ni, int no,
NetworkType type)
-  : Network(type, name, ni, no), external_source_(NULL), int_mode_(false) {
+  : Network(type, name, ni, no), external_source_(nullptr), int_mode_(false) {
}

FullyConnected::~FullyConnected() {
@@ -147,8 +147,8 @@ void FullyConnected::Forward(bool debug, const NetworkIO& input,
int thread_id = 0;
#endif
double* temp_line = temp_lines[thread_id];
- const double* d_input = NULL;
- const int8_t* i_input = NULL;
+ const double* d_input = nullptr;
+ const int8_t* i_input = nullptr;
if (input.int_mode()) {
i_input = input.i(t);
} else {
@@ -184,16 +184,16 @@ void FullyConnected::SetupForward(const NetworkIO& input,
acts_.Resize(input, no_);
// Source_ is a transposed copy of input. It isn't needed if provided.
external_source_ = input_transpose;
- if (external_source_ == NULL) source_t_.ResizeNoInit(ni_, input.Width());
+ if (external_source_ == nullptr) source_t_.ResizeNoInit(ni_, input.Width());
}
}

void FullyConnected::ForwardTimeStep(const double* d_input, const int8_t* i_input,
int t, double* output_line) {
// input is copied to source_ line-by-line for cache coherency.
- if (IsTraining() && external_source_ == NULL && d_input != NULL)
+ if (IsTraining() && external_source_ == nullptr && d_input != nullptr)
source_t_.WriteStrided(t, d_input);
- if (d_input != NULL)
+ if (d_input != nullptr)
weights_.MatrixDotVector(d_input, output_line);
else
weights_.MatrixDotVector(i_input, output_line);
@@ -210,7 +210,7 @@ void FullyConnected::ForwardTimeStep(const double* d_input, const int8_t* i_input,
} else if (type_ == NT_SOFTMAX || type_ == NT_SOFTMAX_NO_CTC) {
SoftmaxInPlace(no_, output_line);
} else if (type_ != NT_LINEAR) {
- ASSERT_HOST("Invalid fully-connected type!" == NULL);
+ ASSERT_HOST("Invalid fully-connected type!" == nullptr);
}
}

@@ -240,11 +240,11 @@ bool FullyConnected::Backward(bool debug, const NetworkIO& fwd_deltas,
for (int t = 0; t < width; ++t) {
int thread_id = 0;
#endif
- double* backprop = NULL;
+ double* backprop = nullptr;
if (needs_to_backprop_) backprop = temp_backprops[thread_id];
double* curr_errors = errors[thread_id];
BackwardTimeStep(fwd_deltas, t, curr_errors, errors_t.get(), backprop);
- if (backprop != NULL) {
+ if (backprop != nullptr) {
back_deltas->WriteTimeStep(t, backprop);
}
}
@@ -278,14 +278,14 @@ void FullyConnected::BackwardTimeStep(const NetworkIO& fwd_deltas, int t,
type_ == NT_LINEAR)
fwd_deltas.ReadTimeStep(t, curr_errors); // fwd_deltas are the errors.
else
- ASSERT_HOST("Invalid fully-connected type!" == NULL);
+ ASSERT_HOST("Invalid fully-connected type!" == nullptr);
// Generate backprop only if needed by the lower layer.
- if (backprop != NULL) weights_.VectorDotMatrix(curr_errors, backprop);
+ if (backprop != nullptr) weights_.VectorDotMatrix(curr_errors, backprop);
errors_t->WriteStrided(t, curr_errors);
}

void FullyConnected::FinishBackward(const TransposedArray& errors_t) {
- if (external_source_ == NULL)
+ if (external_source_ == nullptr)
weights_.SumOuterTransposed(errors_t, source_t_, true);
else
weights_.SumOuterTransposed(errors_t, *external_source_, true);
42 changes: 21 additions & 21 deletions lstm/lstm.cpp
@@ -103,7 +103,7 @@ LSTM::LSTM(const STRING& name, int ni, int ns, int no, bool two_dimensional,
ns_(ns),
nf_(0),
is_2d_(two_dimensional),
- softmax_(NULL),
+ softmax_(nullptr),
input_width_(0) {
if (two_dimensional) na_ += ns_;
if (type_ == NT_LSTM || type_ == NT_LSTM_SUMMARY) {
@@ -128,7 +128,7 @@ StaticShape LSTM::OutputShape(const StaticShape& input_shape) const {
StaticShape result = input_shape;
result.set_depth(no_);
if (type_ == NT_LSTM_SUMMARY) result.set_width(1);
- if (softmax_ != NULL) return softmax_->OutputShape(result);
+ if (softmax_ != nullptr) return softmax_->OutputShape(result);
return result;
}

@@ -150,7 +150,7 @@ void LSTM::SetEnableTraining(TrainingState state) {
}
training_ = state;
}
- if (softmax_ != NULL) softmax_->SetEnableTraining(state);
+ if (softmax_ != nullptr) softmax_->SetEnableTraining(state);
}

// Sets up the network for training. Initializes weights using weights of
@@ -163,7 +163,7 @@ int LSTM::InitWeights(float range, TRand* randomizer) {
num_weights_ += gate_weights_[w].InitWeightsFloat(
ns_, na_ + 1, TestFlag(NF_ADAM), range, randomizer);
}
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
num_weights_ += softmax_->InitWeights(range, randomizer);
}
return num_weights_;
@@ -172,7 +172,7 @@ int LSTM::InitWeights(float range, TRand* randomizer) {
// Recursively searches the network for softmaxes with old_no outputs,
// and remaps their outputs according to code_map. See network.h for details.
int LSTM::RemapOutputs(int old_no, const std::vector<int>& code_map) {
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
num_weights_ -= softmax_->num_weights();
num_weights_ += softmax_->RemapOutputs(old_no, code_map);
}
@@ -185,7 +185,7 @@ void LSTM::ConvertToInt() {
if (w == GFS && !Is2D()) continue;
gate_weights_[w].ConvertToInt();
}
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
softmax_->ConvertToInt();
}
}
@@ -198,7 +198,7 @@ void LSTM::DebugWeights() {
msg.add_str_int(" Gate weights ", w);
gate_weights_[w].Debug2D(msg.string());
}
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
softmax_->DebugWeights();
}
}
@@ -211,7 +211,7 @@ bool LSTM::Serialize(TFile* fp) const {
if (w == GFS && !Is2D()) continue;
if (!gate_weights_[w].Serialize(IsTraining(), fp)) return false;
}
- if (softmax_ != NULL && !softmax_->Serialize(fp)) return false;
+ if (softmax_ != nullptr && !softmax_->Serialize(fp)) return false;
return true;
}

@@ -252,7 +252,7 @@ void LSTM::Forward(bool debug, const NetworkIO& input,
NetworkScratch* scratch, NetworkIO* output) {
input_map_ = input.stride_map();
input_width_ = input.Width();
- if (softmax_ != NULL)
+ if (softmax_ != nullptr)
output->ResizeFloat(input, no_);
else if (type_ == NT_LSTM_SUMMARY)
output->ResizeXTo1(input, no_);
@@ -286,13 +286,13 @@ void LSTM::Forward(bool debug, const NetworkIO& input,
// Used only if a softmax LSTM.
NetworkScratch::FloatVec softmax_output;
NetworkScratch::IO int_output;
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
softmax_output.Init(no_, scratch);
ZeroVector<double>(no_, softmax_output);
int rounded_softmax_inputs = gate_weights_[CI].RoundInputs(ns_);
if (input.int_mode())
int_output.Resize2d(true, 1, rounded_softmax_inputs, scratch);
- softmax_->SetupForward(input, NULL);
+ softmax_->SetupForward(input, nullptr);
}
NetworkScratch::FloatVec curr_input;
curr_input.Init(na_, scratch);
@@ -311,7 +311,7 @@ void LSTM::Forward(bool debug, const NetworkIO& input,
int mod_t = Modulo(t, buf_width); // Current timestep.
// Setup the padded input in source.
source_.CopyTimeStepGeneral(t, 0, ni_, input, t, 0);
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
source_.WriteTimeStepPart(t, ni_, nf_, softmax_output);
}
source_.WriteTimeStepPart(t, ni_ + nf_, ns_, curr_output);
@@ -393,12 +393,12 @@ void LSTM::Forward(bool debug, const NetworkIO& input,
}
FuncMultiply<HFunc>(curr_state, temp_lines[GO], ns_, curr_output);
if (IsTraining()) state_.WriteTimeStep(t, curr_state);
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
if (input.int_mode()) {
int_output->WriteTimeStepPart(0, 0, ns_, curr_output);
- softmax_->ForwardTimeStep(NULL, int_output->i(0), t, softmax_output);
+ softmax_->ForwardTimeStep(nullptr, int_output->i(0), t, softmax_output);
} else {
- softmax_->ForwardTimeStep(curr_output, NULL, t, softmax_output);
+ softmax_->ForwardTimeStep(curr_output, nullptr, t, softmax_output);
}
output->WriteTimeStep(t, softmax_output);
if (type_ == NT_LSTM_SOFTMAX_ENCODED) {
@@ -480,10 +480,10 @@ bool LSTM::Backward(bool debug, const NetworkIO& fwd_deltas,
for (int w = 0; w < WT_COUNT; ++w) {
gate_errors_t[w].Init(ns_, width, scratch);
}
- // Used only if softmax_ != NULL.
+ // Used only if softmax_ != nullptr.
NetworkScratch::FloatVec softmax_errors;
NetworkScratch::GradientStore softmax_errors_t;
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
softmax_errors.Init(no_, scratch);
softmax_errors_t.Init(no_, width, scratch);
}
@@ -529,7 +529,7 @@ bool LSTM::Backward(bool debug, const NetworkIO& fwd_deltas,
} else {
ZeroVector<double>(ns_, outputerr);
}
- } else if (softmax_ == NULL) {
+ } else if (softmax_ == nullptr) {
fwd_deltas.ReadTimeStep(t, outputerr);
} else {
softmax_->BackwardTimeStep(fwd_deltas, t, softmax_errors,
@@ -656,7 +656,7 @@ bool LSTM::Backward(bool debug, const NetworkIO& fwd_deltas,
if (w == GFS && !Is2D()) continue;
gate_weights_[w].SumOuterTransposed(*gate_errors_t[w], *source_t, false);
}
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
softmax_->FinishBackward(*softmax_errors_t);
}
return needs_to_backprop_;
@@ -673,7 +673,7 @@ void LSTM::Update(float learning_rate, float momentum, float adam_beta,
if (w == GFS && !Is2D()) continue;
gate_weights_[w].Update(learning_rate, momentum, adam_beta, num_samples);
}
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
softmax_->Update(learning_rate, momentum, adam_beta, num_samples);
}
#if DEBUG_DETAIL > 3
@@ -692,7 +692,7 @@ void LSTM::CountAlternators(const Network& other, double* same,
if (w == GFS && !Is2D()) continue;
gate_weights_[w].CountAlternators(lstm->gate_weights_[w], same, changed);
}
- if (softmax_ != NULL) {
+ if (softmax_ != nullptr) {
softmax_->CountAlternators(*lstm->softmax_, same, changed);
}
}
2 changes: 1 addition & 1 deletion lstm/lstm.h
@@ -65,7 +65,7 @@ class LSTM : public Network {
spec.add_str_int("LS", ns_);
else if (type_ == NT_LSTM_SOFTMAX_ENCODED)
spec.add_str_int("LE", ns_);
- if (softmax_ != NULL) spec += softmax_->spec();
+ if (softmax_ != nullptr) spec += softmax_->spec();
return spec;
}

