Navigation Menu

Skip to content

Commit

Permalink
#5056 implement Yolo2OutputLayer.computeScoreForExamples
Browse files — view the repository at this point in the history
  • Loading branch information
AlexDBlack committed Jul 25, 2018
1 parent 90fba44 commit a767452
Show file tree
Hide file tree
Showing 2 changed files with 38 additions and 5 deletions.
Expand Up @@ -89,8 +89,9 @@ public void testYoloActivateScoreBasic() {


MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.l2(0.01)
.list()
.layer(new ConvolutionLayer.Builder().nIn(1).nOut(1).kernelSize(1,1).build())
.layer(new ConvolutionLayer.Builder().nIn(depth).nOut(depth).kernelSize(1,1).build())
.layer(new Yolo2OutputLayer.Builder()
.boundingBoxPriors(bbPrior)
.build())
Expand Down Expand Up @@ -150,6 +151,16 @@ public void testYoloActivateScoreBasic() {
double score2 = y2impl.computeScore(0, 0, true, LayerWorkspaceMgr.noWorkspaces());

assertEquals(score, score2, 1e-8);

//Test computeScoreForExamples:
INDArray scoreArr1 = net.scoreExamples(new DataSet(input, labels), false);
INDArray scoreArr2 = net.scoreExamples(new DataSet(input, labels), true);
assertFalse(scoreArr1.isAttached());
assertFalse(scoreArr2.isAttached());

assertArrayEquals(new long[]{mb,1}, scoreArr1.shape());
assertArrayEquals(new long[]{mb,1}, scoreArr2.shape());
assertNotEquals(scoreArr1, scoreArr2);
}


Expand Down
Expand Up @@ -98,12 +98,12 @@ public Yolo2OutputLayer(NeuralNetConfiguration conf) {

@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) {
    // Gradient and score are computed together in one pass; here we only need the
    // epsilon (dL/dInput) to propagate backward. scoreOnly=false, computeScoreForExamples=false.
    // NOTE: the scraped diff retained both the old 2-arg and new 3-arg calls; only the
    // updated 3-arg call is kept here.
    INDArray epsOut = computeBackpropGradientAndScore(workspaceMgr, false, false);

    // This output layer has no trainable parameters of its own, hence EMPTY_GRADIENT.
    return new Pair<>(EMPTY_GRADIENT, epsOut);
}

private INDArray computeBackpropGradientAndScore(LayerWorkspaceMgr workspaceMgr, boolean scoreOnly){
private INDArray computeBackpropGradientAndScore(LayerWorkspaceMgr workspaceMgr, boolean scoreOnly, boolean computeScoreForExamples){
assertInputSet(true);
Preconditions.checkState(labels != null, "Cannot calculate gradients/score: labels are null");
Preconditions.checkState(labels.rank() == 4, "Expected rank 4 labels array with shape [minibatch, 4+numClasses, h, w]" +
Expand Down Expand Up @@ -235,6 +235,26 @@ private INDArray computeBackpropGradientAndScore(LayerWorkspaceMgr workspaceMgr,
//Calculate the loss:
ILossFunction lossConfidence = new LossL2();
IActivation identity = new ActivationIdentity();


if(computeScoreForExamples){
    // Per-example loss terms, mirroring the scalar computeScore() path below.
    // Each computeScoreArray call yields an unreduced score array over the 2d
    // reshaped (example*box*cell) rows — presumably one row per grid cell/box;
    // TODO(review): confirm against ILossFunction.computeScoreArray contract.
    INDArray positionLoss = layerConf().getLossPositionScale().computeScoreArray(labelXYCenter2d, predictedXYCenter2d, identity, mask1_ij_obj_2d);
    INDArray sizeScaleLoss = layerConf().getLossPositionScale().computeScoreArray(labelWHSqrt2d, predictedWHSqrt2d, identity, mask1_ij_obj_2d);
    INDArray confidenceLossPt1 = lossConfidence.computeScoreArray(labelConfidence2d, predictedConfidence2d, identity, mask1_ij_obj_2d);
    INDArray confidenceLossPt2 = lossConfidence.computeScoreArray(labelConfidence2d, predictedConfidence2d, identity, mask1_ij_noobj_2d).muli(lambdaNoObj);
    INDArray classPredictionLoss = layerConf().getLossClassPredictions().computeScoreArray(classLabels2d, classPredictionsPreSoftmax2d, new ActivationSoftmax(), mask1_ij_obj_2d);

    // lambdaCoord scales the combined position + width/height terms, as in the
    // YOLOv2 loss. BUG FIX: lambdaNoObj was previously applied to confidenceLossPt2
    // a second time here (it is already applied via muli(lambdaNoObj) above),
    // which double-counted the no-object penalty relative to the scalar path.
    INDArray scoreForExamples = positionLoss.addi(sizeScaleLoss).muli(lambdaCoord)
            .addi(confidenceLossPt1).addi(confidenceLossPt2)
            .addi(classPredictionLoss)
            .dup('c');

    // Reduce over all (box * gridH * gridW) contributions per example, then add the
    // network-level regularization terms to every example's score.
    scoreForExamples = scoreForExamples.reshape('c', mb, b*h*w).sum(1).addi(fullNetworkL1 + fullNetworkL2);

    return workspaceMgr.leverageTo(ArrayType.ACTIVATIONS, scoreForExamples);
}


double positionLoss = layerConf().getLossPositionScale().computeScore(labelXYCenter2d, predictedXYCenter2d, identity, mask1_ij_obj_2d, false );
double sizeScaleLoss = layerConf().getLossPositionScale().computeScore(labelWHSqrt2d, predictedWHSqrt2d, identity, mask1_ij_obj_2d, false);
double confidenceLoss = lossConfidence.computeScore(labelConfidence2d, predictedConfidence2d, identity, mask1_ij_obj_2d, false)
Expand Down Expand Up @@ -366,7 +386,7 @@ public double computeScore(double fullNetworkL1, double fullNetworkL2, boolean t
this.fullNetworkL1 = fullNetworkL1;
this.fullNetworkL2 = fullNetworkL2;

computeBackpropGradientAndScore(workspaceMgr, true);
computeBackpropGradientAndScore(workspaceMgr, true, false);
return score();
}

Expand Down Expand Up @@ -530,7 +550,9 @@ public Pair<Gradient, Double> gradientAndScore() {

@Override
public INDArray computeScoreForExamples(double fullNetworkL1, double fullNetworkL2, LayerWorkspaceMgr workspaceMgr) {
    // Stash the network regularization terms so the fused gradient/score routine can
    // add them to each example's score. NOTE: the stale
    // "throw new UnsupportedOperationException();" left over from before this commit
    // is removed — it preceded the new body and made it unreachable as scraped.
    this.fullNetworkL1 = fullNetworkL1;
    this.fullNetworkL2 = fullNetworkL2;

    // scoreOnly=false, computeScoreForExamples=true -> returns a [minibatch, 1] score array.
    return computeBackpropGradientAndScore(workspaceMgr, false, true);
}

@Override
Expand Down

0 comments on commit a767452

Please sign in to comment.