This repository was archived by the owner on Aug 15, 2019. It is now read-only.
Merged
16 changes: 16 additions & 0 deletions demos/intro/index.html
@@ -0,0 +1,16 @@
<!-- Copyright 2017 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================-->
<h1>Introduction tutorial code</h1>

<h3>Check the JavaScript console!</h3>

<script src="bundle.js"></script>
139 changes: 139 additions & 0 deletions demos/intro/intro.ts
@@ -0,0 +1,139 @@
/* Copyright 2017 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

import {NDArrayMathGPU, Scalar, Array1D, Array2D, Graph, Session, SGDOptimizer, InCPUMemoryShuffledInputProviderBuilder, FeedEntry, CostReduction} from '../deeplearnjs';

// This file parallels (some of) the code in the introduction tutorial.

/**
 * 'NDArrayMathGPU' section of tutorial
 */
{
  const math = new NDArrayMathGPU();

  math.scope((keep, track) => {
    const a = track(Array2D.new([2, 2], [1.0, 2.0, 3.0, 4.0]));
    const b = track(Array2D.new([2, 2], [0.0, 2.0, 4.0, 6.0]));

    // Non-blocking math calls.
    const diff = math.sub(a, b);
    const squaredDiff = math.elementWiseMul(diff, diff);
    const sum = math.sum(squaredDiff);
    const size = Scalar.new(a.size);
    const average = math.divide(sum, size);

    // Blocking call to actually read the values from average. Waits until the
    // GPU has finished executing the operations before returning values.
    // average is a Scalar so we use .get()
    console.log('mean squared difference: ' + average.get());
  });
}

{
  /**
   * 'Graphs and Tensors' section of tutorial
   */

  const g = new Graph();

  // Placeholders are input containers. This is the container for where we will
  // feed an input NDArray when we execute the graph.
  const inputShape = [3];
  const inputTensor = g.placeholder('input', inputShape);

  const labelShape = [1];
  const labelTensor = g.placeholder('label', labelShape);

  // Variables are containers that hold a value that can be updated from
  // training.
  // Here we initialize the multiplier variable randomly.
  const multiplier = g.variable('multiplier', Array2D.randNormal([1, 3]));

  // Top level graph methods take Tensors and return Tensors.
  const outputTensor = g.matmul(multiplier, inputTensor);
  const costTensor = g.meanSquaredCost(outputTensor, labelTensor);

  // Tensors, like NDArrays, have a shape attribute.
  console.log(outputTensor.shape);

  /**
   * 'Session and FeedEntry' section of the tutorial.
   */

  const learningRate = .00001;
  const batchSize = 3;
  const math = new NDArrayMathGPU();

  const session = new Session(g, math);
  const optimizer = new SGDOptimizer(learningRate);

  const inputs: Array1D[] = [
    Array1D.new([1.0, 2.0, 3.0]),
    Array1D.new([10.0, 20.0, 30.0]),
    Array1D.new([100.0, 200.0, 300.0])
  ];

  const labels: Array1D[] = [
    Array1D.new([4.0]),
    Array1D.new([40.0]),
    Array1D.new([400.0])
  ];

  // Shuffles inputs and labels and keeps them mutually in sync.
  const shuffledInputProviderBuilder =
      new InCPUMemoryShuffledInputProviderBuilder([inputs, labels]);
  const [inputProvider, labelProvider] =
      shuffledInputProviderBuilder.getInputProviders();

  // Maps tensors to InputProviders.
  const feedEntries: FeedEntry[] = [
    {tensor: inputTensor, data: inputProvider},
    {tensor: labelTensor, data: labelProvider}
  ];

  const NUM_BATCHES = 10;
  for (let i = 0; i < NUM_BATCHES; i++) {
    // Wrap session.train in a scope so the cost gets cleaned up automatically.
    math.scope(() => {
      // Train takes a cost tensor to minimize. Trains one batch. Returns the
      // average cost as a Scalar.
      const cost = session.train(
          costTensor, feedEntries, batchSize, optimizer, CostReduction.MEAN);

      console.log('last average cost (' + i + '): ' + cost.get());
    });
  }

  // Wrap session.eval in a scope so the intermediate values get cleaned up
  // automatically.
  math.scope((keep, track) => {
    const testInput = track(Array1D.new([0.1, 0.2, 0.3]));

    // session.eval can take NDArrays as input data.
    const testFeedEntries: FeedEntry[] = [
      {tensor: inputTensor, data: testInput}
    ];

    const testOutput = session.eval(outputTensor, testFeedEntries);

    console.log('---inference output---');
    console.log('shape: ' + testOutput.shape);
    console.log('value: ' + testOutput.get(0));
  });

  // Cleanup training data.
  inputs.forEach(input => input.dispose());
  labels.forEach(label => label.dispose());
}
11 changes: 11 additions & 0 deletions demos/mnist/index.html
@@ -1,3 +1,14 @@
<!-- Copyright 2017 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================-->
<h1>MNIST demo</h1>
<div>Test accuracy: <span id="accuracy"></span></div>
<script src="bundle.js"></script>
64 changes: 41 additions & 23 deletions docs/tutorials/intro.md
@@ -14,6 +16,16 @@ functions that can be used directly.
* TOC
{:toc}

You can find the code that supplements this tutorial
[here](https://github.com/PAIR-code/deeplearnjs/tree/master/demos/intro).

Run it yourself with:
```sh
./scripts/watch-demo demos/intro/intro.ts
```

And visit `http://localhost:8080/demos/intro/`.

For the purposes of the documentation, we will use TypeScript code examples.
For vanilla JavaScript, you may need to remove TypeScript-only syntax such as
type annotations; `const` and `let` are standard modern JavaScript and can stay.
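
For example, a minimal before/after sketch (illustrative only, using the `inputs` array that appears later in this tutorial):

```js
// TypeScript, as written in this tutorial:
//   const inputs: Array1D[] = [Array1D.new([1.0, 2.0, 3.0])];

// Equivalent vanilla JavaScript: drop the type annotation, keep everything else.
const inputs = [Array1D.new([1.0, 2.0, 3.0])];
```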
@@ -83,7 +93,8 @@ math.scope((keep, track) => {

// Blocking call to actually read the values from average. Waits until the
// GPU has finished executing the operations before returning values.
console.log(average.get()); // average is a Scalar so we use .get()
// average is a Scalar so we use .get()
console.log(average.get());
});
```

@@ -212,7 +223,8 @@ const inputTensor = g.placeholder('input', inputShape);
const labelShape = [1];
const labelTensor = g.placeholder('label', labelShape);

// Variables are containers that hold a value that can be updated from training.
// Variables are containers that hold a value that can be updated from
// training.
// Here we initialize the multiplier variable randomly.
const multiplier = g.variable('multiplier', Array2D.randNormal([1, 3]));

@@ -243,10 +255,10 @@ keeping them in sync.
Training with the `Graph` object from above:

```js
const learningRate = .001;
const batchSize = 2;

const learningRate = .00001;
const batchSize = 3;
const math = new NDArrayMathGPU();

const session = new Session(g, math);
const optimizer = new SGDOptimizer(learningRate);

@@ -257,42 +269,44 @@ const inputs: Array1D[] = [
];

const labels: Array1D[] = [
Array1D.new([2.0, 6.0, 12.0]),
Array1D.new([20.0, 60.0, 120.0]),
Array1D.new([200.0, 600.0, 1200.0])
Array1D.new([4.0]),
Array1D.new([40.0]),
Array1D.new([400.0])
];

// Shuffles inputs and labels and keeps them mutually in sync.
const shuffledInputProviderBuilder =
new InCPUMemoryShuffledInputProviderBuilder([inputs, labels]);
new InCPUMemoryShuffledInputProviderBuilder([inputs, labels]);
const [inputProvider, labelProvider] =
shuffledInputProviderBuilder.getInputProviders();
shuffledInputProviderBuilder.getInputProviders();

// Maps tensors to InputProviders.
const feedEntries: FeedEntry[] = [
{tensor: inputTensor, data: inputProvider},
{tensor: labelTensor, data: labelProvider}
];

// Wrap session.train in a scope so the cost gets cleaned up automatically.
math.scope(() => {
// Train takes a cost tensor to minimize. Trains one batch. Returns the
// average cost as a Scalar.
const cost = session.train(
costTensor, feedEntries, batchSize, optimizer, CostReduction.MEAN);
const NUM_BATCHES = 10;
for (let i = 0; i < NUM_BATCHES; i++) {
// Wrap session.train in a scope so the cost gets cleaned up automatically.
math.scope(() => {
// Train takes a cost tensor to minimize. Trains one batch. Returns the
// average cost as a Scalar.
const cost = session.train(
costTensor, feedEntries, batchSize, optimizer, CostReduction.MEAN);

console.log('last average cost: ' + cost.get());
});
console.log('last average cost (' + i + '): ' + cost.get());
});
}
```

After training, we can infer through the graph:

```js

// Wrap session.eval in a scope so the intermediate values get cleaned up
// automatically.
math.scope((keep, track) => {
const testInput = track(Array1D.new([1.0, 2.0, 3.0]));
const testInput = track(Array1D.new([0.1, 0.2, 0.3]));

// session.eval can take NDArrays as input data.
const testFeedEntries: FeedEntry[] = [
@@ -301,10 +315,14 @@ math.scope((keep, track) => {

const testOutput = session.eval(outputTensor, testFeedEntries);

console.log('inference output:');
console.log(testOutput.shape);
console.log(testOutput.getValues());
console.log('---inference output---');
console.log('shape: ' + testOutput.shape);
console.log('value: ' + testOutput.get(0));
});

// Cleanup training data.
inputs.forEach(input => input.dispose());
labels.forEach(label => label.dispose());
```

Want to learn more? Read [these tutorials](index.md).