Skip to content

Commit

Permalink
Merge pull request #774 from null-a/grunt-ignore-ad-js-files
Browse files Browse the repository at this point in the history
Avoid need to exclude files from linter by hand.
  • Loading branch information
stuhlmueller committed Feb 14, 2017
2 parents 13ba93d + f6b404c commit c5f62d8
Show file tree
Hide file tree
Showing 5 changed files with 108 additions and 90 deletions.
35 changes: 27 additions & 8 deletions Gruntfile.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,21 @@
var _ = require('lodash');
var open = require('open');
var child_process = require('child_process');
var path = require('path');
var fs = require('fs');

// True when `fn` names a generated plain .js file whose `.ad.js`
// source also exists on disk, i.e. the file is the output of the
// ad code-gen step and should not be linted directly.
function isCodeGenFile(fn) {
  if (!isPlainJsFile(fn)) {
    return false;
  }
  return fs.existsSync(adSource(fn));
}

// A "plain" js file is one that ends in .js but not in .ad.js.
function isPlainJsFile(fn) {
  var hasJsExt = path.extname(fn) === '.js';
  var innerExt = path.extname(path.parse(fn).name);
  return hasJsExt && innerExt !== '.ad';
}

// Map a generated file name (foo.js) to its code-gen source (foo.ad.js).
function adSource(fn) {
  var stem = fn.slice(0, -3);
  return stem + '.ad.js';
}

var jslintSettings = {
options: {
Expand All @@ -16,8 +31,9 @@ var jslintSettings = {
src: [
'Gruntfile.js',
'src/header.wppl',
'src/**/!(dists|enumerate|elbo|eubo|ScoreAggregator).js'
]
'src/**/*.js'
],
filter: _.negate(isCodeGenFile)
},
test: {
src: ['tests/**/*.js']
Expand All @@ -35,12 +51,15 @@ module.exports = function(grunt) {
all: ['tests/test-*.js']
},
jshint: {
files: [
'Gruntfile.js',
'src/header.wppl',
'src/**/*.js',
'tests/**/*.js'
],
all: {
src: [
'Gruntfile.js',
'src/header.wppl',
'src/**/*.js',
'tests/**/*.js'
],
filter: _.negate(isCodeGenFile)
},
options: {
maxerr: 500,
camelcase: true,
Expand Down
69 changes: 34 additions & 35 deletions src/dists.ad.js
Original file line number Diff line number Diff line change
Expand Up @@ -286,18 +286,18 @@ function mvBernoulliScore(ps, x) {
var pSub1 = ad.tensor.sub(ps, 1);

return ad.tensor.sumreduce(
ad.tensor.log(
ad.tensor.log(
ad.tensor.add(
ad.tensor.mul(x, ps),
ad.tensor.mul(xSub1, pSub1))));
ad.tensor.mul(x, ps),
ad.tensor.mul(xSub1, pSub1))));
}


var MultivariateBernoulli = makeDistributionType({
name: 'MultivariateBernoulli',
desc: 'Distribution over a vector of independent Bernoulli variables. Each element ' +
'of the vector takes on a value in ``{0, 1}``. Note that this differs from ``Bernoulli`` which ' +
'has support ``{true, false}``.',
'of the vector takes on a value in ``{0, 1}``. Note that this differs from ``Bernoulli`` which ' +
'has support ``{true, false}``.',
params: [{name: 'ps', desc: 'probabilities', type: types.unitIntervalVector}],
mixins: [finiteSupport],
sample: function() {
Expand Down Expand Up @@ -393,7 +393,7 @@ var Gaussian = makeDistributionType({
// distribution described by params.
var mu = this.params.mu;
var sigma = this.params.sigma;
return ad.scalar.add(ad.scalar.mul(sigma, x), mu); }
return ad.scalar.add(ad.scalar.mul(sigma, x), mu); }
});


Expand All @@ -410,7 +410,6 @@ function mvGaussianSample(mu, cov) {
function mvGaussianScore(mu, cov, x) {
var _x = ad.value(x);
var _mu = ad.value(mu);
var _cov = ad.value(cov);
if (!util.isVector(_x) || !util.tensorEqDim0(_x, _mu)) {
return -Infinity;
}
Expand All @@ -422,7 +421,7 @@ function mvGaussianScore(mu, cov, x) {
var zT = ad.tensor.transpose(z);
var prec = ad.tensor.inverse(cov);
return ad.scalar.mul(-0.5, ad.scalar.add(
dLog2Pi, ad.scalar.add(
dLog2Pi, ad.scalar.add(
logDetCov,
ad.tensor.get(ad.tensor.dot(ad.tensor.dot(zT, prec), z), 0))));
}
Expand All @@ -431,8 +430,8 @@ function mvGaussianScore(mu, cov, x) {
var MultivariateGaussian = makeDistributionType({
name: 'MultivariateGaussian',
desc: 'Multivariate Gaussian distribution with full covariance matrix. ' +
'If ``mu`` has length d and ``cov`` is a ``d``-by-``d`` matrix, ' +
'then the distribution is over vectors of length ``d``.',
'If ``mu`` has length d and ``cov`` is a ``d``-by-``d`` matrix, ' +
'then the distribution is over vectors of length ``d``.',
params: [{name: 'mu', desc: 'mean', type: types.unboundedVector},
{name: 'cov', desc: 'covariance', type: types.posDefMatrix}],
wikipedia: 'Multivariate_normal_distribution',
Expand Down Expand Up @@ -476,7 +475,7 @@ function diagCovGaussianScore(mu, sigma, x) {
var z = ad.tensor.div(ad.tensor.sub(x, mu), sigma);

return ad.scalar.mul(-0.5, ad.scalar.add(
dLog2Pi, ad.scalar.add(
dLog2Pi, ad.scalar.add(
logDetCov,
ad.tensor.sumreduce(ad.tensor.mul(z, z)))));
}
Expand All @@ -485,9 +484,9 @@ function diagCovGaussianScore(mu, sigma, x) {
var DiagCovGaussian = makeDistributionType({
name: 'DiagCovGaussian',
desc: 'A distribution over tensors in which each element is independent and Gaussian distributed, ' +
'with its own mean and standard deviation. i.e. A multivariate Gaussian distribution with ' +
'diagonal covariance matrix. The distribution is over tensors that have the same shape as the ' +
'parameters ``mu`` and ``sigma``, which in turn must have the same shape as each other.',
'with its own mean and standard deviation. i.e. A multivariate Gaussian distribution with ' +
'diagonal covariance matrix. The distribution is over tensors that have the same shape as the ' +
'parameters ``mu`` and ``sigma``, which in turn must have the same shape as each other.',
params: [
{name: 'mu', desc: 'mean', type: types.unboundedTensor},
{name: 'sigma', desc: 'standard deviations', type: types.positiveTensor}
Expand Down Expand Up @@ -530,8 +529,8 @@ var squishToProbSimplex = function(x) {
var LogisticNormal = makeDistributionType({
name: 'LogisticNormal',
desc: 'A distribution over probability vectors obtained by transforming a random variable ' +
'drawn from ``DiagCovGaussian({mu: mu, sigma: sigma})``. If ``mu`` and ``sigma`` have length ``d`` ' +
'then the distribution is over probability vectors of length ``d+1``.',
'drawn from ``DiagCovGaussian({mu: mu, sigma: sigma})``. If ``mu`` and ``sigma`` have length ``d`` ' +
'then the distribution is over probability vectors of length ``d+1``.',
params: [
{name: 'mu', desc: 'mean', type: types.unboundedVector},
{name: 'sigma', desc: 'standard deviations', type: types.positiveVector}
Expand Down Expand Up @@ -736,10 +735,10 @@ var Cauchy = makeDistributionType({
var location = this.params.location;
return -LOG_PI - Math.log(scale) - Math.log(1 + Math.pow((x - location) / scale, 2));
},
base: function () {
base: function() {
return new Uniform({a: 0, b: 1});
},
transform: function (x) {
transform: function(x) {
'use ad';
var location = this.params.location;
var scale = this.params.scale;
Expand All @@ -761,7 +760,7 @@ function discreteScore(ps, i) {

// True when `val` is an integer in the half-open interval [0, dim),
// i.e. a valid index into the support of a Discrete distribution.
// (The diff artifact left both the old `};` and new `}` closers in
// this span; a function declaration takes a single `}`, no semicolon.)
function inDiscreteSupport(val, dim) {
  return (val === Math.floor(val)) && (0 <= val) && (val < dim);
}

function discreteScoreVector(probs, val) {
'use ad';
Expand Down Expand Up @@ -905,10 +904,10 @@ var Exponential = makeDistributionType({
'use ad';
return Math.log(this.params.a) - this.params.a * val;
},
base: function () {
base: function() {
return new Uniform({a: 0, b: 1});
},
transform: function (x) {
transform: function(x) {
'use ad';
return Math.log(x) / -this.params.a;
},
Expand Down Expand Up @@ -1238,20 +1237,20 @@ function dirichletScore(alpha, val) {
}

return ad.scalar.add(
ad.tensor.sumreduce(
ad.tensor.sumreduce(
ad.tensor.sub(
ad.tensor.mul(
ad.tensor.mul(
ad.tensor.sub(alpha, 1),
ad.tensor.log(val)),
ad.tensor.logGamma(alpha))),
ad.scalar.logGamma(ad.tensor.sumreduce(alpha)));
ad.tensor.logGamma(alpha))),
ad.scalar.logGamma(ad.tensor.sumreduce(alpha)));
}

var Dirichlet = makeDistributionType({
name: 'Dirichlet',
desc: 'Distribution over probability vectors. ' +
'If ``alpha`` has length ``d`` then the distribution ' +
'is over probability vectors of length ``d``.',
'If ``alpha`` has length ``d`` then the distribution ' +
'is over probability vectors of length ``d``.',
params: [{name: 'alpha', desc: 'concentration', type: types.positiveVector}],
wikipedia: true,
mixins: [continuousSupport, noHMC],
Expand Down Expand Up @@ -1399,9 +1398,9 @@ var SampleBasedMarginal = makeDistributionType({

// Render a marginal distribution as a human-readable string: one
// "value : prob" line per support element, sorted by decreasing
// probability. `dist` is a map from serialized value to an object
// carrying `prob` (per the _.map callback below).
// The .sort/.map/.join chain appeared twice in this span (removed +
// added diff lines); a single chain is the intended code.
function printMarginal(dist) {
  return 'Marginal:\n' + _.map(dist, function(obj, val) { return [val, obj.prob]; })
      .sort(function(a, b) { return b[1] - a[1]; })
      .map(function(pair) { return ' ' + pair[0] + ' : ' + pair[1]; })
      .join('\n');
}


Expand Down Expand Up @@ -1461,11 +1460,11 @@ var Delta = makeDistributionType({

// Collect the `meta` descriptor of every distribution constructor,
// sorted by distribution name for stable output. Reads the
// module-level `distributions` map defined later in this file.
// The _.chain pipeline appeared twice in this span (removed + added
// diff lines); a single pipeline is the intended code.
function metadata() {
  return _.chain(distributions)
      .toPairs() // pair[0] = key, pair[1] = value
      .sortBy(function(pair) { return pair[0]; })
      .map(function(pair) { return pair[1]; })
      .map(function(dist) { return dist.prototype.meta; })
      .value();
}

var distributions = {
Expand Down
52 changes: 26 additions & 26 deletions src/inference/elbo.ad.js
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ module.exports = function(env) {
var _score = ad.value(score);
if (!isFinite(_score)) { // Also catches NaN.
var msg = 'ELBO: The score of the previous sample under the ' +
source + ' program was ' + _score + '.';
source + ' program was ' + _score + '.';
if (_.isNaN(_score)) {
msg += ' Reducing the step size may help.';
}
Expand All @@ -98,26 +98,26 @@ module.exports = function(env) {
var grad = {};

return util.cpsLoop(
this.opts.samples,

// Loop body.
function(i, next) {
this.iter = i;
return this.estimateGradient(function(g, elbo_i) {
paramStruct.addEq(grad, g); // Accumulate gradient estimates.
elbo += elbo_i;
return next();
});
}.bind(this),

// Loop continuation.
function() {
paramStruct.divEq(grad, this.opts.samples);
elbo /= this.opts.samples;
this.updateBaselines();
env.coroutine = this.coroutine;
return this.cont(grad, elbo);
}.bind(this));
this.opts.samples,

// Loop body.
function(i, next) {
this.iter = i;
return this.estimateGradient(function(g, elbo_i) {
paramStruct.addEq(grad, g); // Accumulate gradient estimates.
elbo += elbo_i;
return next();
});
}.bind(this),

// Loop continuation.
function() {
paramStruct.divEq(grad, this.opts.samples);
elbo /= this.opts.samples;
this.updateBaselines();
env.coroutine = this.coroutine;
return this.cont(grad, elbo);
}.bind(this));

},

Expand Down Expand Up @@ -236,8 +236,8 @@ module.exports = function(env) {
// weight for any factors not seen during this step.
_.each(this.baselineUpdates, function(obj, address) {
baselines[address] = _.has(baselines, address) ?
decay * baselines[address] + (1 - decay) * obj.mean :
obj.mean;
decay * baselines[address] + (1 - decay) * obj.mean :
obj.mean;
}, this);
},

Expand All @@ -259,8 +259,8 @@ module.exports = function(env) {
var m = top(this.mapDataStack).multiplier;

var node = new SampleNode(
this.prevNode, logp, logq,
ret.reparam, a, dist, guideDist, val, m, this.opts.debugWeights);
this.prevNode, logp, logq,
ret.reparam, a, dist, guideDist, val, m, this.opts.debugWeights);

this.prevNode = node;
this.nodes.push(node);
Expand Down Expand Up @@ -297,7 +297,7 @@ module.exports = function(env) {
}
var m = top(this.mapDataStack).multiplier;
var node = new FactorNode(
this.prevNode, score, m, this.opts.debugWeights);
this.prevNode, score, m, this.opts.debugWeights);
this.prevNode = node;
this.nodes.push(node);
return k(s);
Expand Down
6 changes: 3 additions & 3 deletions src/inference/elbograph.js
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ function JoinNode() {
function propagateWeights(nodes) {
// Note that this modifies the weights of the graph in-place.
var i = nodes.length;
while(--i) {
while (--i) {
var node = nodes[i];
if (node instanceof SplitNode) {
// Account for (a) the fact that we (potentially) only looked
Expand All @@ -84,7 +84,7 @@ function propagateWeights(nodes) {
parent.weight += node.weight;
});
}
};
}

var edge = function(parent, child) {
return ' ' + parent.id + ' -> ' + child.id + ';';
Expand Down Expand Up @@ -118,7 +118,7 @@ function generateDot(nodes) {
});
});
return 'digraph {\n' + edges.join('\n') + '\n}\n';
};
}

module.exports = {
RootNode: RootNode,
Expand Down
36 changes: 18 additions & 18 deletions src/inference/eubo.ad.js
Original file line number Diff line number Diff line change
Expand Up @@ -60,24 +60,24 @@ module.exports = function(env) {

return util.cpsForEach(

// Body.
function(trace, i, traces, next) {
return this.estimateGradient(trace, function(g, eubo_i) {
paramStruct.addEq(grad, g); // Accumulate gradient estimates.
eubo += eubo_i;
return next();
});
}.bind(this),

// Continuation.
function() {
paramStruct.divEq(grad, traces.length);
eubo /= traces.length;
env.coroutine = this.coroutine;
return this.cont(grad, eubo);
}.bind(this),

traces);
// Body.
function(trace, i, traces, next) {
return this.estimateGradient(trace, function(g, eubo_i) {
paramStruct.addEq(grad, g); // Accumulate gradient estimates.
eubo += eubo_i;
return next();
});
}.bind(this),

// Continuation.
function() {
paramStruct.divEq(grad, traces.length);
eubo /= traces.length;
env.coroutine = this.coroutine;
return this.cont(grad, eubo);
}.bind(this),

traces);

},

Expand Down

0 comments on commit c5f62d8

Please sign in to comment.