
Merge pull request #783 from jpchen/master
Add Laplace and TensorLaplace Distributions
stuhlmueller committed Mar 10, 2017
2 parents 0ba29cc + c8996fd commit 3d0e4ac
Showing 10 changed files with 154 additions and 7 deletions.
17 changes: 17 additions & 0 deletions docs/primitive-distributions.txt
@@ -97,6 +97,15 @@

`Wikipedia entry <https://en.wikipedia.org/wiki/Normal_distribution>`__

.. js:function:: Laplace({location: ..., scale: ...})

* location: *(real)*
* scale: *(real (0, Infinity))*

Distribution over ``[-Infinity, Infinity]``

`Wikipedia entry <https://en.wikipedia.org/wiki/Laplace_distribution>`__

.. js:function:: LogisticNormal({mu: ..., sigma: ...})

* mu: mean *(vector)*
@@ -165,6 +174,14 @@

Distribution over a tensor of independent Gaussian variables.

.. js:function:: TensorLaplace({location: ..., scale: ..., dims: ...})

* location: *(real)*
* scale: *(real (0, Infinity))*
* dims: dimension of tensor *(int (>=1) array)*

Distribution over a tensor of independent Laplace variables.

.. js:function:: Uniform({a: ..., b: ...})

* a: lower bound *(real)*
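For context, the two new doc entries above describe distribution constructors that become available to WebPPL programs. A minimal usage sketch (not part of this commit; it assumes the standard sample helper and the T tensor utilities, and mirrors the test models further down in this diff):

var model = function() {
  // One Laplace draw with location 0 and scale 1.
  var x = sample(Laplace({location: 0, scale: 1}));
  // A length-3 tensor of independent Laplace(0, 1) draws.
  var t = sample(TensorLaplace({location: 0, scale: 1, dims: [3]}));
  return x + T.sumreduce(t);
};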
2 changes: 1 addition & 1 deletion package.json
@@ -13,7 +13,7 @@
"test": "tests"
},
"dependencies": {
"adnn": "^2.0.3",
"adnn": "^2.0.4",
"amdefine": "^1.0.0",
"ast-types": "^0.8.13",
"colors": "^1.1.2",
101 changes: 96 additions & 5 deletions src/dists.ad.js
@@ -390,11 +390,12 @@ var Gaussian = makeDistributionType({
return new Gaussian({mu: 0, sigma: 1});
},
transform: function(x) {
'use ad';
// Transform a sample x from the base distribution to the
// distribution described by params.
var mu = this.params.mu;
var sigma = this.params.sigma;
return ad.scalar.add(ad.scalar.mul(sigma, x), mu); }
return sigma * x + mu; }
});


@@ -517,6 +518,45 @@ var DiagCovGaussian = makeDistributionType({
}
});

function laplaceSample(location, scale) {
// Generated from goo.gl/3BxCGd (wiki)
var z = util.random();
var u = z - 0.5;
return location - scale * Math.sign(u) * Math.log(1 - 2 * Math.abs(u));
}

function laplaceScore(location, scale, x) {
'use ad';
return -1 * (Math.log(2 * scale) + Math.abs(x - location) / scale);
}
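For reference (not part of the diff): laplaceSample draws via the standard inverse-CDF construction and laplaceScore returns the Laplace log density, with location \(\mu\) and scale \(b\):

\[
X = \mu - b\,\operatorname{sgn}(U)\,\ln(1 - 2|U|), \qquad U \sim \mathrm{Uniform}(-\tfrac{1}{2}, \tfrac{1}{2}),
\]
\[
\log p(x \mid \mu, b) = -\log(2b) - \frac{|x - \mu|}{b}.
\]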

var Laplace = makeDistributionType({
name: 'Laplace',
desc: 'Distribution over ``[-Infinity, Infinity]``',
params: [{name: 'location', desc: '', type: types.unboundedReal},
{name: 'scale', desc: '', type: types.positiveReal}],
wikipedia: true,
mixins: [continuousSupport],
sample: function() {
return laplaceSample(ad.value(this.params.location), ad.value(this.params.scale));
},
score: function(val) {
return laplaceScore(this.params.location, this.params.scale, val);
},
base: function() {
return new Laplace({location: 0, scale: 1});
},
transform: function(x) {
'use ad';
var location = this.params.location;
var scale = this.params.scale;
return scale * x + location;
},
support: function() {
return { lower: -Infinity, upper: Infinity };
}
});

var squishToProbSimplex = function(x) {
// Map a d dimensional vector onto the d simplex.
var d = ad.value(x).dims[0];
@@ -554,8 +594,6 @@ var LogisticNormal = makeDistributionType({
var _mu = ad.value(mu);
var _val = ad.value(val);



if (!util.isVector(_val) || _val.dims[0] - 1 !== _mu.dims[0]) {
return -Infinity;
}
@@ -661,8 +699,6 @@ var IspNormal = makeDistributionType({
}
});



function tensorGaussianSample(mu, sigma, dims) {
var x = new Tensor(dims);
var n = x.length;
@@ -717,6 +753,57 @@ var TensorGaussian = makeDistributionType({
}
});

function tensorLaplaceSample(location, scale, dims) {
var x = new Tensor(dims);
var n = x.length;
while (n--) {
x.data[n] = laplaceSample(location, scale);
}
return x;
}

function tensorLaplaceScore(location, scale, dims, x) {
'use ad';
var _x = ad.value(x);

if (!util.isTensor(_x) || !_.isEqual(_x.dims, dims)) {
return -Infinity;
}

var l = _x.length;
var ln2b = l * Math.log(2 * scale);
var xMuB = T.sumreduce(T.abs(T.sub(x, location))) / scale;
return -1 * (ln2b + xMuB);
}
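Again for reference (not part of the diff): for an l-element tensor of independent components, the score above is the sum of the per-component log densities,

\[
\log p(x \mid \mu, b) = -\,l \log(2b) - \frac{1}{b} \sum_{i=1}^{l} |x_i - \mu|,
\]

which is exactly what the ln2b and xMuB terms compute.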

var TensorLaplace = makeDistributionType({
name: 'TensorLaplace',
desc: 'Distribution over a tensor of independent Laplace variables.',
params: [
{name: 'location', desc: '', type: types.unboundedReal},
{name: 'scale', desc: '', type: types.positiveReal},
{name: 'dims', desc: 'dimension of tensor', type: types.array(types.positiveInt)}
],
mixins: [continuousSupport],
sample: function() {
var location = ad.value(this.params.location);
var scale = ad.value(this.params.scale);
var dims = this.params.dims;
return tensorLaplaceSample(location, scale, dims);
},
score: function(x) {
return tensorLaplaceScore(this.params.location, this.params.scale, this.params.dims, x);
},
base: function() {
var dims = this.params.dims;
return new TensorLaplace({location: 0, scale: 1, dims: dims});
},
transform: function(x) {
var location = this.params.location;
var scale = this.params.scale;
return ad.tensor.add(ad.tensor.mul(x, scale), location);
}
});
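As a hedged illustration (not part of the commit, with method names assumed from the options object passed to makeDistributionType): the base/transform pair expresses a draw as a deterministic function of a standard Laplace draw, which is what lets guide programs reparameterize it.

// Hypothetical sketch, assuming base() and transform() are exposed as methods.
var d = new Laplace({location: 3, scale: 2});
var z = d.base().sample();   // standard Laplace(0, 1) draw
var x = d.transform(z);      // 2 * z + 3, distributed as Laplace(3, 2)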


var Cauchy = makeDistributionType({
@@ -1478,6 +1565,8 @@ var distributions = {
MultivariateGaussian: MultivariateGaussian,
DiagCovGaussian: DiagCovGaussian,
TensorGaussian: TensorGaussian,
Laplace: Laplace,
TensorLaplace: TensorLaplace,
LogisticNormal: LogisticNormal,
LogitNormal: LogitNormal,
IspNormal: IspNormal,
@@ -1503,6 +1592,8 @@ module.exports = _.assign({
discreteSample: discreteSample,
gaussianSample: gaussianSample,
tensorGaussianSample: tensorGaussianSample,
laplaceSample: laplaceSample,
tensorLaplaceSample: tensorLaplaceSample,
gammaSample: gammaSample,
dirichletSample: dirichletSample,
// helpers
2 changes: 1 addition & 1 deletion src/guide.js
@@ -224,7 +224,7 @@ function paramSpec(type, targetParam) {
case 'int':
return {const: targetParam};
case 'array':
if (type.elementType.name === 'any') {
if (type.elementType.name === 'any' || type.elementType.name === 'int') {
return {const: targetParam};
}
default:
2 changes: 2 additions & 0 deletions src/transforms/caching.js
@@ -25,6 +25,7 @@ var cacheExempt = [
'gamma',
'gaussian',
'ispNormal',
'laplace',
'logisticNormal',
'logitNormal',
'multinomial',
@@ -33,6 +34,7 @@ var cacheExempt = [
'poisson',
'randomInteger',
'tensorGaussian',
'tensorLaplace',
'uniform',
'factor',
'sample',
4 changes: 4 additions & 0 deletions tests/test-data/stochastic/expected/laplace.json
@@ -0,0 +1,4 @@
{
"mean": 0,
"std": 5
}
4 changes: 4 additions & 0 deletions tests/test-data/stochastic/expected/tensorLaplace.json
@@ -0,0 +1,4 @@
{
"mean": 0,
"std": 5
}
3 changes: 3 additions & 0 deletions tests/test-data/stochastic/models/laplace.wppl
@@ -0,0 +1,3 @@
var model = function() {
return laplace(0, 5 / Math.sqrt(2));
};
4 changes: 4 additions & 0 deletions tests/test-data/stochastic/models/tensorLaplace.wppl
@@ -0,0 +1,4 @@
var model = function() {
var x = tensorLaplace(0, 2.5, [2]);
return T.sumreduce(x);
};
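For reference (not part of the commit): both expected files above give mean 0 and std 5 because a Laplace distribution with scale b has variance 2b^2:

\[
\operatorname{sd}\big[\mathrm{laplace}(0, 5/\sqrt{2})\big] = \sqrt{2\,(5/\sqrt{2})^2} = 5, \qquad
\operatorname{sd}\Big[\sum_{i=1}^{2} \mathrm{Laplace}(0, 2.5)\Big] = \sqrt{2 \cdot 2 \cdot 2.5^2} = 5.
\]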
22 changes: 22 additions & 0 deletions tests/test-inference.js
@@ -29,6 +29,8 @@ var tests = [
uniform: { args: { samples: 10000 } },
beta: true,
exponential: true,
laplace: { args: { samples: 10000 } },
tensorLaplace: { args: { samples: 10000 } },
binomial: true,
multinomial: true,
poisson: true,
@@ -638,6 +640,26 @@ var tests = [
verbose: false
}
},
tensorLaplace: {
mean: { tol: 0.3 },
std: { tol: 0.3 },
args: {
optMethod: {adam: {stepSize: 0.002}},
samples: 10000,
steps: 20000,
verbose: false
}
},
laplace: {
mean: { tol: 0.3 },
std: { tol: 0.3 },
args: {
optMethod: {adam: {stepSize: 0.001}},
samples: 10000,
steps: 40000,
verbose: false
}
},
exponential: {
args: {
samples: 10000,
