@comment{aabi18.bib --- Proceedings of the 1st Symposium on Advances in Approximate Bayesian Inference (AABI 2018), PMLR volume 96. (Extraction artifacts from a rendered code viewer -- filename echoes and gutter line numbers -- removed; BibTeX ignores text outside entries, but they were noise.)}
@Proceedings{AABI-2018,
  % PMLR-style volume entry; child @InProceedings entries inherit from it.
  booktitle = {Proceedings of The 1st Symposium on Advances in Approximate Bayesian Inference},
  name      = {Symposium on Advances in Approximate Bayesian Inference},
  shortname = {AABI},
  % Names in unambiguous "Last, First" form (required by PMLR, robust in BibTeX).
  editor    = {Ruiz, Francisco and Zhang, Cheng and Liang, Dawen and Bui, Thang},
  volume    = {96},
  year      = {2018},
  start     = {2018-12-02},
  end       = {2018-12-02},
  published = {2019-01-05},
  url       = {http://approximateinference.org/},
  % {\'e} instead of raw "é": classic BibTeX is 8-bit and mis-sorts raw UTF-8.
  location  = {Le 1000 Conference Center, Montr{\'e}al, QC H3B 0A2, Canada},
}
@InProceedings{acerbi18a,
  % {Bayesian}/{Monte Carlo} braced so sentence-casing styles keep the capitals.
  title    = {An Exploration of Acquisition and Mean Functions in Variational {Bayesian} {Monte Carlo}},
  author   = {Acerbi, Luigi},
  % Double hyphen: en-dash page range (single hyphen is a typographical error).
  pages    = {1--10},
  % "sample-ecient" was a dropped "ffi" ligature from PDF extraction.
  abstract = {Variational Bayesian Monte Carlo (VBMC) is a novel framework for tackling approximate posterior and model inference in models with black-box, expensive likelihoods by means of a sample-efficient approach (Acerbi, 2018). VBMC combines variational inference with Gaussian-process (GP) based, active-sampling Bayesian quadrature, using the latter to efficiently approximate the intractable integral in the variational objective. VBMC has been shown to outperform state-of-the-art inference methods for expensive likelihoods on a benchmark consisting of meaningful synthetic densities and a real model-fitting problem from computational neuroscience. In this paper, we study the performance of VBMC under variations of two key components of the framework. First, we propose and evaluate a new general family of acquisition functions for active sampling, which includes as special cases the acquisition functions used in the original work. Second, we test different mean functions for the GP surrogate, including a novel squared-exponential GP mean function. From our empirical study, we derive insights about the stability of the current VBMC algorithm, which may help inform future theoretical and applied developments of the method.},
}
@InProceedings{cherief-abdellatif18a,
  % {ELBO} braced so sentence-casing styles keep the acronym's capitals.
  title    = {Consistency of {ELBO} Maximization for Model Selection},
  % Trailing space inside the name removed (was "Badr-Eddine ").
  author   = {Cherief-Abdellatif, Badr-Eddine},
  % Double hyphen: en-dash page range (single hyphen is a typographical error).
  pages    = {11--31},
  % "justication"/"misspeciffication"/"speciffied" were PDF-extraction ligature typos.
  abstract = {The Evidence Lower Bound (ELBO) is a quantity that plays a key role in variational inference. It can also be used as a criterion in model selection. However, though extremely popular in practice in the variational Bayes community, there has never been a general theoretic justification for selecting based on the ELBO. In this paper, we show that the ELBO maximization strategy has strong theoretical guarantees, and is robust to model misspecification while most works rely on the assumption that one model is correctly specified. We illustrate our theoretical results by an application to the selection of the number of principal components in probabilistic PCA.},
}
@InProceedings{lueckmann18a,
  title    = {Likelihood-free Inference with Emulator Networks},
  % Trailing space inside the last name entry removed (was "Jakob H. ").
  author   = {Lueckmann, Jan-Matthis and Bassetto, Giacomo and Karaletsos, Theofanis and Macke, Jakob H.},
  % Double hyphen: en-dash page range (single hyphen is a typographical error).
  pages    = {32--53},
  abstract = {Approximate Bayesian Computation (ABC) provides methods for Bayesian inference in simulation-based models which do not permit tractable likelihoods. We present a new ABC method which uses probabilistic neural emulator networks to learn synthetic likelihoods on simulated data - both 'local' emulators which approximate the likelihood for specific observed data, as well as 'global' ones which are applicable to a range of data. Simulations are chosen adaptively using an acquisition function which takes into account uncertainty about either the posterior distribution of interest, or the parameters of the emulator. Our approach does not rely on user-defined rejection thresholds or distance functions. We illustrate inference with emulator networks on synthetic examples and on a biophysical neuron model, and show that emulators allow accurate and efficient inference even on problems which are challenging for conventional ABC approaches.},
}