
Commit

Merge c935f04 into 2361c93
theogf committed Mar 30, 2021
2 parents 2361c93 + c935f04 commit d60d01e
Showing 89 changed files with 1,448 additions and 2,068 deletions.
1 change: 1 addition & 0 deletions .JuliaFormatter.toml
@@ -0,0 +1 @@
style = "blue"
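For context, a minimal sketch of how this one-line config is typically consumed; the call below is not part of this commit and assumes the JuliaFormatter package is installed:

using JuliaFormatter

# Format the whole repository; the "blue" style is picked up automatically
# from .JuliaFormatter.toml at the project root.
format(".")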
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -14,7 +14,7 @@ jobs:
matrix:
version:
- '1'
- '1.3'
- '1.6'
- 'nightly'
os:
- ubuntu-latest
23 changes: 8 additions & 15 deletions Project.toml
@@ -1,24 +1,21 @@
name = "AugmentedGaussianProcesses"
uuid = "38eea1fd-7d7d-5162-9d08-f89d0f2e271e"
authors = ["Theo Galy-Fajou <theo.galyfajou@gmail.com>"]
version = "0.9.4"
version = "0.10.0"

[deps]
AdvancedHMC = "0bf59076-c3b1-5ca4-86bd-e02cd72cde3d"
Clustering = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
DeterminantalPointProcesses = "4d968f93-c0cd-4b7f-b189-b034d1a24a0e"
Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
FastGaussQuadrature = "442a2c76-b920-505d-bb47-c5924d526838"
Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
InducingPoints = "b4bd816d-b975-4295-ac05-5f2992945579"
KernelFunctions = "ec8451be-7e33-11e9-00cf-bbf324bd1392"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
MLDataUtils = "cc2ba9b6-d476-5e6d-8eaf-a92d5412d41d"
PDMats = "90014a1f-27ba-587c-ab20-58faa44d9150"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
@@ -32,24 +29,20 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[compat]
AdvancedHMC = "0.2.13"
Clustering = "0.13.3, 0.14"
DataStructures = "0.17, 0.18"
DeterminantalPointProcesses = "0.1.0"
Distances = "0.8, 0.9, 0.10"
Distributions = "0.21.5, 0.22, 0.23, 0.24"
FastGaussQuadrature = "0.4"
Flux = "0.10, 0.11"
Flux = "0.10, 0.11, 0.12"
ForwardDiff = "0.10"
KernelFunctions = "0.5, 0.6, 0.7, 0.8"
InducingPoints = "0.1"
KernelFunctions = "0.8, 0.9"
MCMCChains = "0.3.15, 2.0, 3.0, 4.0"
MLDataUtils = "0.5"
PDMats = "0.10, 0.11"
ProgressMeter = "1"
RecipesBase = "1.0, 1.1"
Reexport = "0.2, 1"
SimpleTraits = "0.9"
SpecialFunctions = "0.9, 0.10, 1"
StatsBase = "0.32, 0.33"
StatsFuns = "0.8, 0.9"
Zygote = "0.5, 0.6"
julia = "1.3"
Zygote = "0.6.7"
julia = "1.6"
3 changes: 2 additions & 1 deletion docs/make.jl
@@ -79,5 +79,6 @@ makedocs(modules = [AugmentedGaussianProcesses],
deploydocs(
deps = Deps.pip("mkdocs", "python-markdown-math"),
repo = "github.com/theogf/AugmentedGaussianProcesses.jl.git",
target = "build"
target = "build",
push_preview = true,
)
2 changes: 1 addition & 1 deletion docs/src/template_likelihood.jl
@@ -11,7 +11,7 @@ See all functions you need to implement
"""
struct TemplateLikelihood{T<:Real,A<:AbstractVector{T}} <: Likelihood{T}
struct TemplateLikelihood{T<:Real,A<:AbstractVector{T}} <: AbstractLikelihood{T}
## Additional parameters can be added
θ::A
function TemplateLikelihood{T}() where {T<:Real}
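A minimal sketch of what the supertype rename means for downstream code; MyLikelihood is an illustrative name and not part of this commit:

using AugmentedGaussianProcesses  # exports AbstractLikelihood

# Custom likelihoods now subtype AbstractLikelihood instead of Likelihood.
struct MyLikelihood{T<:Real,A<:AbstractVector{T}} <: AbstractLikelihood{T}
    θ::A  # additional likelihood parameters
end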
168 changes: 80 additions & 88 deletions src/AugmentedGaussianProcesses.jl
@@ -4,102 +4,94 @@ General Framework for the data augmented Gaussian Processes
"""
module AugmentedGaussianProcesses
const AGP = AugmentedGaussianProcesses; export AGP
export AbstractGP, GP, VGP, SVGP, VStP, MCGP, MOVGP, MOSVGP, MOARGP,OnlineSVGP
export Likelihood, RegressionLikelihood, ClassificationLikelihood, MultiClassLikelihood
export GaussianLikelihood, StudentTLikelihood, LaplaceLikelihood, HeteroscedasticLikelihood
export LogisticLikelihood, BayesianSVM
export SoftMaxLikelihood, LogisticSoftMaxLikelihood
export PoissonLikelihood, NegBinomialLikelihood
export Inference, Analytic, AnalyticVI, AnalyticSVI, GibbsSampling, HMCSampling, MCIntegrationVI, MCIntegrationSVI, QuadratureVI, QuadratureSVI
export NumericalVI, NumericalSVI
export PriorMean, ZeroMean, ConstantMean, EmpiricalMean, AffineMean
#Useful functions
export train!, sample
export predict_f, predict_y, proba_y
export fstar, ELBO
export covariance, diag_covariance, prior_mean
export @augmodel

#General modules
using Reexport
using LinearAlgebra
using Random
@reexport using KernelFunctions
using KernelFunctions: ColVecs, RowVecs
using Zygote, ForwardDiff
using Flux: params, destructure
@reexport using Flux.Optimise
using PDMats: PDMat, invquad
using AdvancedHMC
using MCMCChains
using StatsBase
using StatsFuns
using SpecialFunctions
using Distributions
using FastGaussQuadrature: gausshermite
using ProgressMeter, SimpleTraits
#Exported modules
# export KMeansModule
export KMeansInducingPoints
const AGP = AugmentedGaussianProcesses
export AGP
export AbstractGP, GP, VGP, SVGP, VStP, MCGP, MOVGP, MOSVGP, MOARGP, OnlineSVGP # All models
export AbstractLikelihood, RegressionLikelihood, ClassificationLikelihood, MultiClassLikelihood, EventLikelihood # All categories of likelihoods
export GaussianLikelihood, StudentTLikelihood, LaplaceLikelihood, HeteroscedasticLikelihood # Regression Likelihoods
export LogisticLikelihood, BayesianSVM # Classification Likelihoods
export SoftMaxLikelihood, LogisticSoftMaxLikelihood # Multiclass Classification Likelihoods
export PoissonLikelihood, NegBinomialLikelihood # Event Likelihoods
export AbstractInference, Analytic, AnalyticVI, AnalyticSVI # Inference objects
export GibbsSampling, HMCSampling # Sampling inference
export NumericalVI, NumericalSVI, MCIntegrationVI, MCIntegrationSVI, QuadratureVI, QuadratureSVI # Numerical inference
export PriorMean, ZeroMean, ConstantMean, EmpiricalMean, AffineMean # Prior means
#Useful functions
export train!, sample
export predict_f, predict_y, proba_y
export fstar
export ELBO
export covariance, diag_covariance, prior_mean
export @augmodel

#Useful functions and module
include(joinpath("functions", "PGSampler.jl"))
include(joinpath("functions", "GIGSampler.jl"))
include(joinpath("functions", "lap_transf_dist.jl"))
#include("functions/PerturbativeCorrection.jl")
# include("functions/GPAnalysisTools.jl")
# include("functions/IO_model.jl")
#Custom modules
using .PGSampler
using .GIGSampler
#General modules
using Reexport
using LinearAlgebra
using Random
@reexport using KernelFunctions
using KernelFunctions: ColVecs, RowVecs
using Zygote, ForwardDiff
using ChainRulesCore: ChainRulesCore, NO_FIELDS, DoesNotExist
using Flux: params, destructure
@reexport using Flux.Optimise
using AdvancedHMC
using MCMCChains
using StatsBase
@reexport using InducingPoints
using StatsFuns
using SpecialFunctions
using Distributions:
Distributions, Distribution,
dim, cov, mean, var,
pdf, logpdf, loglikelihood,
Normal, Poisson, NegativeBinomial, InverseGamma, Laplace, MvNormal, Gamma
using FastGaussQuadrature: gausshermite
using ProgressMeter, SimpleTraits

include(joinpath("inducingpoints" , "InducingPoints.jl"))
@reexport using .InducingPoints
#Include custom module for additional distributions
include(joinpath("ComplementaryDistributions", "ComplementaryDistributions.jl"))
using .ComplementaryDistributions

# using .PerturbativeCorrection
# using .GPAnalysisTools
# using .IO_model
# Main classes
abstract type AbstractInference{T<:Real} end
abstract type VariationalInference{T} <: AbstractInference{T} end
abstract type SamplingInference{T} <: AbstractInference{T} end
abstract type AbstractLikelihood{T<:Real} end
abstract type AbstractLatent{T<:Real,Tpr,Tpo} end

include(joinpath("mean", "priormean.jl"))
include(joinpath("data", "datacontainer.jl"))
include(joinpath("functions", "utils.jl"))

# Main classes
abstract type Inference{T<:Real} end
abstract type VariationalInference{T} <: Inference{T} end
abstract type SamplingInference{T} <: Inference{T} end
abstract type Likelihood{T<:Real} end
abstract type AbstractLatent{T<:Real,Tpr,Tpo} end
# Models
include(joinpath("models", "AbstractGP.jl"))
include(joinpath("gpblocks", "latentgp.jl"))
include(joinpath("models", "GP.jl"))
include(joinpath("models", "VGP.jl"))
include(joinpath("models", "MCGP.jl"))
include(joinpath("models", "SVGP.jl"))
include(joinpath("models", "VStP.jl"))
include(joinpath("models", "MOSVGP.jl"))
include(joinpath("models", "MOVGP.jl"))
include(joinpath("models", "OnlineSVGP.jl"))
include(joinpath("models", "single_output_utils.jl"))
include(joinpath("models", "multi_output_utils.jl"))

include(joinpath("mean", "priormean.jl"))
include(joinpath("data", "datacontainer.jl"))
include(joinpath("functions", "utils.jl"))
include(joinpath("inference", "inference.jl"))
include(joinpath("likelihood", "likelihood.jl"))
include(joinpath("likelihood", "generic_likelihood.jl"))

# Models
include(joinpath("models", "AbstractGP.jl"))
include(joinpath("gpblocks", "latentgp.jl"))
include(joinpath("models", "GP.jl"))
include(joinpath("models", "VGP.jl"))
include(joinpath("models", "MCGP.jl"))
include(joinpath("models", "SVGP.jl"))
include(joinpath("models", "VStP.jl"))
include(joinpath("models", "MOSVGP.jl"))
include(joinpath("models", "MOVGP.jl"))
include(joinpath("models", "OnlineSVGP.jl"))
include(joinpath("models", "single_output_utils.jl"))
include(joinpath("models", "multi_output_utils.jl"))
include(joinpath("functions", "KLdivergences.jl"))
include(joinpath("functions", "ELBO.jl"))
include(joinpath("data", "utils.jl"))
include(joinpath("functions", "plotting.jl"))

include(joinpath("inference", "inference.jl"))
include(joinpath("likelihood", "likelihood.jl"))
# Training and prediction functions
include(joinpath("training", "training.jl"))
include(joinpath("training", "onlinetraining.jl"))
include(joinpath("hyperparameter", "autotuning.jl"))
include(joinpath("training", "predictions.jl"))
include("ar_predict.jl")

include(joinpath("likelihood", "generic_likelihood.jl"))

include(joinpath("functions", "KLdivergences.jl"))
include(joinpath("data", "utils.jl"))
include(joinpath("functions", "plotting.jl"))

# Training and prediction functions
include(joinpath("training", "training.jl"))
include(joinpath("training", "onlinetraining.jl"))
include(joinpath("hyperparameter", "autotuning.jl"))
include(joinpath("training", "predictions.jl"))
include("ar_predict.jl")
end #End Module
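For orientation, a rough usage sketch of the reorganized exports; the data, kernel choice, and constructor arguments below are illustrative assumptions, not taken from this commit:

using AugmentedGaussianProcesses

X = rand(100, 2)                # toy inputs, one row per sample (assumed layout)
y = rand(100)                   # toy targets
kernel = SqExponentialKernel()  # reexported from KernelFunctions
model = VGP(X, y, kernel, GaussianLikelihood(), AnalyticVI())
train!(model, 20)               # run 20 iterations
ŷ = predict_y(model, X)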
13 changes: 13 additions & 0 deletions src/ComplementaryDistributions/ComplementaryDistributions.jl
@@ -0,0 +1,13 @@
module ComplementaryDistributions

using Distributions
using Random
using SpecialFunctions
using StatsFuns: twoπ

export GeneralizedInverseGaussian, PolyaGamma, LaplaceTransformDistribution
include("generalizedinversegaussian.jl")
include("polyagamma.jl")
include("lap_transf_dist.jl")

end
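A hedged sketch of reaching the relocated distributions; the module is internal, so it is accessed through the parent package, and the (a, b, p) values are illustrative:

using Distributions: mean, params
using AugmentedGaussianProcesses
const CD = AugmentedGaussianProcesses.ComplementaryDistributions

d = CD.GeneralizedInverseGaussian(1.0, 2.0, 0.5)  # (a, b, p), see the constructor below
params(d)  # returns (a, b, p)
mean(d)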
@@ -1,10 +1,3 @@
"""Module for a Generalized Inverse Gaussian Sampler"""
module GIGSampler

using Distributions
using SpecialFunctions

export GeneralizedInverseGaussian

"""Sampler object"""
struct GeneralizedInverseGaussian{T<:Real} <: Distributions.ContinuousUnivariateDistribution
@@ -22,7 +15,7 @@ function GeneralizedInverseGaussian(a::T, b::T, p::T) where T
end

Distributions.params(d::GeneralizedInverseGaussian) = (d.a, d.b, d.p)
@inline Distributions.partype(d::GeneralizedInverseGaussian{T}) where T <: Real = T
@inline Distributions.partype(::GeneralizedInverseGaussian{T}) where T <: Real = T


function Distributions.mean(d::GeneralizedInverseGaussian)
@@ -40,7 +33,6 @@ end

Distributions.mode(d::GeneralizedInverseGaussian) = ((d.p - 1) + sqrt((d.p - 1)^2 + d.a * d.b)) / d.a


function Distributions.pdf(d::GeneralizedInverseGaussian{T}, x::Real) where T <: Real
if x > 0
a, b, p = params(d)
@@ -165,6 +157,4 @@ function _rou_shift(λ::Real, β::Real)
return x
end
end
end

end #module GIGSampler
end
@@ -1,17 +1,14 @@
using Random

###
# Distribution only based on its laplace transform function `f` and exponential tilting `c²`.
# The sampling is from Ridout, M. S. (2009). Generating random numbers from a distribution specified by its laplace transform. Statistics and Computing, 19(4):439.
# And the inverse laplace operation is done via Grassmann, W. K. (Ed.). (2000). Computational Probability. International Series in #Operations Research & Management Science. doi:10.1007/978-1-4757-4828-4

struct LaplaceTransformDistribution{T,TAlg} <: Distributions.ContinuousUnivariateDistribution
f::Function # Laplace transform of the pdf
c²::T # Exponential tilting parameter
alg::TAlg # Algorithm to compute the inverse Laplace transform
function LaplaceTransformDistribution{T,TAlg}(f::Function,c²::T,alg::TAlg) where {T<:Real,TAlg}
@assert _check_f(f) "The function passed is not valid"# Do series of check on f
@assert c² >= 0 "c² has to a be non-negative real"
_check_f(f) || error("The function passed is not valid") # Do a series of checks on f
c² >= 0 || error("c² has to a be non-negative real")
new{T,TAlg}(f,c²,alg)
end
end
@@ -21,13 +18,13 @@ LaplaceTransformDistribution(f::Function,c²::T=0.0,alg::TAlg=BromwichInverseLap
function _check_f(f)
return true # TODO Add tests for complete monotonicity / PDR
end
_gradf(d::LaplaceTransformDistribution,x::Real) = ForwardDiff.gradient(dist.f,[x])[1]
_gradlogf(d::LaplaceTransformDistribution,x::Real) = ForwardDiff.gradient(log∘dist.f,[x])[1]
_hessianlogf(d::LaplaceTransformDistribution,x::Real) = ForwardDiff.hessian(log∘dist.f,[x])[1]
_gradf(d::LaplaceTransformDistribution, x::Real) = first(ForwardDiff.gradient(d.f,[x]))
_gradlogf(d::LaplaceTransformDistribution, x::Real) = first(ForwardDiff.gradient(log ∘ d.f, [x]))
_hessianlogf(d::LaplaceTransformDistribution, x::Real) = first(ForwardDiff.hessian(log ∘ d.f, [x]))

Distributions.pdf(dist::LaplaceTransformDistribution,x::Real) = apply_f(dist,x)
Distributions.mean(dist::LaplaceTransformDistribution) = _gradf(dist,dist.c²)/dist.f(dist.c²)
Distributions.var(dist::LaplaceTransformDistribution) = _hessianlogf(dist,dist.c²)/dist.f(dist.c²)-mean(dist)^2
Distributions.pdf(dist::LaplaceTransformDistribution,x::Real) = apply_f(dist, x)
Distributions.mean(dist::LaplaceTransformDistribution) = _gradf(dist, dist.c²) / dist.f(dist.c²)
Distributions.var(dist::LaplaceTransformDistribution) = _hessianlogf(dist, dist.c²) / dist.f(dist.c²) - mean(dist)^2

function Random.rand(dist::LaplaceTransformDistribution)
first(rand(dist,1))
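A minimal construction sketch, assuming the default inverse-Laplace algorithm; the transform below is the Laplace transform of a unit-rate exponential and is purely illustrative:

using AugmentedGaussianProcesses
const CD = AugmentedGaussianProcesses.ComplementaryDistributions

f(s) = 1 / (1 + s)  # Laplace transform of an Exponential(1) density
d = CD.LaplaceTransformDistribution(f, 0.0)  # c² = 0.0, i.e. no exponential tilting
x = rand(d)  # single draw via the Ridout (2009) sampling scheme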
