diff --git a/docs/Manifest.toml b/docs/Manifest.toml index 0e4b168..314bc96 100644 --- a/docs/Manifest.toml +++ b/docs/Manifest.toml @@ -13,15 +13,21 @@ uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" [[DocStringExtensions]] deps = ["LibGit2", "Markdown", "Pkg", "Test"] -git-tree-sha1 = "88bb0edb352b16608036faadcc071adda068582a" +git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1" uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" -version = "0.8.1" +version = "0.8.3" [[Documenter]] -deps = ["Base64", "Dates", "DocStringExtensions", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"] -git-tree-sha1 = "0be9bf63e854a2408c2ecd3c600d68d4d87d8a73" +deps = ["Base64", "Dates", "DocStringExtensions", "IOCapture", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"] +git-tree-sha1 = "a4875e0763112d6d017126f3944f4133abb342ae" uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4" -version = "0.24.2" +version = "0.25.5" + +[[IOCapture]] +deps = ["Logging"] +git-tree-sha1 = "377252859f740c217b936cebcd918a44f9b53b59" +uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89" +version = "0.1.1" [[InteractiveUtils]] deps = ["Markdown"] @@ -29,11 +35,12 @@ uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JSON]] deps = ["Dates", "Mmap", "Parsers", "Unicode"] -git-tree-sha1 = "b34d7cef7b337321e97d22242c3c2b91f476748e" +git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4" uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -version = "0.21.0" +version = "0.21.1" [[LibGit2]] +deps = ["Printf"] uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" [[Libdl]] @@ -50,10 +57,10 @@ uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" uuid = "a63ad114-7e13-5084-954f-fe012c677804" [[Parsers]] -deps = ["Dates", "Test"] -git-tree-sha1 = "0139ba59ce9bc680e2925aec5b7db79065d60556" +deps = ["Dates"] +git-tree-sha1 = "50c9a9ed8c714945e01cd53a21007ed3865ed714" uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "0.3.10" +version = "1.0.15" [[Pkg]] deps = 
["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] diff --git a/docs/Project.toml b/docs/Project.toml index 3fc1987..57b2ba4 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,8 +1,8 @@ authors = ["Marco Congedo "] -version = "0.4.8" +version = "0.4.9" [deps] Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" [compat] -Documenter = "~0.24" +Documenter = "~0.25" diff --git a/docs/src/MainModule.md b/docs/src/MainModule.md index 25e60d8..2a1f17b 100644 --- a/docs/src/MainModule.md +++ b/docs/src/MainModule.md @@ -49,7 +49,7 @@ All packages above are built-in julia packages. ### types #### Metric::Enumerated type -``` +```julia @enum Metric begin Euclidean =1 invEuclidean =2 @@ -67,10 +67,9 @@ end Riemannian manipulations are defined for a given *metric* (see [metrics](@ref)). An instance for this type is requested as an argument in many functions contained in the [riemannianGeometry.jl](@ref) unit in order to specify - the metric. + the metric, for example: -``` - ## Example +```julia # generate a 15x15 symmetric positive definite matrix P=randP(15) # distance from P to the identity matrix according to the logdet0 metric @@ -85,15 +84,21 @@ end and then pass `metric` as argument in all your computations, *e.g.*, referring to the above example, - d=distance(metric, P). +```julia +d=distance(metric, P). 
+``` To know what is the current metric, you can get it as a string using: - s=string(metric) +```julia +s=string(metric) +``` To see the list of metrics in type `Metric` use: - instances(Metric) +```julia +instances(Metric) +``` #### Array of Matrices types @@ -312,22 +317,30 @@ To see the list of metrics in type `Metric` use: - Typecasting `Adjoint` matrices: - ```Matrix(X')``` +```julia +Matrix(X') +``` - here is how to get an `Hermitian` matrix out of the diagonal part of an `Hermitian` matrix H: - ```Hermitian(Matrix(Diagonal(H)))``` + ```julia + Hermitian(Matrix(Diagonal(H))) + ``` - here is how to get a `LowerTriangular` matrix out of an `Hermitian` matrix H: - ```LowerTriangular(Matrix(H))``` +```julia +LowerTriangular(Matrix(H)) +``` For example, you can use this to pass a full inter-distance matrix to the [`laplacian`](@ref) function to obtain the Laplacian matrix. A useful function is [`typeofMatrix`](@ref). For example, the following line typecasts matrix `M` to the type of matrix `P` and put the result in `A`: - A=typeofMatrix(P)(M) +```julia +A=typeofMatrix(P)(M) +``` #### Threads Some functions in **PosDefManifold** explicitly call BLAS routines @@ -336,8 +349,10 @@ concerned functions. Most functions calls BLAS routine implicitly via Julia. You can set the number of threads the BLAS library should use by: - using LinearAlgebra - BLAS.set_num_threads(n) +```julia +using LinearAlgebra +BLAS.set_num_threads(n) +``` where `n` is the number of threads. 
By default, **PosDefManifold** reserves to BLAS diff --git a/docs/src/index.md b/docs/src/index.md index 5348aee..45690f1 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -17,7 +17,7 @@ To obtain the latest development version execute instead ## About the Author [Marco Congedo](https://sites.google.com/site/marcocongedo) is -a research scientist of CNRS (Centre National de la Recherche Scientifique), +a Research Director of CNRS (Centre National de la Recherche Scientifique), working in Grenoble, France. diff --git a/src/PosDefManifold.jl b/src/PosDefManifold.jl index 3d12b8e..ebbaa86 100644 --- a/src/PosDefManifold.jl +++ b/src/PosDefManifold.jl @@ -1,8 +1,7 @@ # Main Module of the PosDefManifold Package for julia language -# v0.4.8 last update 9th of April 2020 # MIT License -# Copyright (c) 2019, Marco Congedo, CNRS, Grenobe, France: +# Copyright (c) 2019-21, Marco Congedo, CNRS, Grenobe, France: # https://sites.google.com/site/marcocongedo/home # __precompile__() diff --git a/src/linearAlgebra.jl b/src/linearAlgebra.jl index 5013893..07445e0 100644 --- a/src/linearAlgebra.jl +++ b/src/linearAlgebra.jl @@ -1,7 +1,7 @@ # Unit linearAlgebra.jl, part of PosDefManifold Package for julia language # # MIT License -# Copyright (c) 2019, Marco Congedo, CNRS, Grenobe, France: +# Copyright (c) 2019-21, Marco Congedo, CNRS, Grenobe, France: # https://sites.google.com/site/marcocongedo/home # # DESCRIPTION @@ -99,19 +99,22 @@ end which returns the concrete type (see example below), thus cannot be used for [typecasting matrices](@ref). 
- ## Examples - using LinearAlgebra, PosDefManifold - P=randP(3) # generate a 3x3 Hermitian matrix - typeofMatrix(P) # returns `Hermitian` - typeof(P) # returns `Hermitian{Float64,Array{Float64,2}}` - # typecast P as a `Matrix` M - M=Matrix(P) - # typecast M as a matrix of the same type as P and write the result in A - A=typeofMatrix(P)(M) + **Examples** - Pset=randP(3, 4) # generate a set of 4 3x3 Hermitian matrix - # Pset is an ℍVector type - typeofMatrix(Pset) # again returns `Hermitian` +```julia +using LinearAlgebra, PosDefManifold +P=randP(3) # generate a 3x3 Hermitian matrix +typeofMatrix(P) # returns `Hermitian` +typeof(P) # returns `Hermitian{Float64,Array{Float64,2}}` +# typecast P as a `Matrix` M +M=Matrix(P) +# typecast M as a matrix of the same type as P and write the result in A +A=typeofMatrix(P)(M) + +Pset=randP(3, 4) # generate a set of 4 3x3 Hermitian matrix +# Pset is an ℍVector type +typeofMatrix(Pset) # again returns `Hermitian` +``` """ typeofMatrix(H::Union{ℍ, ℍVector, ℍVector₂}) = ℍ @@ -145,14 +148,16 @@ typeofMat=typeofMatrix is not of the `ℍVector`, `𝔻Vector`, `𝕃Vector` or `𝕄Vector` type. - ## Examples - using LinearAlgebra, PosDefManifold - P=randP(3, 4) # generate 4 3x3 Hermitian matrix - typeofMatrix(P) # returns `Array{Hermitian,1}` - typeof(P) # also returns `Array{Hermitian,1}` + **Examples** +```julia +using LinearAlgebra, PosDefManifold +P=randP(3, 4) # generate 4 3x3 Hermitian matrix +typeofMatrix(P) # returns `Array{Hermitian,1}` +typeof(P) # also returns `Array{Hermitian,1}` - typeofMatrix(P[1]) # returns `Array{Hermitian,1}` - typeof(P[1]) # returns `Hermitian{Float64,Array{Float64,2}}` +typeofMatrix(P[1]) # returns `Array{Hermitian,1}` +typeof(P[1]) # returns `Hermitian{Float64,Array{Float64,2}}` +``` """ typeofVector(H::Union{ℍ, ℍVector, ℍVector₂}) = ℍVector @@ -210,38 +215,41 @@ typeofVec=typeofVector It is not possible to overload the `size` function for matrix vectors since this causes problems to other Julia functions. 
- ## Examples - using LinearAlgebra, PosDefManifold - # (1) - M=randn(3, 4) # generate a 3x4 `Matrix` - dim(M) # returns (3, 4) - dim(M, 1) # returns 3 - dim(M, 2) # returns 4 - dim(M, 3) # out of range: returns 0 - - # (2) - Pset=randP(3, 4) # generate an ℍVector holding 4 3x3 Hermitian matrices - dim(Pset) # returns (4, 3, 3) - dim(Pset, 1) # returns 4 - dim(Pset, 2) # returns 3 - dim(Pset, 3) # returns 3 - - # (3) - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) - # Generate a set of 40 random 4x4 SPD matrices - Qset=randP(3, 40) - A=ℍVector₂([Pset, Qset]) - dim(A) # return (2, [4, 40], 3, 3) - dim(A, 1) # return 2 - dim(A, 2) # return [4, 40] - dim(A, 2)[1] # return 4 - dim(A, 3) # return 3 - dim(A, 4) # return 3 - dim(A, 5) # out of range: return 0 - - # note: to create an ℍVector₂ object holding k ℍVector objects use - sets=ℍVector₂(undef, k) # and then fill them + **Examples** +```julia +using LinearAlgebra, PosDefManifold +# (1) +M=randn(3, 4) # generate a 3x4 `Matrix` +dim(M) # returns (3, 4) +dim(M, 1) # returns 3 +dim(M, 2) # returns 4 +dim(M, 3) # out of range: returns 0 + +# (2) +Pset=randP(3, 4) # generate an ℍVector holding 4 3x3 Hermitian matrices +dim(Pset) # returns (4, 3, 3) +dim(Pset, 1) # returns 4 +dim(Pset, 2) # returns 3 +dim(Pset, 3) # returns 3 + +# (3) +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) +# Generate a set of 40 random 4x4 SPD matrices +Qset=randP(3, 40) +A=ℍVector₂([Pset, Qset]) +dim(A) # return (2, [4, 40], 3, 3) +dim(A, 1) # return 2 +dim(A, 2) # return [4, 40] +dim(A, 2)[1] # return 4 +dim(A, 3) # return 3 +dim(A, 4) # return 3 +dim(A, 5) # out of range: return 0 + +# note: to create an ℍVector₂ object holding k ℍVector objects use +sets=ℍVector₂(undef, k) # and then fill them +``` + """ dim(X::AnyMatrix, d::Int) = 1<=d<=2 ? size(X, d) : 0 dim(X::AnyMatrix) = size(X) @@ -280,18 +288,22 @@ If `X` is a Matrix, `dims`=1 (default) remove rows, If `X` is a Vector, `dims` has no effect. 
The second argument is either an integer or a vector of integers. - ## Examples: - a=randn(5) - b=remove(a, 2) - b=remove(a, collect(1:3)) # remove rows 1 to 3 - A=randn(3, 3) - B=remove(A, 2) - B=remove(A, 2; dims=2) - A=randn(5, 5) - B=remove(A, collect(1:2:5)) # remove rows 1, 3 and 5 - C=remove(A, [1, 4]) - A=randn(10, 10) - A=remove(A, [collect(2:3); collect(8:10)]; dims=2) + + **Examples** +```julia +a=randn(5) +b=remove(a, 2) +b=remove(a, collect(1:3)) # remove rows 1 to 3 +A=randn(3, 3) +B=remove(A, 2) +B=remove(A, 2; dims=2) +A=randn(5, 5) +B=remove(A, collect(1:2:5)) # remove rows 1, 3 and 5 +C=remove(A, [1, 4]) +A=randn(10, 10) +A=remove(A, [collect(2:3); collect(8:10)]; dims=2) +``` + """ function remove(X::Union{Vector, Matrix}, what::Union{Int, Vector{Int}}; dims=1) 1=0. ? tolerance=tol : tolerance = 0. @@ -372,19 +387,20 @@ det1Msg="function det1 in LinearAlgebra.jl of PosDefMaifold package: the determi **See also**: [`tr`](@ref), [`det1`](@ref). - ## Examples - using LinearAlgebra, PosDefManifold - - P=randP(5) # generate a random real positive definite matrix 5x5 - Q=tr1(P) - tr(Q) # must be 1 - # using a tolerance - Q=tr1(P; tol=1e-12) + **Examples** +```julia +using LinearAlgebra, PosDefManifold - Pc=randP(ComplexF64, 5) # generate a random real positive definite matrix 5x5 - Qc=tr1(Pc) - tr(Qc) # must be 1 +P=randP(5) # generate a random real positive definite matrix 5x5 +Q=tr1(P) +tr(Q) # must be 1 +# using a tolerance +Q=tr1(P; tol=1e-12) +Pc=randP(ComplexF64, 5) # generate a random real positive definite matrix 5x5 +Qc=tr1(Pc) +tr(Qc) # must be 1 +``` """ function tr1(X::AnyMatrix; tol::Real=0.) @@ -430,15 +446,18 @@ tr1Msg2="function tr1 in LinearAlgebra.jl of PosDefMaifold package: the imaginar **See also**: [`det1`](@ref), [`procrustes`](@ref). 
- ## Examples - using LinearAlgebra, PosDefManifold - X=randn(5, 5) # generate an arbitrary 5x5 matrix - S=nearestPosDef(X) + **Examples** + ```julia +using LinearAlgebra, PosDefManifold +X=randn(5, 5) # generate an arbitrary 5x5 matrix +S=nearestPosDef(X) + +P=randP(5) # generate a random real positive definite 5x5 matrix +S=nearestPosDef(Matrix(P)) # typecasting an Hermitian matrix as a `Matrix` +# Since P is a positive definite matrix S must be equal to P +S ≈ P ? println(" ⭐ ") : println(" ⛔ ") +``` - P=randP(5) # generate a random real positive definite 5x5 matrix - S=nearestPosDef(Matrix(P)) # typecasting an Hermitian matrix as a `Matrix` - # Since P is a positive definite matrix S must be equal to P - S ≈ P ? println(" ⭐ ") : println(" ⛔ ") """ function nearestPosDef(D::𝔻; tol::Real=0.) tol>=0. ? tolerance=tol : tolerance = 0. @@ -475,9 +494,11 @@ end **See also**: [`nearestPosDef`](@ref), [`procrustes`](@ref). - ## Examples - using PosDefManifold - U=nearestOrth(randn(5, 5)) + **Examples** +```julia +using PosDefManifold +U=nearestOrth(randn(5, 5)) +``` """ function nearestOrthogonal(X::AnyMatrix) @@ -525,15 +546,18 @@ nearestOrth=nearestOrthogonal **See also**: [`colNorm`](@ref), [`colProd`](@ref). 
- ## Examples - using PosDefManifold - X=randn(10, 20) - normalizeCol!(X, 2) # (1) normalize columns 2 - normalizeCol!(X, 2, 10.0) # (2) divide columns 2 by 10.0 - normalizeCol!(X, 2:4) # (3) normalize columns 2 to 4 - X=randn(ComplexF64, 10, 20) - normalizeCol!(X, 3) # (1) normalize columns 3 - normalizeCol!(X, 3:6, (2.0 + 0.5im)) # (4) divide columns 3 to 5 by (2.0 + 0.5im) + **Examples** +```julia +using PosDefManifold +X=randn(10, 20) +normalizeCol!(X, 2) # (1) normalize columns 2 +normalizeCol!(X, 2, 10.0) # (2) divide columns 2 by 10.0 +normalizeCol!(X, 2:4) # (3) normalize columns 2 to 4 +X=randn(ComplexF64, 10, 20) +normalizeCol!(X, 3) # (1) normalize columns 3 +normalizeCol!(X, 3:6, (2.0 + 0.5im)) # (4) divide columns 3 to 5 by (2.0 + 0.5im) +``` + """ function normalizeCol!(X::𝕄{T}, j::Int) where T<:RealOrComplex w=colNorm(X, j) @@ -594,17 +618,17 @@ normalizeCol!(X::𝕄{T}, range::UnitRange, by::Number) where T<:RealOrComplex = "at position *pos*", where *pos* is the position where the first non-positive element has been found. + **Examples** +```julia +using PosDefManifold +a=[1, 0, 2, 8] +ispos(a, msg="non-positive element found") + +# it will print: +# ┌ Warning: non-positive element found at position 2 +# └ @ [here julie will point to the line of code issuing the warning] ``` - ## Examples - using PosDefManifold - a=[1, 0, 2, 8] - ispos(a, msg="non-positive element found") - - # it will print: - # ┌ Warning: non-positive element found at position 2 - # └ @ [here julie will point to the line of code issuing the warning] -``` - """ +""" function ispos(λ::Vector{T}; tol::Real=0, rev=true, @@ -666,12 +690,14 @@ ispos(Λ::Diagonal{T}; **See also**: [`normalizeCol!`](@ref), [`colNorm`](@ref). 
- ## Examples - using PosDefManifold - X=randn(10, 20) - p=colProd(X, 1, 3) - Y=randn(10, 30) - q=colProd(X, Y, 2, 25) + **Examples** +```julia +using PosDefManifold +X=randn(10, 20) +p=colProd(X, 1, 3) +Y=randn(10, 30) +q=colProd(X, Y, 2, 25) +``` """ colProd(X::Union{𝕄{T}, ℍ{T}}, j::Int, l::Int) where T<:RealOrComplex = @@ -693,10 +719,12 @@ colProd(X::Union{𝕄{T}, ℍ{T}}, Y::Union{𝕄{T}, ℍ{T}}, j::Int, l::Int) wh **See also**: [`normalizeCol!`](@ref), [`colProd`](@ref), [`sumOfSqr`](@ref). - ## Examples - using PosDefManifold - X=randn(10, 20) - normOfSecondColumn=colNorm(X, 2) + **Examples** +```julia +using PosDefManifold +X=randn(10, 20) +normOfSecondColumn=colNorm(X, 2) +``` """ colNorm(X::Union{𝕄{T}, ℍ{T}}, j::Int) where T<:RealOrComplex = @@ -734,12 +762,14 @@ colNorm(X::Union{𝕄{T}, ℍ{T}}, j::Int) where T<:RealOrComplex = **See also**: [`colNorm`](@ref), [`sumOfSqrDiag`](@ref), [`sumOfSqrTril`](@ref). - ## Examples - using PosDefManifold - X=randn(10, 20) - sum2=sumOfSqr(X) # (1) sum of squares of all elements - sum2=sumOfSqr(X, 1) # (2) sum of squares of elements in column 1 - sum2=sumOfSqr(X, 2:4) # (3) sum of squares of elements in column 2 to 4 +**Examples** +```julia +using PosDefManifold +X=randn(10, 20) +sum2=sumOfSqr(X) # (1) sum of squares of all elements +sum2=sumOfSqr(X, 1) # (2) sum of squares of elements in column 1 +sum2=sumOfSqr(X, 2:4) # (3) sum of squares of elements in column 2 to 4 +``` """ sumOfSqr(A::Array) = 𝚺(abs2(a) for a in A) @@ -787,11 +817,15 @@ ss=sumOfSqr **See also**: [`sumOfSqr`](@ref), [`sumOfSqrTril`](@ref). 
- ## Examples - using LinearAlgebra, PosDefManifold - X=randn(10, 20) - sumDiag2=sumOfSqrDiag(X) # (1) - sumDiag2=sumOfSqrDiag(𝔻(X)) # (2) 𝔻=LinearAlgebra.Diagonal + **Examples** +```julia +using LinearAlgebra, PosDefManifold +X=randn(10, 20) +sumDiag2=sumOfSqrDiag(X) # (1) +sumDiag2=sumOfSqrDiag(𝔻(X)) # (2) +# 𝔻=LinearAlgebra.Diagonal is declated in the main module +``` + """ sumOfSqrDiag(X::𝕄{T}) where T<:RealOrComplex = 𝚺(abs2(X[i, i]) for i=1:minimum(size(X))) @@ -804,7 +838,7 @@ ssd=sumOfSqrDiag """ sumOfSqrTril(X::AnyMatrix, k::Int=0) -**alias**: `sst` + **alias**: `sst` Given a real or complex `Matrix`, `Diagonal`, `Hermitian` or `LowerTriangular` matrix ``X`` (see [AnyMatrix type](@ref)), @@ -826,19 +860,21 @@ ssd=sumOfSqrDiag **See also**: [`sumOfSqr`](@ref), [`sumOfSqrDiag`](@ref). - ## Examples - using PosDefManifold - A=[4. 3.; 2. 5.; 1. 2.] - #3×2 Array{Float64,2}: - # 4.0 3.0 - # 2.0 5.0 - # 1.0 2.0 + **Examples** +```julia +using PosDefManifold +A=[4. 3.; 2. 5.; 1. 2.] +#3×2 Array{Float64,2}: +# 4.0 3.0 +# 2.0 5.0 +# 1.0 2.0 - s=sumOfSqrTril(A, -1) - # 9.0 = 1²+2²+2² +s=sumOfSqrTril(A, -1) +# 9.0 = 1²+2²+2² - s=sumOfSqrTril(A, 0) - # 50.0 = 1²+2²+2²+4²+5² +s=sumOfSqrTril(A, 0) +# 50.0 = 1²+2²+2²+4²+5² +``` """ function sumOfSqrTril(X::AnyMatrix, k::Int=0) @@ -894,13 +930,15 @@ sst=sumOfSqrTril **See also**: [`DiagOfProd`](@ref), [`tr1`](@ref). - ## Examples - using PosDefManifold - P=randP(ComplexF64, 5) # generate a random complex positive definite matrix 5x5 - Q=randP(ComplexF64, 5) # generate a random complex positive definite matrix 5x5 - tr(P, Q) ≈ tr(P*Q) ? println(" ⭐ ") : println(" ⛔ ") - tr(P, Q) ≈ tr(sqrt(P)*Q*sqrt(P)) ? println(" ⭐ ") : println(" ⛔ ") - tr(sqr(P), Q) ≈ tr(P*Q*P) ? println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using PosDefManifold +P=randP(ComplexF64, 5) # generate a random complex positive definite matrix 5x5 +Q=randP(ComplexF64, 5) # generate a random complex positive definite matrix 5x5 +tr(P, Q) ≈ tr(P*Q) ? 
println(" ⭐ ") : println(" ⛔ ") +tr(P, Q) ≈ tr(sqrt(P)*Q*sqrt(P)) ? println(" ⭐ ") : println(" ⛔ ") +tr(sqr(P), Q) ≈ tr(P*Q*P) ? println(" ⭐ ") : println(" ⛔ ") +``` """ tr(P::ℍ{T}, Q::ℍ{T}) where T<:RealOrComplex = real(tr(DiagOfProd(P, Q))) @@ -969,13 +1007,16 @@ tr(H::Union{ℍ{T}, 𝕄{T}}, D::𝔻{T}) where T<:RealOrComplex = tr(D, H) These formula are used in methods (1), (2) and (3). - ## Examples - using PosDefManifold - P=randP(5) # generate a random real positive definite matrix 5x5 - v=randn(5) - q1=quadraticForm(v, P) # or q1=qf(v, P) - q2=v'*P*v - q1 ≈ q2 ? println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using PosDefManifold +P=randP(5) # generate a random real positive definite matrix 5x5 +v=randn(5) +q1=quadraticForm(v, P) # or q1=qf(v, P) +q2=v'*P*v +q1 ≈ q2 ? println(" ⭐ ") : println(" ⛔ ") +``` + """ function quadraticForm(v::Vector{T}, P::ℍ{T}) where T<:Real r=length(v) @@ -1019,7 +1060,6 @@ end qf=quadraticForm - """ fidelity(P::ℍ{T}, Q::ℍ{T}) where T<:RealOrComplex @@ -1031,11 +1071,13 @@ qf=quadraticForm This is used in quantum physics and is related to the [Wasserstein](@ref) metric. See for example Bhatia, Jain and Lim (2019b)[🎓](@ref). - ## Examples - using PosDefManifold - P=randP(5); - Q=randP(5); - f=fidelity(P, Q) + **Examples** +```julia +using PosDefManifold +P=randP(5); +Q=randP(5); +f=fidelity(P, Q) +``` """ function fidelity(P::ℍ{T}, Q::ℍ{T}) where T<:RealOrComplex @@ -1081,21 +1123,23 @@ end **See also**: [`DiagOfProd`](@ref), [`tr`](@ref). 
- ## Examples + **Examples** +```julia +using PosDefManifold +P=randP(5) # use P=randP(ComplexF64, 5) for generating an Hermitian matrix - using PosDefManifold - P=randP(5) # use P=randP(ComplexF64, 5) for generating an Hermitian matrix +# diagonal matrix with the inverse of the first sub-diagonal of P +D=fDiag(inv, P, -1) - # diagonal matrix with the inverse of the first sub-diagonal of P - D=fDiag(inv, P, -1) +(Λ, U) = evd(P) # Λ holds the eigenvalues of P, see evd - (Λ, U) = evd(P) # Λ holds the eigenvalues of P, see evd +# diagonal matrix with the log of the eigenvalues +Δ=fDiag(log, Λ) - # diagonal matrix with the log of the eigenvalues - Δ=fDiag(log, Λ) +# using an anonymous function for the square of the eigenvalues +Δ=fDiag(x->x^2, Λ) +``` - # using an anonymous function for the square of the eigenvalues - Δ=fDiag(x->x^2, Λ) """ fDiag(func::Function, X::𝔻{T}, k::Int=0) where T<:RealOrComplex = 𝔻(func.(diag(X))) @@ -1122,10 +1166,13 @@ fDiag(func::Function, X::Union{𝕄{T}, ℍ{T}}, k::Int=0) where T<:RealOrCompl **See also**: [`tr`](@ref), [`fDiag`](@ref). - ## Examples - using PosDefManifold, LinearAlgebra - P, Q=randP(5), randP(5) - DiagOfProd(P, Q)≈Diagonal(P*Q) ? println("⭐ ") : println("⛔ ") + **Examples** +```julia +using PosDefManifold, LinearAlgebra +P, Q=randP(5), randP(5) +DiagOfProd(P, Q)≈Diagonal(P*Q) ? println("⭐ ") : println("⛔ ") +``` + """ DiagOfProd(P::ℍ{T}, Q::ℍ{T}) where T<:RealOrComplex = 𝔻([colProd(P, Q, i, i) for i=1:size(P, 1)]) @@ -1148,14 +1195,16 @@ dop=DiagOfProd then only the first `numCol` columns of ``X`` are orthogonalized. In this case only the firt `numCol` columns will be returned. - ## Examples - using LinearAlgebra, PosDefManifold - X=randn(10, 10); - U=mgs(X) # result is 10⋅10 - U=mgs(X, 3) # result is 10⋅3 - U'*U ≈ I ? println(" ⭐ ") : println(" ⛔ ") - # julia undertands also: - U'U ≈ I ? 
println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using LinearAlgebra, PosDefManifold +X=randn(10, 10); +U=mgs(X) # result is 10⋅10 +U=mgs(X, 3) # result is 10⋅3 +U'*U ≈ I ? println(" ⭐ ") : println(" ⛔ ") +# julia undertands also: +U'U ≈ I ? println(" ⭐ ") : println(" ⛔ ") +``` """ function mgs(X::𝕄{T}, numCol::Int=0) where T<:RealOrComplex @@ -1237,49 +1286,51 @@ end # mgs function This option is provided to allow calling this function repeatedly without normalizing the same weights vector each time. By default `✓w` is false. - ## Examples - - using LinearAlgebra, PosDefManifold - Pset=randP(4, 1000); # generate 1000 positive definite 4x4 matrices - mean(Pset) # arithmetic mean calling Julia function - Threads.nthreads() # check how many threads are available - fVec(mean, Pset) # multi-threaded arithmetic mean - - inv(mean(inv, Pset)) # Harmonic mean calling Julia function - inv(fVec(mean, inv, Pset)) # multi-threaded Harmonic mean - - exp(mean(log, Pset)) # log Euclidean mean calling Julia function - exp(fVec(mean, log, Pset)) # multi-threaded log Euclidean mean - - # notice that Julia `exp` function has changed the type of the result - # to `Symmetric`. 
To obtain an `Hermitian` output use - ℍ(exp(fVec(mean, log, Pset))) - - w=(randn(1000)).^2 - w=w./sum(w) # generate normalized random weights - - # weighted arithmetic mean calling Julia function - sum(Pset[i]*w[i] for i=1:length(w)) - # multi-threaded weighted arithmetic mean - fVec(sum, Pset, w=w) - - # weighted harmonic mean calling Julia function - inv(sum(inv(Pset[i])*w[i] for i=1:length(w))) - # multi-threaded weighted harmonic mean - inv(fVec(sum, inv, Pset, w=w)) - - # pre-allocating memory - Pset=randP(100, 1000); # generate 1000 positive definite 100x100 matrices - Qset=MatrixVector(repeat([similar(Pset[1])], Threads.nthreads())) - fVec(mean, log, Pset, allocs=Qset) + **Examples** +```julia +using LinearAlgebra, PosDefManifold +Pset=randP(4, 1000); # generate 1000 positive definite 4x4 matrices +mean(Pset) # arithmetic mean calling Julia function +Threads.nthreads() # check how many threads are available +fVec(mean, Pset) # multi-threaded arithmetic mean + +inv(mean(inv, Pset)) # Harmonic mean calling Julia function +inv(fVec(mean, inv, Pset)) # multi-threaded Harmonic mean + +exp(mean(log, Pset)) # log Euclidean mean calling Julia function +exp(fVec(mean, log, Pset)) # multi-threaded log Euclidean mean + +# notice that Julia `exp` function has changed the type of the result +# to `Symmetric`. 
To obtain an `Hermitian` output use +ℍ(exp(fVec(mean, log, Pset))) + +w=(randn(1000)).^2 +w=w./sum(w) # generate normalized random weights + +# weighted arithmetic mean calling Julia function +sum(Pset[i]*w[i] for i=1:length(w)) +# multi-threaded weighted arithmetic mean +fVec(sum, Pset, w=w) + +# weighted harmonic mean calling Julia function +inv(sum(inv(Pset[i])*w[i] for i=1:length(w))) +# multi-threaded weighted harmonic mean +inv(fVec(sum, inv, Pset, w=w)) + +# pre-allocating memory +Pset=randP(100, 1000); # generate 1000 positive definite 100x100 matrices +Qset=MatrixVector(repeat([similar(Pset[1])], Threads.nthreads())) +fVec(mean, log, Pset, allocs=Qset) + +# How much computing time we save ? +# (example min time obtained with 4 threads & 4 BLAS threads) +using BenchmarkTools +# standard Julia function +@benchmark(mean(log, Pset)) # (5.271 s) +# fVec +@benchmark(fVec(mean, log, Pset)) # (1.540 s) +``` - # How much computing time we save ? - # (example min time obtained with 4 threads & 4 BLAS threads) - using BenchmarkTools - # standard Julia function - @benchmark(mean(log, Pset)) # (5.271 s) - # fVec - @benchmark(fVec(mean, log, Pset)) # (1.540 s) """ function fVec(f::Function, g::Function, 𝐏::AnyMatrixVector; w::Vector=[], @@ -1410,48 +1461,49 @@ fVec(f::Function, 𝐏::AnyMatrixVector; be obtained as an `Hermitian` matrix by `cong(B, P, ℍ)` and as a generic matrix object by `cong(B, P, 𝕄)`. - ## Examples - - using LinearAlgebra, PosDefManifold - - # (1) - P=randP(3) # generate a 3x3 positive matrix - M=randn(3, 3) - C=cong(M, P, ℍ) # equivalent to C=ℍ(M*P*M') - - # (2) - Pset=randP(4, 100); # generate 100 positive definite 4x4 matrices - M=randn(4, 4) - Qset=cong(M, Pset, ℍVector) # = [M*Pset_1*M',...,M*Pset_k*M'] as an ℍVector type - - # recenter the matrices in Pset to their Fisher mean: - Qset=cong(invsqrt(mean(Fisher, Pset)), Pset, ℍVector) - - # as a check, the Fisher mean of Qset is now the identity - mean(Fisher, Qset)≈I ? 
println("⭐") : println("⛔") - - # (3) - Pset1=randP(4, 10); # generate 10 positive definite 4x4 matrices - Pset2=randP(4, 8); - Pset=ℍVector₂([Pset1, Pset2]); - M=randn(4, 4) - Qset=cong(M, Pset, MatrixVector₂) - Qset[1][1]≈M*Pset[1][1]*M' ? println("⭐") : println("⛔") - Qset[1][5]≈M*Pset[1][5]*M' ? println("⭐") : println("⛔") - Qset[2][1]≈M*Pset[2][1]*M' ? println("⭐") : println("⛔") - Qset[2][4]≈M*Pset[2][4]*M' ? println("⭐") : println("⛔") - - - # (4) - Pset1=randP(4, 2); # generate 2 positive definite 4x4 matrices - Pset2=randP(4, 2); - Pset=ℍVector₂([Pset1, Pset2]); - U=𝕄Vector([randU(4), randU(4)]) - Qset=cong(U, Pset, MatrixVector₂) - Qset[1][1]≈U[1]*Pset[1][1]*U[1]' ? println("⭐") : println("⛔") - Qset[1][2]≈U[1]*Pset[1][2]*U[2]' ? println("⭐") : println("⛔") - Qset[2][1]≈U[2]*Pset[2][1]*U[1]' ? println("⭐") : println("⛔") - Qset[2][2]≈U[2]*Pset[2][2]*U[2]' ? println("⭐") : println("⛔") + **Examples** +```julia +using LinearAlgebra, PosDefManifold + +# (1) +P=randP(3) # generate a 3x3 positive matrix +M=randn(3, 3) +C=cong(M, P, ℍ) # equivalent to C=ℍ(M*P*M') + +# (2) +Pset=randP(4, 100); # generate 100 positive definite 4x4 matrices +M=randn(4, 4) +Qset=cong(M, Pset, ℍVector) # = [M*Pset_1*M',...,M*Pset_k*M'] as an ℍVector type + +# recenter the matrices in Pset to their Fisher mean: +Qset=cong(invsqrt(mean(Fisher, Pset)), Pset, ℍVector) + +# as a check, the Fisher mean of Qset is now the identity +mean(Fisher, Qset)≈I ? println("⭐") : println("⛔") + +# (3) +Pset1=randP(4, 10); # generate 10 positive definite 4x4 matrices +Pset2=randP(4, 8); +Pset=ℍVector₂([Pset1, Pset2]); +M=randn(4, 4) +Qset=cong(M, Pset, MatrixVector₂) +Qset[1][1]≈M*Pset[1][1]*M' ? println("⭐") : println("⛔") +Qset[1][5]≈M*Pset[1][5]*M' ? println("⭐") : println("⛔") +Qset[2][1]≈M*Pset[2][1]*M' ? println("⭐") : println("⛔") +Qset[2][4]≈M*Pset[2][4]*M' ? 
println("⭐") : println("⛔") + +# (4) +Pset1=randP(4, 2); # generate 2 positive definite 4x4 matrices +Pset2=randP(4, 2); +Pset=ℍVector₂([Pset1, Pset2]); +U=𝕄Vector([randU(4), randU(4)]) +Qset=cong(U, Pset, MatrixVector₂) +Qset[1][1]≈U[1]*Pset[1][1]*U[1]' ? println("⭐") : println("⛔") +Qset[1][2]≈U[1]*Pset[1][2]*U[2]' ? println("⭐") : println("⛔") +Qset[2][1]≈U[2]*Pset[2][1]*U[1]' ? println("⭐") : println("⛔") +Qset[2][2]≈U[2]*Pset[2][2]*U[2]' ? println("⭐") : println("⛔") +``` + """ congruence(B::AnyMatrix, P::AnyMatrix, matrixType) = matrixType(B*P*B') @@ -1529,13 +1581,16 @@ cong=congruence **See also**: [`spectralFunctions`](@ref). - ## Examples - using PosDefManifold - A=randn(3, 3); - S=A+A'; - Λ, U=evd(S); # which is equivalent to (Λ, U)=evd(P) - (U*Λ*U') ≈ S ? println(" ⭐ ") : println(" ⛔ ") - # => UΛU'=S, UΛ=SU, ΛU'=U'S + **Examples** +```julia +using PosDefManifold +A=randn(3, 3); +S=A+A'; +Λ, U=evd(S); # which is equivalent to (Λ, U)=evd(P) +(U*Λ*U') ≈ S ? println(" ⭐ ") : println(" ⛔ ") +# => UΛU'=S, UΛ=SU, ΛU'=U'S +``` + """ function evd(S::Union{𝕄{T}, ℍ{T}}) where T<:RealOrComplex # return tuple (Λ, U) F = eigen(S) @@ -1565,13 +1620,14 @@ thus ``F^{-1}`` is a whitening matrix. **See also**: [`invfrf`](@ref). -## Examples -``` +**Examples** +```julia using LinearAlgebra, PosDefManifold P=randP(3) F = frf(P) F*F'≈P ? println(" ⭐ ") : println(" ⛔ ") ``` + """ function frf(P::ℍ{T}) where T<:RealOrComplex #size(P, 1)≠size(A, 2) && throw(ArgumentError(📌*", frf function: input matrix must be square")) @@ -1601,13 +1657,14 @@ thus ``F`` is a whitening matrix. **See also**: [`frf`](@ref). -## Examples -``` +**Examples** +```julia using LinearAlgebra, PosDefManifold P=randP(3) F = invfrf(P) F*P*F'≈I ? println(" ⭐ ") : println(" ⛔ ") ``` + """ function invfrf(P::ℍ{T}) where T<:RealOrComplex #size(P, 1)≠size(A, 2) && throw(ArgumentError(📌*", frf function: input matrix must be square")) @@ -1658,13 +1715,15 @@ end **See also**: [`evd`](@ref). 
- ## Examples - using LinearAlgebra, PosDefManifold - n=5 - P=randP(n) # P=randP(ComplexF64, 5) to generate an Hermitian complex matrix - noise=0.1; - Q=spectralFunctions(P, x->x+noise) # add white noise to the eigenvalues - tr(Q)-tr(P) ≈ noise*n ? println(" ⭐ ") : println(" ⛔ ") +**Examples** +```julia +using LinearAlgebra, PosDefManifold +n=5 +P=randP(n) # P=randP(ComplexF64, 5) to generate an Hermitian complex matrix +noise=0.1; +Q=spectralFunctions(P, x->x+noise) # add white noise to the eigenvalues +tr(Q)-tr(P) ≈ noise*n ? println(" ⭐ ") : println(" ⛔ ") +``` """ function spectralFunctions(P::ℍ{T}, func::Function) where T<:RealOrComplex @@ -1695,15 +1754,17 @@ spectralFunctions(D::𝔻{T}, func::Function) where T<:Real = fDiag(func, D) **See also**: [`invsqrt`](@ref). - ## Examples - using LinearAlgebra, PosDefManifold - P=randP(5); # use P=randP(ComplexF64, 5) for generating an Hermitian matrix - Q=pow(P, 0.5); # => QQ=P - Q, W=pow(P, 0.5, -0.5); - W*P*W ≈ I ? println(" ⭐ ") : println(" ⛔ ") - Q*Q ≈ P ? println(" ⭐ ") : println(" ⛔ ") - R, S=pow(P, 0.3, 0.7); - R*S ≈ P ? println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using LinearAlgebra, PosDefManifold +P=randP(5); # use P=randP(ComplexF64, 5) for generating an Hermitian matrix +Q=pow(P, 0.5); # => QQ=P +Q, W=pow(P, 0.5, -0.5); +W*P*W ≈ I ? println(" ⭐ ") : println(" ⛔ ") +Q*Q ≈ P ? println(" ⭐ ") : println(" ⛔ ") +R, S=pow(P, 0.3, 0.7); +R*S ≈ P ? println(" ⭐ ") : println(" ⛔ ") +``` """ pow(P::ℍ{T}, p) where T<:RealOrComplex = spectralFunctions(P, x->x^p) # one argument @@ -1745,11 +1806,13 @@ end **See also**: [`pow`](@ref). - ## Examples - using LinearAlgebra, PosDefManifold - P=randP(ComplexF64, 5); - Q=invsqrt(P); - Q*P*Q ≈ I ? println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using LinearAlgebra, PosDefManifold +P=randP(ComplexF64, 5); +Q=invsqrt(P); +Q*P*Q ≈ I ? 
println(" ⭐ ") : println(" ⛔ ") +``` """ invsqrt(P::ℍ{T}) where T<:RealOrComplex = spectralFunctions(P, x->1/√x) @@ -1773,11 +1836,13 @@ invsqrt(D::𝔻{T}) where T<:Real = spectralFunctions(D, x->1/√x) **See also**: [`pow`](@ref). - ## Examples - using PosDefManifold - P=randP(5); - P²=sqr(P); # => P²=PP - sqrt(P²)≈ P ? println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using PosDefManifold +P=randP(5); +P²=sqr(P); # => P²=PP +sqrt(P²)≈ P ? println(" ⭐ ") : println(" ⛔ ") +``` """ sqr(P::ℍ{T}) where T<:RealOrComplex = ℍ(P*P) @@ -1836,22 +1901,24 @@ sqr(X::Union{𝕄{T}, 𝕃{T}, 𝔻{S}}) where T<:RealOrComplex where S<:Real = **See also**: [`mgs`](@ref). - ## Examples - using LinearAlgebra, PosDefManifold - # Generate an Hermitian (complex) matrix - H=randP(ComplexF64, 10); - # 3 eigenvectors and eigenvalues - U, iterations, convergence=powIter(H, 3, verbose=true) - # all eigenvectors - Λ, U, iterations, convergence=powIter(H, size(H, 2), evalues=true, verbose=true); - U'*U ≈ I && U*Λ*U'≈H ? println(" ⭐ ") : println(" ⛔ ") - - # passing a `Matrix` object - Λ, U, iterations, convergence=powIter(Matrix(H), 3, evalues=true) - - # passing a `LowerTriangular` object (must be a real matrix in this case) - L=𝕃(randP(10)) - Λ, U, iterations, convergence=powIter(L, 3, evalues=true) + **Examples** +```julia +using LinearAlgebra, PosDefManifold +# Generate an Hermitian (complex) matrix +H=randP(ComplexF64, 10); +# 3 eigenvectors and eigenvalues +U, iterations, convergence=powIter(H, 3, verbose=true) +# all eigenvectors +Λ, U, iterations, convergence=powIter(H, size(H, 2), evalues=true, verbose=true); +U'*U ≈ I && U*Λ*U'≈H ? 
println(" ⭐ ") : println(" ⛔ ") + +# passing a `Matrix` object +Λ, U, iterations, convergence=powIter(Matrix(H), 3, evalues=true) + +# passing a `LowerTriangular` object (must be a real matrix in this case) +L=𝕃(randP(10)) +Λ, U, iterations, convergence=powIter(L, 3, evalues=true) +``` """ function powerIterations(H::𝕄{T}, q::Int; @@ -1927,11 +1994,13 @@ powIter=powerIterations **See also**: [`choInv`](@ref). - ## Examples - using PosDefManifold - P=randP(5); - L=choL(P); - L*L'≈ P ? println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using PosDefManifold +P=randP(5); +L=choL(P); +L*L'≈ P ? println(" ⭐ ") : println(" ⛔ ") +``` """ function choL(P::ℍ{T}) where T<:RealOrComplex @@ -2012,34 +2081,36 @@ choL(D::𝔻{T}) where T<:Real = √D **See also**: [`choInv!`](@ref), [`choL`](@ref). - ## Examples - using PosDefManifold - n, t = 800, 6000 - etol = 1e-9 - Z=randn(t, n) - Y=Z'*Z - Yi=inv(Y) - - A, B=choInv!(copy(Y)) - norm(A*A'-Y)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") - norm(B*B'-Yi)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") - - A, D, B=choInv!(copy(Y); kind=:LDLt) - norm(Y-A*D*A')/√n < etol ? println(" ⭐ ") : println(" ⛔ ") - norm(Yi-B*inv(D)*B')/√n < etol ? println(" ⭐ ") : println(" ⛔ ") - - # repeat the test for complex matrices - Z=randn(ComplexF64, t, n) - Y=Z'*Z - Yi=inv(Y) - - A, B=choInv!(copy(Y)) - norm(A*A'-Y)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") - norm(B*B'-Yi)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") - - A, D, B=choInv!(copy(Y); kind=:LDLt) - norm(Y-A*D*A')/√n < etol ? println(" ⭐ ") : println(" ⛔ ") - norm(Yi-B*inv(D)*B')/√n < etol ? println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using PosDefManifold +n, t = 800, 6000 +etol = 1e-9 +Z=randn(t, n) +Y=Z'*Z +Yi=inv(Y) + +A, B=choInv!(copy(Y)) +norm(A*A'-Y)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") +norm(B*B'-Yi)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") + +A, D, B=choInv!(copy(Y); kind=:LDLt) +norm(Y-A*D*A')/√n < etol ? 
println(" ⭐ ") : println(" ⛔ ") +norm(Yi-B*inv(D)*B')/√n < etol ? println(" ⭐ ") : println(" ⛔ ") + +# repeat the test for complex matrices +Z=randn(ComplexF64, t, n) +Y=Z'*Z +Yi=inv(Y) + +A, B=choInv!(copy(Y)) +norm(A*A'-Y)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") +norm(B*B'-Yi)/√n < etol ? println(" ⭐ ") : println(" ⛔ ") + +A, D, B=choInv!(copy(Y); kind=:LDLt) +norm(Y-A*D*A')/√n < etol ? println(" ⭐ ") : println(" ⛔ ") +norm(Yi-B*inv(D)*B')/√n < etol ? println(" ⭐ ") : println(" ⛔ ") +``` """ choInv(P::AbstractArray{T}; diff --git a/src/riemannianGeometry.jl b/src/riemannianGeometry.jl index 0d93fd5..db017f7 100644 --- a/src/riemannianGeometry.jl +++ b/src/riemannianGeometry.jl @@ -1,7 +1,7 @@ # Unit riemannianGeometry.jl, part of PosDefManifold Package for julia language # # MIT License -# Copyright (c) 2019, Marco Congedo, CNRS, Grenobe, France: +# Copyright (c) 2019-21, Marco Congedo, CNRS, Grenobe, France: # https://sites.google.com/site/marcocongedo/home # # DESCRIPTION @@ -138,14 +138,16 @@ end **See also**: [`mean`](@ref). - ## Examples - using PosDefManifold - P=randP(10) - Q=randP(10) - # Wasserstein mean - M=geodesic(Wasserstein, P, Q, 0.5) - # extrapolate suing the Fisher metric - E=geodesic(Fisher, P, Q, 2) + **Examples** +```julia +using PosDefManifold +P=randP(10) +Q=randP(10) +# Wasserstein mean +M=geodesic(Wasserstein, P, Q, 0.5) +# extrapolate suing the Fisher metric +E=geodesic(Fisher, P, Q, 2) +``` """ function geodesic(metric::Metric, P::ℍ{T}, Q::ℍ{T}, a::Real) where T<:RealOrComplex @@ -301,21 +303,25 @@ end # function **See also**: [`distanceSqrMat`](@ref). 
- ## Examples (1) - using PosDefManifold - P=randP(10) - d=distanceSqr(Wasserstein, P) - e=distanceSqr(Fisher, P) - metric=Metric(Int(logdet0)) # or metric=logdet0 - s=string(metric) # check what is the current metric - f=distance²(metric, P) #using the alias distance² - - ## Examples (2) - using PosDefManifold - P=randP(10) - Q=randP(10) - d=distanceSqr(logEuclidean, P, Q) - e=distance²(Jeffrey, P, Q) + **Examples (1)** +```julia +using PosDefManifold +P=randP(10) +d=distanceSqr(Wasserstein, P) +e=distanceSqr(Fisher, P) +metric=Metric(Int(logdet0)) # or metric=logdet0 +s=string(metric) # check what is the current metric +f=distance²(metric, P) #using the alias distance² +``` + + **Examples (2)** +```julia +using PosDefManifold +P=randP(10) +Q=randP(10) +d=distanceSqr(logEuclidean, P, Q) +e=distance²(Jeffrey, P, Q) +``` """ function distanceSqr(metric::Metric, P::ℍ{T}) where T<:RealOrComplex @@ -511,20 +517,22 @@ distance(metric::Metric, D::𝔻{T}, E::𝔻{T}) where T<:Real = √(distance²( **See also**: [`laplacian`](@ref), [`laplacianEigenMaps`](@ref), [`spectralEmbedding`](@ref). - ## Examples - using PosDefManifold - # Generate a set of 8 random 10x10 SPD matrices - Pset=randP(10, 8) # or, using unicode: 𝐏=randP(10, 8) - # Compute the squared inter-distance matrix according to the log Euclidean metric. - # This is much faster as compared to the Fisher metric and in general - # it is a good approximation. - Δ²=distanceSqrMat(logEuclidean, Pset) - - # return a matrix of type Float64 - Δ²64=distanceSqrMat(Float64, logEuclidean, Pset) - - # Get the full matrix of inter-distances - fullΔ²=Hermitian(Δ², :L) + **Examples** +```julia +using PosDefManifold +# Generate a set of 8 random 10x10 SPD matrices +Pset=randP(10, 8) # or, using unicode: 𝐏=randP(10, 8) +# Compute the squared inter-distance matrix according to the log Euclidean metric. +# This is much faster as compared to the Fisher metric and in general +# it is a good approximation. 
+Δ²=distanceSqrMat(logEuclidean, Pset) + +# return a matrix of type Float64 +Δ²64=distanceSqrMat(Float64, logEuclidean, Pset) + +# Get the full matrix of inter-distances +fullΔ²=Hermitian(Δ², :L) +``` """ function distanceSqrMat(type::Type{T}, metric::Metric, 𝐏::ℍVector; @@ -668,17 +676,19 @@ distance²Mat=distanceSqrMat **See**: [distance](@ref). - ## Examples - using PosDefManifold - # Generate a set of 4 random 10x10 SPD matrices - Pset=randP(10, 4) # or, using unicode: 𝐏=randP(10, 4) - Δ=distanceMat(Fisher, Pset) + **Examples** +```julia +using PosDefManifold +# Generate a set of 4 random 10x10 SPD matrices +Pset=randP(10, 4) # or, using unicode: 𝐏=randP(10, 4) +Δ=distanceMat(Fisher, Pset) - # return a matrix of type Float64 - Δ64=distanceMat(Float64, Fisher, Pset) +# return a matrix of type Float64 +Δ64=distanceMat(Float64, Fisher, Pset) - # Get the full matrix of inter-distances - fullΔ=Hermitian(Δ, :L) +# Get the full matrix of inter-distances +fullΔ=Hermitian(Δ, :L) +``` """ distanceMat(type::Type{T}, metric::Metric, 𝐏::ℍVector; @@ -714,7 +724,7 @@ distanceMat(metric::Metric, 𝐏::ℍVector; If `densityInvariant=true` is used, then the density-invariant transformation is applied - ``W <- E^{-1}WE^{-1}`` + ``W \\leftarrow E^{-1}WE^{-1}`` where ``E`` is the diagonal matrix holding on the main diagonal the sum of the rows (or columns) of ``W``. @@ -747,24 +757,26 @@ distanceMat(metric::Metric, 𝐏::ℍVector; **See also**: [`distanceSqrMat`](@ref), [`laplacianEigenMaps`](@ref), [`spectralEmbedding`](@ref). 
- ## Examples - using PosDefManifold - # Generate a set of 4 random 10x10 SPD matrices - Pset=randP(10, 4) # or, using unicode: 𝐏=randP(10, 4) - Δ²=distanceSqrMat(Fisher, Pset) - Ω=laplacian(Δ²) - - # density-invariant Laplacian - Ω=laplacian(Δ²; densityInvariant=true) - - # increase the bandwidth - r=size(Δ², 1) - myεFactor=0.1 - med=Statistics.median([Δ²[i, j] for j=1:r-1 for i=j+1:r]) - ε=2*myεFactor*med - Ω=laplacian(Δ², ε; densityInvariant=true) + **Examples** +```julia +using PosDefManifold +# Generate a set of 4 random 10x10 SPD matrices +Pset=randP(10, 4) # or, using unicode: 𝐏=randP(10, 4) +Δ²=distanceSqrMat(Fisher, Pset) +Ω=laplacian(Δ²) + +# density-invariant Laplacian +Ω=laplacian(Δ²; densityInvariant=true) + +# increase the bandwidth +r=size(Δ², 1) +myεFactor=0.1 +med=Statistics.median([Δ²[i, j] for j=1:r-1 for i=j+1:r]) +ε=2*myεFactor*med +Ω=laplacian(Δ², ε; densityInvariant=true) +``` - """ +""" function laplacian(Δ²::𝕃{T}, epsilon::Real=0; densityInvariant=false) where T<:Real @@ -779,7 +791,7 @@ distanceMat(metric::Metric, 𝐏::ℍVector; end # product DWD with D diagonal, using only the lower triangle Ω of W - # overwrite Ω wirg the lower triangle of the result + # overwrite Ω with the lower triangle of the result DWD!(D::Vector{T}, Ω::𝕃{T}, r::Int) where T<:Real = for j=1:r, i=j:r Ω[i, j]*=D[i]*D[j] end @@ -855,15 +867,17 @@ distanceMat(metric::Metric, 𝐏::ℍVector; **See also**: [`distanceSqrMat`](@ref), [`laplacian`](@ref), [`spectralEmbedding`](@ref). 
- ## Examples - using PosDefManifold - # Generate a set of 4 random 10x10 SPD matrices - Pset=randP(10, 4) - Δ²=distanceSqrMat(Fisher, Pset) - Ω=laplacian(Δ²) - evalues, maps, iterations, convergence=laplacianEM(Ω, 2) - evalues, maps, iterations, convergence=laplacianEM(Ω, 2; verbose=true) - evalues, maps, iterations, convergence=laplacianEM(Ω, 2; verbose=true, maxiter=500) + **Examples** +```julia +using PosDefManifold +# Generate a set of 4 random 10x10 SPD matrices +Pset=randP(10, 4) +Δ²=distanceSqrMat(Fisher, Pset) +Ω=laplacian(Δ²) +evalues, maps, iterations, convergence=laplacianEM(Ω, 2) +evalues, maps, iterations, convergence=laplacianEM(Ω, 2; verbose=true) +evalues, maps, iterations, convergence=laplacianEM(Ω, 2; verbose=true, maxiter=500) +``` """ function laplacianEigenMaps(Ω::𝕃{T}, q::Int; @@ -944,33 +958,35 @@ laplacianEM=laplacianEigenMaps **See also**: [`distanceSqrMat`](@ref), [`laplacian`](@ref), [`laplacianEigenMaps`](@ref). - ## Examples - using PosDefManifold - # Generate a set of k random 10x10 SPD matrices - k=10 - Pset=randP(10, k) - evalues, maps, iter, conv=spectralEmbedding(Fisher, Pset, 2) - - # show convergence information - evalues, maps, iter, conv=spectralEmbedding(Fisher, Pset, 2; verbose=true) - - # use Float64 precision. 
- evalues, maps, iter, conv=spectralEmbedding(Float64, Fisher, Pset, 2) - - using Plots - # check eigevalues and eigenvectors - plot(diag(evalues)) - plot(maps[:, 1]) - plot!(maps[:, 2]) - plot!(maps[:, 3]) - - # plot the data in the embedded space - plot(maps[:, 1], maps[:, 2], seriestype=:scatter, title="Spectral Embedding", label="Pset") - - # try a different value of epsilon - evalues, maps, iter, conv=spEmb(Fisher, Pset, k-1, 0.01; maxiter=1000) - plot(maps[:, 1], maps[:, 2], seriestype=:scatter, title="Spectral Embedding", label="Pset") - # see the example in `Laplacian` function for more on this + **Examples** +```julia +using PosDefManifold +# Generate a set of k random 10x10 SPD matrices +k=10 +Pset=randP(10, k) +evalues, maps, iter, conv=spectralEmbedding(Fisher, Pset, 2) + +# show convergence information +evalues, maps, iter, conv=spectralEmbedding(Fisher, Pset, 2; verbose=true) + +# use Float64 precision. +evalues, maps, iter, conv=spectralEmbedding(Float64, Fisher, Pset, 2) + +using Plots +# check eigevalues and eigenvectors +plot(diag(evalues)) +plot(maps[:, 1]) +plot!(maps[:, 2]) +plot!(maps[:, 3]) + +# plot the data in the embedded space +plot(maps[:, 1], maps[:, 2], seriestype=:scatter, title="Spectral Embedding", label="Pset") + +# try a different value of epsilon +evalues, maps, iter, conv=spEmb(Fisher, Pset, k-1, 0.01; maxiter=1000) +plot(maps[:, 1], maps[:, 2], seriestype=:scatter, title="Spectral Embedding", label="Pset") +# see the example in `Laplacian` function for more on this +``` """ function spectralEmbedding(type::Type{T}, metric::Metric, 𝐏::ℍVector, q::Int, epsilon::Real=0; @@ -1114,33 +1130,35 @@ spEmb=spectralEmbedding **See**: [geodesic](@ref), [mean](@ref), [Fréchet mean](@ref). 
- ## Examples - using LinearAlgebra, Statistics, PosDefManifold - # Generate 2 random 3x3 SPD matrices - P=randP(3) - Q=randP(3) - M=mean(logdet0, P, Q) # (1) - M=mean(Euclidean, P, Q) # (1) - - # passing several matrices and associated weights listing them - # weights vector, does not need to be normalized - R=randP(3) - mean(Fisher, ℍVector([P, Q, R]); w=[1, 2, 3]) - - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) - weights=[1, 2, 3, 1] - # passing a vector of Hermitian matrices (ℍVector type) - M=mean(Euclidean, Pset; w=weights) # (2) weighted Euclidean mean - M=mean(Wasserstein, Pset) # (2) unweighted Wassertein mean - # display convergence information when using an iterative algorithm - M=mean(Fisher, Pset; verbose=true) - - # run multi-threaded when the number of matrices is high - using BenchmarkTools - Pset=randP(20, 160) - @benchmark(mean(logEuclidean, Pset; ⏩=false)) # single-threaded - @benchmark(mean(logEuclidean, Pset)) # multi-threaded + **Examples** +```julia +using LinearAlgebra, Statistics, PosDefManifold +# Generate 2 random 3x3 SPD matrices +P=randP(3) +Q=randP(3) +M=mean(logdet0, P, Q) # (1) +M=mean(Euclidean, P, Q) # (1) + +# passing several matrices and associated weights listing them +# weights vector, does not need to be normalized +R=randP(3) +mean(Fisher, ℍVector([P, Q, R]); w=[1, 2, 3]) + +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) +weights=[1, 2, 3, 1] +# passing a vector of Hermitian matrices (ℍVector type) +M=mean(Euclidean, Pset; w=weights) # (2) weighted Euclidean mean +M=mean(Wasserstein, Pset) # (2) unweighted Wassertein mean +# display convergence information when using an iterative algorithm +M=mean(Fisher, Pset; verbose=true) + +# run multi-threaded when the number of matrices is high +using BenchmarkTools +Pset=randP(20, 160) +@benchmark(mean(logEuclidean, Pset; ⏩=false)) # single-threaded +@benchmark(mean(logEuclidean, Pset)) # multi-threaded +``` """ mean(metric::Metric, P::ℍ{T}, 
Q::ℍ{T}) where T<:RealOrComplex = @@ -1351,45 +1369,48 @@ end # function **See also**: [`mean`](@ref). - ## Examples - using PosDefManifold - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) - # Generate a set of 40 random 4x4 SPD matrices - Qset=randP(3, 40) # or, using unicode: 𝐐=randP(3, 40) - # listing directly ℍVector objects - means(logEuclidean, ℍVector₂([Pset, Qset])) # or: means(logEuclidean, ℍVector₂([𝐏, 𝐐])) - # note that [𝐏, 𝐐] is actually a ℍVector₂ type object + **Examples** +```julia +using PosDefManifold +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) +# Generate a set of 40 random 4x4 SPD matrices +Qset=randP(3, 40) # or, using unicode: 𝐐=randP(3, 40) +# listing directly ℍVector objects +means(logEuclidean, ℍVector₂([Pset, Qset])) # or: means(logEuclidean, ℍVector₂([𝐏, 𝐐])) +# note that [𝐏, 𝐐] is actually a ℍVector₂ type object + +# creating and passing an object of ℍVector₂ type +sets=ℍVector₂(undef, 2) # or: 𝒫=ℍVector₂(undef, 2) +sets[1]=Pset # or: 𝒫[1]=𝐏 +sets[2]=Qset # or: 𝒫[2]=𝐐 +means(logEuclidean, sets) # or: means(logEuclidean, 𝒫) - # creating and passing an object of ℍVector₂ type - sets=ℍVector₂(undef, 2) # or: 𝒫=ℍVector₂(undef, 2) - sets[1]=Pset # or: 𝒫[1]=𝐏 - sets[2]=Qset # or: 𝒫[2]=𝐐 - means(logEuclidean, sets) # or: means(logEuclidean, 𝒫) +# going multi-threated - # going multi-threated +# first, create 20 sets of 200 50x50 SPD matrices +sets=ℍVector₂([randP(50, 200) for i=1:20]) - # first, create 20 sets of 200 50x50 SPD matrices - sets=ℍVector₂([randP(50, 200) for i=1:20]) +# How much computing time we save ? +# (example min time obtained with 4 threads & 4 BLAS threads) +using BenchmarkTools - # How much computing time we save ? 
- # (example min time obtained with 4 threads & 4 BLAS threads) - using BenchmarkTools +# non multi-threaded, mean with closed-form solution +@benchmark(means(logEuclidean, sets; ⏩=false)) # (6.196 s) - # non multi-threaded, mean with closed-form solution - @benchmark(means(logEuclidean, sets; ⏩=false)) # (6.196 s) +# multi-threaded, mean with closed-form solution +@benchmark(means(logEuclidean, sets)) # (1.897 s) - # multi-threaded, mean with closed-form solution - @benchmark(means(logEuclidean, sets)) # (1.897 s) +sets=ℍVector₂([randP(10, 200) for i=1:10]) - sets=ℍVector₂([randP(10, 200) for i=1:10]) +# non multi-threaded, mean with iterative solution +# wait a bit +@benchmark(means(Fisher, sets; ⏩=false)) # (4.672 s ) - # non multi-threaded, mean with iterative solution - # wait a bit - @benchmark(means(Fisher, sets; ⏩=false)) # (4.672 s ) +# multi-threaded, mean with iterative solution +@benchmark(means(Fisher, sets)) # (1.510 s) +``` - # multi-threaded, mean with iterative solution - @benchmark(means(Fisher, sets)) # (1.510 s) """ means(metric::Metric, 𝒫::ℍVector₂; ⏩=true) = ℍVector([mean(metric, 𝐏; ⏩=⏩) for 𝐏 in 𝒫]) @@ -1454,29 +1475,31 @@ means(metric::Metric, 𝒟::𝔻Vector₂; ⏩=true) = **See also**: [`powerMean`](@ref), [`wasMean`](@ref), [`mean`](@ref). 
- ## Examples - using LinearAlgebra, Statistics, PosDefManifold - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) + **Examples** +```julia +using LinearAlgebra, Statistics, PosDefManifold +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) - # weights vector, does not need to be normalized - weights=[1, 2, 3, 1] +# weights vector, does not need to be normalized +weights=[1, 2, 3, 1] - # unweighted mean - G = generalizedMean(Pset, 0.25) # or: G = generalizedMean(𝐏, 0.25) +# unweighted mean +G = generalizedMean(Pset, 0.25) # or: G = generalizedMean(𝐏, 0.25) - # weighted mean - G = generalizedMean(Pset, 0.5; w=weights) +# weighted mean +G = generalizedMean(Pset, 0.5; w=weights) - # with weights previously normalized we can set ✓w=false - weights=weights./sum(weights) - G = generalizedMean(Pset, 0.5; w=weights, ✓w=false) +# with weights previously normalized we can set ✓w=false +weights=weights./sum(weights) +G = generalizedMean(Pset, 0.5; w=weights, ✓w=false) - # run multi-threaded when the number of matrices is high - using BenchmarkTools - Pset=randP(20, 160) - @benchmark(generalizedMean(Pset; ⏩=false)) # single-threaded - @benchmark(generalizedMean(Pset)) # multi-threaded +# run multi-threaded when the number of matrices is high +using BenchmarkTools +Pset=randP(20, 160) +@benchmark(generalizedMean(Pset; ⏩=false)) # single-threaded +@benchmark(generalizedMean(Pset)) # multi-threaded +``` """ function generalizedMean(𝐏::Union{ℍVector, 𝔻Vector}, p::Real; @@ -1503,7 +1526,6 @@ function generalizedMean(𝐏::Union{ℍVector, 𝔻Vector}, p::Real; end # function - """ ``` geometricMean(𝐏::Union{ℍVector, 𝔻Vector}; @@ -1583,42 +1605,44 @@ See the [log Euclidean](@ref) metric. **See also**: [`geometricpMean`](@ref), [`powerMean`](@ref), [`wasMean`](@ref), [`logdet0Mean`](@ref), [`mean`](@ref). 
-## Examples - using LinearAlgebra, PosDefManifold - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) - - # unweighted mean - G, iter, conv = geometricMean(Pset) # or G, iter, conv = geometricMean(𝐏) - - # weights vector, does not need to be normalized - weights=[1, 2, 3, 1] - - # weighted mean - G, iter, conv = geometricMean(Pset, w=weights) - - # print the convergence at all iterations - G, iter, conv = geometricMean(Pset; verbose=true) - - # now suppose Pset has changed a bit, initialize with G to hasten convergence - Pset[1]=ℍ(Pset[1]+(randP(3)/100)) - G, iter, conv = geometricMean(Pset; w=weights, ✓w=true, verbose=true, init=G) - - # run multi-threaded when the number of matrices is high - using BenchmarkTools - Pset=randP(20, 120) - @benchmark(geometricMean(Pset; ⏩=false)) # single-threaded - @benchmark(geometricMean(Pset)) # multi-threaded - - # show the mean and the input points using spectral embedding - using Plots - k=80 - Pset=randP(20, k) - G, iter, conv = geometricMean(Pset) - push!(Pset, G) - Λ, U, iter, conv=spectralEmbedding(Fisher, Pset, 2; verbose=true) - plot(U[1:k, 1], U[1:k, 2], seriestype=:scatter, title="Spectral Embedding", label="Pset") - plot!(U[k+1:k+1, 1], U[k+1:k+1, 2], seriestype=:scatter, label="mean") +**Examples** +```julia +using LinearAlgebra, PosDefManifold +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) + +# unweighted mean +G, iter, conv = geometricMean(Pset) # or G, iter, conv = geometricMean(𝐏) + +# weights vector, does not need to be normalized +weights=[1, 2, 3, 1] + +# weighted mean +G, iter, conv = geometricMean(Pset, w=weights) + +# print the convergence at all iterations +G, iter, conv = geometricMean(Pset; verbose=true) + +# now suppose Pset has changed a bit, initialize with G to hasten convergence +Pset[1]=ℍ(Pset[1]+(randP(3)/100)) +G, iter, conv = geometricMean(Pset; w=weights, ✓w=true, verbose=true, init=G) + +# 
run multi-threaded when the number of matrices is high +using BenchmarkTools +Pset=randP(20, 120) +@benchmark(geometricMean(Pset; ⏩=false)) # single-threaded +@benchmark(geometricMean(Pset)) # multi-threaded + +# show the mean and the input points using spectral embedding +using Plots +k=80 +Pset=randP(20, k) +G, iter, conv = geometricMean(Pset) +push!(Pset, G) +Λ, U, iter, conv=spectralEmbedding(Fisher, Pset, 2; verbose=true) +plot(U[1:k, 1], U[1:k, 2], seriestype=:scatter, title="Spectral Embedding", label="Pset") +plot!(U[k+1:k+1, 1], U[k+1:k+1, 2], seriestype=:scatter, label="mean") +``` """ function geometricMean( 𝐏::ℍVector; @@ -1757,59 +1781,61 @@ gMean=geometricMean **See also**: [`geometricMean`](@ref), [`powerMean`](@ref), [`wasMean`](@ref), [`logdet0Mean`](@ref), [`mean`](@ref). - ## Examples - using LinearAlgebra, PosDefManifold, Plots - - # This examples show that this algorithm is more robust to outliers - # as compared to the standard geometric mean algorithm - - # Generate a set of 100 random 10x10 SPD matrices - Pset=randP(10, 100) - - # Get the usual geometric mean for comparison - G, iter1, conv1 = geometricMean(Pset, verbose=true) - - # change p to observe how the convergence behavior changes accordingly - # Get the median (default) - H, iter2, conv2 = geometricpMean(Pset, verbose=true) - # Get the p-mean for p=0.25 - H, iter2, conv2 = geometricpMean(Pset, 0.25, verbose=true) - - println(iter1, " ", iter2); println(conv1, " ", conv2) - - # move the first matrix in Pset to possibly create an otlier - Pset[1]=geodesic(Fisher, G, Pset[1], 3) - G1, iter1, conv1 = geometricMean(Pset, verbose=true) - H1, iter2, conv2 = geometricpMean(Pset, 0.25, verbose=true) - println(iter1, " ", iter2); println(conv1, " ", conv2) - - # collect the geometric and p-means, before and after the - # introduction of the outier in vector of Hermitian matrices `S`. 
- S=HermitianVector([G, G1, H, H1]) - - # check the interdistance matrix Δ² to verify that the geometric mean - # after the introduction of the outlier (``G1``) is farther away from - # the geometric mean as compared to how much ``H1`` is further away - # from ``H``, i.e., that element (4,3) is much smaller than element (2,1). - Δ²=distance²Mat(Float64, Fisher, S) - - # how far are all these matrices from all the others? - fullΔ²=Hermitian(Δ², :L) - dist=[sum(fullΔ²[:, i]) for i=1:size(fullΔ², 1)] - - # plot the matrices in `S` using spectral embedding. - using Plots - Λ, U, iter, conv = laplacianEM(laplacian(Δ²), 3; verbose=true) - plot([U[1, 1]], [U[1, 2]], seriestype=:scatter, label="g-mean") - plot!([U[2, 1]], [U[2, 2]], seriestype=:scatter, label="g-mean outlier") - plot!([U[3, 1]], [U[3, 2]], seriestype=:scatter, label="p-mean") - plot!([U[4, 1]], [U[4, 2]], seriestype=:scatter, label="p-mean outlier") - - # estimate how much you gain running the algorithm in multi-threaded mode - using BenchmarkTools - Pset=randP(20, 120) - @benchmark(geometricpMean(Pset; ⏩=true)) # single-threaded - @benchmark(geometricpMean(Pset)) # multi-threaded + **Examples** +```julia +using LinearAlgebra, PosDefManifold, Plots + +# This examples show that this algorithm is more robust to outliers +# as compared to the standard geometric mean algorithm + +# Generate a set of 100 random 10x10 SPD matrices +Pset=randP(10, 100) + +# Get the usual geometric mean for comparison +G, iter1, conv1 = geometricMean(Pset, verbose=true) + +# change p to observe how the convergence behavior changes accordingly +# Get the median (default) +H, iter2, conv2 = geometricpMean(Pset, verbose=true) +# Get the p-mean for p=0.25 +H, iter2, conv2 = geometricpMean(Pset, 0.25, verbose=true) + +println(iter1, " ", iter2); println(conv1, " ", conv2) + +# move the first matrix in Pset to possibly create an otlier +Pset[1]=geodesic(Fisher, G, Pset[1], 3) +G1, iter1, conv1 = geometricMean(Pset, verbose=true) +H1, 
iter2, conv2 = geometricpMean(Pset, 0.25, verbose=true) +println(iter1, " ", iter2); println(conv1, " ", conv2) + +# collect the geometric and p-means, before and after the +# introduction of the outier in vector of Hermitian matrices `S`. +S=HermitianVector([G, G1, H, H1]) + +# check the interdistance matrix Δ² to verify that the geometric mean +# after the introduction of the outlier (``G1``) is farther away from +# the geometric mean as compared to how much ``H1`` is further away +# from ``H``, i.e., that element (4,3) is much smaller than element (2,1). +Δ²=distance²Mat(Float64, Fisher, S) + +# how far are all these matrices from all the others? +fullΔ²=Hermitian(Δ², :L) +dist=[sum(fullΔ²[:, i]) for i=1:size(fullΔ², 1)] + +# plot the matrices in `S` using spectral embedding. +using Plots +Λ, U, iter, conv = laplacianEM(laplacian(Δ²), 3; verbose=true) +plot([U[1, 1]], [U[1, 2]], seriestype=:scatter, label="g-mean") +plot!([U[2, 1]], [U[2, 2]], seriestype=:scatter, label="g-mean outlier") +plot!([U[3, 1]], [U[3, 2]], seriestype=:scatter, label="p-mean") +plot!([U[4, 1]], [U[4, 2]], seriestype=:scatter, label="p-mean outlier") + +# estimate how much you gain running the algorithm in multi-threaded mode +using BenchmarkTools +Pset=randP(20, 120) +@benchmark(geometricpMean(Pset; ⏩=true)) # single-threaded +@benchmark(geometricpMean(Pset)) # multi-threaded +``` """ function geometricpMean(𝐏::ℍVector, p::Real=goldeninv; @@ -1949,32 +1975,35 @@ gpMean=geometricpMean **See also**: [`powerMean`](@ref), [`wasMean`](@ref), [`logdet0Mean`](@ref), [`mean`](@ref). 
- ## Examples - using LinearAlgebra, PosDefManifold - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) + **Examples** +```julia +using LinearAlgebra, PosDefManifold +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) - # unweighted mean - G, iter, conv = logdet0Mean(Pset) # or G, iter, conv = logdet0Mean(𝐏) +# unweighted mean +G, iter, conv = logdet0Mean(Pset) # or G, iter, conv = logdet0Mean(𝐏) - # weights vector, does not need to be normalized - weights=[1, 2, 3, 1] +# weights vector, does not need to be normalized +weights=[1, 2, 3, 1] - # weighted mean - G, iter, conv = logdet0Mean(Pset, w=weights) +# weighted mean +G, iter, conv = logdet0Mean(Pset, w=weights) - # print the convergence at all iterations - G, iter, conv = logdet0Mean(Pset; w=weights, verbose=true) +# print the convergence at all iterations +G, iter, conv = logdet0Mean(Pset; w=weights, verbose=true) - # suppose Pset has changed a bit; initialize with G to hasten convergence - Pset[1]=ℍ(Pset[1]+(randP(3)/100)) - G, iter, conv = logdet0Mean(Pset; w=weights, ✓w=false, verbose=true, init=G) +# suppose Pset has changed a bit; initialize with G to hasten convergence +Pset[1]=ℍ(Pset[1]+(randP(3)/100)) +G, iter, conv = logdet0Mean(Pset; w=weights, ✓w=false, verbose=true, init=G) + +# estimate how much you gain running the algorithm in multi-threaded mode +using BenchmarkTools +Pset=randP(20, 120) +@benchmark(logdet0Mean(Pset; ⏩=false)) # single-threaded +@benchmark(logdet0Mean(Pset)) # multi-threaded +``` - # estimate how much you gain running the algorithm in multi-threaded mode - using BenchmarkTools - Pset=randP(20, 120) - @benchmark(logdet0Mean(Pset; ⏩=false)) # single-threaded - @benchmark(logdet0Mean(Pset)) # multi-threaded """ function logdet0Mean(𝐏::Union{ℍVector, 𝔻Vector}; w::Vector=[], @@ -2096,32 +2125,34 @@ ld0Mean=logdet0Mean **See also**: [`powerMean`](@ref), [`wasMean`](@ref), 
[`logdet0Mean`](@ref), [`mean`](@ref). - ## Examples - using LinearAlgebra, PosDefManifold - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) + **Examples** +```julia +using LinearAlgebra, PosDefManifold +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) - # unweighted mean - G, iter, conv = wasMean(Pset) # or: G, iter, conv = wasMean(𝐏) +# unweighted mean +G, iter, conv = wasMean(Pset) # or: G, iter, conv = wasMean(𝐏) - # weights vector, does not need to be normalized - weights=[1, 2, 3, 1] +# weights vector, does not need to be normalized +weights=[1, 2, 3, 1] - # weighted mean - G, iter, conv = wasMean(Pset; w=weights) +# weighted mean +G, iter, conv = wasMean(Pset; w=weights) - # print the convergence at all iterations - G, iter, conv = wasMean(Pset; w=weights, verbose=true) +# print the convergence at all iterations +G, iter, conv = wasMean(Pset; w=weights, verbose=true) - # suppose 𝐏 has changed a bit; initialize with G to hasten convergence - Pset[1]=ℍ(Pset[1]+(randP(3)/100)) - G, iter, conv = wasMean(Pset; w=weights, verbose=true, init=G) +# suppose 𝐏 has changed a bit; initialize with G to hasten convergence +Pset[1]=ℍ(Pset[1]+(randP(3)/100)) +G, iter, conv = wasMean(Pset; w=weights, verbose=true, init=G) - # estimate how much you gain running the algorithm in multi-threaded mode - using BenchmarkTools - Pset=randP(20, 120) - @benchmark(wasMean(Pset; ⏩=false)) # single-threaded - @benchmark(wasMean(Pset)) # multi-threaded +# estimate how much you gain running the algorithm in multi-threaded mode +using BenchmarkTools +Pset=randP(20, 120) +@benchmark(wasMean(Pset; ⏩=false)) # single-threaded +@benchmark(wasMean(Pset)) # multi-threaded +``` """ function wasMean(𝐏::ℍVector; @@ -2266,32 +2297,34 @@ wasMean(𝐃::𝔻Vector; **See also**: [`generalizedMean`](@ref), [`wasMean`](@ref), [`logdet0Mean`](@ref), [`mean`](@ref). 
- ## Examples - using LinearAlgebra, PosDefManifold - # Generate a set of 4 random 3x3 SPD matrices - Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) + **Examples** +```julia +using LinearAlgebra, PosDefManifold +# Generate a set of 4 random 3x3 SPD matrices +Pset=randP(3, 4) # or, using unicode: 𝐏=randP(3, 4) - # unweighted mean - G, iter, conv = powerMean(Pset, 0.5) # or G, iter, conv = powerMean(𝐏, 0.5) +# unweighted mean +G, iter, conv = powerMean(Pset, 0.5) # or G, iter, conv = powerMean(𝐏, 0.5) - # weights vector, does not need to be normalized - weights=[1, 2, 3, 1] +# weights vector, does not need to be normalized +weights=[1, 2, 3, 1] - # weighted mean - G, iter, conv = powerMean(Pset, 0.5; w=weights) +# weighted mean +G, iter, conv = powerMean(Pset, 0.5; w=weights) - # print the convergence at all iterations - G, iter, conv = powerMean(Pset, 0.5; w=weights, verbose=true) +# print the convergence at all iterations +G, iter, conv = powerMean(Pset, 0.5; w=weights, verbose=true) - # suppose 𝐏 has changed a bit; initialize with G to hasten convergence - Pset[1]=ℍ(Pset[1]+(randP(3)/100)) - G, iter, conv = powerMean(Pset, 0.5; w=weights, verbose=true, init=G) +# suppose 𝐏 has changed a bit; initialize with G to hasten convergence +Pset[1]=ℍ(Pset[1]+(randP(3)/100)) +G, iter, conv = powerMean(Pset, 0.5; w=weights, verbose=true, init=G) - # estimate how much you gain running the algorithm in multi-threaded mode - using BenchmarkTools - Pset=randP(20, 120) - @benchmark(powerMean(Pset, 0.5; ⏩=false)) # single-threaded - @benchmark(powerMean(Pset, 0.5)) # multi-threaded +# estimate how much you gain running the algorithm in multi-threaded mode +using BenchmarkTools +Pset=randP(20, 120) +@benchmark(powerMean(Pset, 0.5; ⏩=false)) # single-threaded +@benchmark(powerMean(Pset, 0.5)) # multi-threaded +``` """ function powerMean(𝐏::ℍVector, p::Real; @@ -2414,26 +2447,29 @@ powerMean(𝐃::𝔻Vector, p::Real; Since the inductive mean uses the [`geodesic`](@ref) function, it 
is not available for the Von Neumann metric. -## Examples - # A set of 100 matrices for which we want to compute the mean - 𝐏=randP(10, 100) +**Examples** +```julia +# A set of 100 matrices for which we want to compute the mean +𝐏=randP(10, 100) - 𝐏1=ℍVector(collect(𝐏[i] for i=1:50)) # first 50 - 𝐏2=ℍVector(collect(𝐏[i] for i=51:100)) # last 50 +𝐏1=ℍVector(collect(𝐏[i] for i=1:50)) # first 50 +𝐏2=ℍVector(collect(𝐏[i] for i=51:100)) # last 50 - # inductive mean of the whole set 𝐏 - G=inductiveMean(Fisher, 𝐏) +# inductive mean of the whole set 𝐏 +G=inductiveMean(Fisher, 𝐏) - # mean using the usual gradient descent algorithm - H, iter, conv=geometricMean(𝐏) +# mean using the usual gradient descent algorithm +H, iter, conv=geometricMean(𝐏) - # inductive mean of 𝐏 given only 𝐏2, - # the number of matrices in 𝐏1 and the mean of 𝐏1 - G2=inductiveMean(Fisher, 𝐏2, length(𝐏1), mean(Fisher, 𝐏1)) +# inductive mean of 𝐏 given only 𝐏2, +# the number of matrices in 𝐏1 and the mean of 𝐏1 +G2=inductiveMean(Fisher, 𝐏2, length(𝐏1), mean(Fisher, 𝐏1)) + +# average error +norm(G-H)/(dim(G, 1)^2) +norm(G2-H)/(dim(G, 1)^2) +``` - # average error - norm(G-H)/(dim(G, 1)^2) - norm(G2-H)/(dim(G, 1)^2) """ function inductiveMean(metric::Metric, 𝐏::ℍVector) if metric ∉ (VonNeumann) @@ -2459,11 +2495,13 @@ end where ``\\lambda_(min)`` and ``\\lambda_(max)`` are the extremal generalized eigenvalues of ``P`` and ``Q``. -## Examples +**Examples** +```julia +P=randP(3) +Q=randP(3) +M=midrange(Fisher, P, Q) +``` - P=randP(3) - Q=randP(3) - M=midrange(Fisher, P, Q) """ function midrange(metric::Metric, P::ℍ{T}, Q::ℍ{T}) where T<:RealOrComplex if metric == Fisher @@ -2526,20 +2564,22 @@ indMean=inductiveMean **See also**: [`vecP`](@ref), [`parallelTransport`](@ref). 
- ## Examples - using PosDefManifold - (1) - P=randP(3) - Q=randP(3) - metric=Fisher - G=mean(metric, P, Q) - # projecting P at the base point given by the geometric mean of P and Q - S=logMap(metric, P, G) - - (2) - Pset=randP(3, 4) - # projecting all matrices in Pset at the base point given by their geometric mean. - Sset=logMap(Fisher, Pset, mean(Fisher, Pset)) + **Examples** +```julia +using PosDefManifold +(1) +P=randP(3) +Q=randP(3) +metric=Fisher +G=mean(metric, P, Q) +# projecting P at the base point given by the geometric mean of P and Q +S=logMap(metric, P, G) + +(2) +Pset=randP(3, 4) +# projecting all matrices in Pset at the base point given by their geometric mean. +Sset=logMap(Fisher, Pset, mean(Fisher, Pset)) +``` """ function logMap(metric::Metric, P::ℍ{T}, G::ℍ{T}) where T<:RealOrComplex @@ -2592,24 +2632,26 @@ end The inverse operation is [`logMap`](@ref). - ## Examples - (1) - using PosDefManifold, LinearAlgebra - P=randP(3) - Q=randP(3) - G=mean(Fisher, P, Q) - # projecting P on the tangent space at the Fisher mean base point G - S=logMap(Fisher, P, G) - # projecting back onto the manifold - P2=expMap(Fisher, S, G) - - (2) - Pset=randP(3, 4) - # projecting all matrices in Pset at the base point given by their geometric mean. - G=mean(Fisher, Pset) - Sset=logMap(Fisher, Pset, G) - # projecting back onto the manifold - Pset2=expMap(Fisher, Sset, G) + **Examples** +```julia +(1) +using PosDefManifold, LinearAlgebra +P=randP(3) +Q=randP(3) +G=mean(Fisher, P, Q) +# projecting P on the tangent space at the Fisher mean base point G +S=logMap(Fisher, P, G) +# projecting back onto the manifold +P2=expMap(Fisher, S, G) + +(2) +Pset=randP(3, 4) +# projecting all matrices in Pset at the base point given by their geometric mean. 
+G=mean(Fisher, Pset)
+Sset=logMap(Fisher, Pset, G)
+# projecting back onto the manifold
+Pset2=expMap(Fisher, Sset, G)
+```
 """
 function expMap(metric::Metric, S::ℍ{T}, G::ℍ{T}) where T<:RealOrComplex @@ -2662,17 +2704,20 @@ end cannot be reverted by the [`matP`](@ref), that is, in this case the matrix is 'stuck' in the tangent space. - ## Examples - using PosDefManifold - P=randP(3) - Q=randP(3) - G=mean(Fisher, P, Q) - # projecting P at the base point given by the geometric mean of P and Q - S=logMap(Fisher, P, G) - # vectorize S - v=vecP(S) - # vectorize onlt the first two columns of S - v=vecP(S; range=1:2) + **Examples** +```julia +using PosDefManifold +P=randP(3) +Q=randP(3) +G=mean(Fisher, P, Q) +# projecting P at the base point given by the geometric mean of P and Q +S=logMap(Fisher, P, G) +# vectorize S +v=vecP(S) +# vectorize only the first two columns of S +v=vecP(S; range=1:2) +``` + """ vecP(S::Union{ℍ{T}, Symmetric{R}}; range::UnitRange=1:size(S, 2)) where T<:RealOrComplex where R<:Real = @@ -2695,21 +2740,23 @@ vecP(S::Union{ℍ{T}, Symmetric{R}}; **To Do**: This function may be rewritten more efficiently.
- ## Examples - using PosDefManifold - P=randP(3) - Q=randP(3) - G=mean(Fishr, P, Q) - # projecting P at onto the tangent space at the Fisher mean base point - S=logMap(Fisher, P, G) - # vectorize S - v=vecP(S) - # Rotate the vector by an orthogonal matrix - n=Int(size(S, 1)*(size(S, 1)+1)/2) - U=randP(n) - z=U*v - # Get the point in the tangent space - S=matP(z) + **Examples** +```julia +using PosDefManifold +P=randP(3) +Q=randP(3) +G=mean(Fisher, P, Q) +# projecting P onto the tangent space at the Fisher mean base point +S=logMap(Fisher, P, G) +# vectorize S +v=vecP(S) +# Rotate the vector by an orthogonal matrix +n=Int(size(S, 1)*(size(S, 1)+1)/2) +U=randP(n) +z=U*v +# Get the point in the tangent space +S=matP(z) +``` """ function matP(ς::Vector{T}) where T<:RealOrComplex n=Int((-1+√(1+8*length(ς)))/2) # Size of the matrix whose vectorization vector v has size length(v) @@ -2772,47 +2819,49 @@ The transport reduces in this case to: **See also**: [`logMap`](@ref), [`expMap`](@ref), [`vecP`](@ref), [`matP`](@ref). - ## Examples - using PosDefManifold - - (1) - P=randP(3) - Q=randP(3) - G=mean(Fisher, P, Q) - - # i. projecting P onto the tangent space at base-point G - S=logMap(Fisher, P, G) - # ii. parallel transport S to the tangent space at base-point Q - S_=parallelTransport(S, G, Q) - # iii. projecting back into the manifold at base-point Q - P_=expMap(Fisher, S_, Q) - - # i., ii. and iii. can be done simply by - PP_=parallelTransport(P, G, Q) - # check - P_≈PP_ ? println(" ⭐ ") : println(" ⛔ ") - - (2) - P=randP(3) - Q=randP(3) - G=mean(Fisher, P, Q) - # transport to the tangent space at base-point the identity - PP_=parallelTransport(P, G) - - (3) - Pset=randP(3, 4) - Q=randP(3) - G=mean(Fisher, Pset) - # trasport at once all matrices in Pset - Pset2=parallelTransport(Pset, G, Q) - - (4) - Pset=randP(3, 4) - G=mean(Fisher, Pset) - # recenter all matrices so to have mean=I - Pset2=parallelTransport(Pset, G) - # check - mean(Fisher, Pset2) ≈ I ? 
println(" ⭐ ") : println(" ⛔ ") + **Examples** +```julia +using PosDefManifold + +(1) +P=randP(3) +Q=randP(3) +G=mean(Fisher, P, Q) + +# i. projecting P onto the tangent space at base-point G +S=logMap(Fisher, P, G) +# ii. parallel transport S to the tangent space at base-point Q +S_=parallelTransport(S, G, Q) +# iii. projecting back into the manifold at base-point Q +P_=expMap(Fisher, S_, Q) + +# i., ii. and iii. can be done simply by +PP_=parallelTransport(P, G, Q) +# check +P_≈PP_ ? println(" ⭐ ") : println(" ⛔ ") + +(2) +P=randP(3) +Q=randP(3) +G=mean(Fisher, P, Q) +# transport to the tangent space at base-point the identity +PP_=parallelTransport(P, G) + +(3) +Pset=randP(3, 4) +Q=randP(3) +G=mean(Fisher, Pset) +# transport at once all matrices in Pset +Pset2=parallelTransport(Pset, G, Q) + +(4) +Pset=randP(3, 4) +G=mean(Fisher, Pset) +# recenter all matrices so to have mean=I +Pset2=parallelTransport(Pset, G) +# check +mean(Fisher, Pset2) ≈ I ? println(" ⭐ ") : println(" ⛔ ") +``` """ function parallelTransport(S::ℍ{T}, P::ℍ{T}, Q::ℍ{T}) where T<:RealOrComplex @@ -2887,14 +2936,17 @@ pt=parallelTransport can be solved using Julia package [Manopt](https://github.com/kellertuer/Manopt.jl).
- ## Examples - using PosDefManifold - P=randP(3) - Q=randP(3) - # argmin problem - U=procrustes(P, Q) - # argmax problem - V=procrustes(P, Q, "max") + **Examples** +```julia +using PosDefManifold +P=randP(3) +Q=randP(3) +# argmin problem +U=procrustes(P, Q) +# argmax problem +V=procrustes(P, Q, "max") +``` + """ function procrustes(P::ℍ{T}, Q::ℍ{T}, extremum="min") where T<:RealOrComplex Pup=eigvecs(P) diff --git a/src/signalProcessing.jl b/src/signalProcessing.jl index 07523c2..f46c1f8 100644 --- a/src/signalProcessing.jl +++ b/src/signalProcessing.jl @@ -1,7 +1,7 @@ # Unit signalProcessing.jl, part of PosDefManifold Package for julia language # # MIT License -# Copyright (c) 2019, Marco Congedo, CNRS, Grenobe, France: +# Copyright (c) 2019-21, Marco Congedo, CNRS, Grenoble, France: # https://sites.google.com/site/marcocongedo/home # # DESCRIPTION @@ -21,10 +21,13 @@ It uses the *Wilson–Hilferty transformation* for `df`>=20 - see [chi-squared distribution](https://en.wikipedia.org/wiki/Chi-squared_distribution). - ## Examples - using Plots, PosDefManifold - chi=[randχ²(2) for i=1:10000] - histogram(chi) # needs Plots package. Check your plots back-end. + **Examples** +```julia +using Plots, PosDefManifold +chi=[randχ²(2) for i=1:10000] +histogram(chi) # needs Plots package. Check your plots back-end. +``` + """ randChi²(df::Int) = df<20 ? sum(randn()^2 for i=1:df) : df*(1.0-2.0/(9.0*df)+randn()*sqrt2/sqrt(9.0*df))^3 @@ -47,12 +50,14 @@ randχ²=randChi² **See also**: `randU` ([`randUnitaryMat`](@ref)), `randP` ([`randPosDefMat`](@ref)). - ## Examples - using Plots, PosDefManifold - λ=sort(randλ(10), rev=true) - σ=sort(randλ(10, eigvalsSNR=10), rev=true) - plot(λ) # needs Plots package. Check your plots back-end. - plot!(σ) # needs Plots package. Check your plots back-end. + **Examples** +```julia +using Plots, PosDefManifold +λ=sort(randλ(10), rev=true) +σ=sort(randλ(10, eigvalsSNR=10), rev=true) +plot(λ) # needs Plots package. Check your plots back-end. 
+plot!(σ) # needs Plots package. Check your plots back-end. +``` """ randEigvals(n::Int; @@ -116,18 +121,20 @@ randλ=randEigvals **See also**: `randλ` ([`randEigvals`](@ref)), `randU` ([`randUnitaryMat`](@ref)), `randP` ([`randPosDefMat`](@ref)), `randχ²` ([`randChi²`](@ref)). - ## Examples - using PosDefManifold - # (1) - n=3; - U=randU(n); - Λ=randΛ(n, eigvalsSNR=100) - P=U*Λ*U' # generate an SPD matrix - using LinearAlgebra - Q=ℍ(U*Λ*U') # generate an SPD matrix and flag it as 'Hermitian' - - # (2) generate an array of 10 matrices of simulated eigenvalues - Dvec=randΛ(n, 10) + **Examples** +```julia +using PosDefManifold +# (1) +n=3; +U=randU(n); +Λ=randΛ(n, eigvalsSNR=100) +P=U*Λ*U' # generate an SPD matrix +using LinearAlgebra +Q=ℍ(U*Λ*U') # generate an SPD matrix and flag it as 'Hermitian' + +# (2) generate an array of 10 matrices of simulated eigenvalues +Dvec=randΛ(n, 10) +``` """ randEigvalsMat(n::Int; @@ -159,14 +166,16 @@ randΛ=randEigvalsMat **See also**: `randΛ` ([`randEigvals`](@ref)), `randP` ([`randPosDefMat`](@ref)). - ## Examples - using PosDefManifold - n=3; - X=randU(n)*sqrt(randΛ(n))*randU(n)' # (1) generate a random square real matrix + **Examples** +```julia +using PosDefManifold +n=3; +X=randU(n)*sqrt(randΛ(n))*randU(n)' # (1) generate a random square real matrix - U=randU(ComplexF64, n); - V=randU(ComplexF64, n); - Y=U*sqrt(randΛ(n))*V' # (2) generate a random square complex matrix +U=randU(ComplexF64, n); +V=randU(ComplexF64, n); +Y=U*sqrt(randΛ(n))*V' # (2) generate a random square complex matrix +``` """ randUnitaryMat(n::Int)=mgs(randn(Float64, n, n)) @@ -251,14 +260,16 @@ randU=randUnitaryMat **See also**: the aforementioned paper and `randΛ` ([`randEigvalsMat`](@ref)). 
- ## Examples - using PosDefManifold - R=randP(10, df=10, eigvalsSNR=1000) # 1 SDP Matrix of size 10x10 #(1) - H=randP(ComplexF64, 5, eigvalsSNR=10) # 1 Hermitian Matrix of size 5x5 # (2) - ℛ=randP(10, 1000, eigvalsSNR=100) # 1000 SPD Matrices of size 10x10 # (3) - using Plots - heatmap(Matrix(ℛ[1]), yflip=true, c=:bluesreds) - ℋ=randP(ComplexF64, 20, 1000) # 1000 Hermitian Matrices of size 20x20 # (4) + **Examples** +```julia +using PosDefManifold +R=randP(10, df=10, eigvalsSNR=1000) # 1 SDP Matrix of size 10x10 #(1) +H=randP(ComplexF64, 5, eigvalsSNR=10) # 1 Hermitian Matrix of size 5x5 # (2) +ℛ=randP(10, 1000, eigvalsSNR=100) # 1000 SPD Matrices of size 10x10 # (3) +using Plots +heatmap(Matrix(ℛ[1]), yflip=true, c=:bluesreds) +ℋ=randP(ComplexF64, 20, 1000) # 1000 Hermitian Matrices of size 20x20 # (4) +``` """ randPosDefMat(n::Int; @@ -356,32 +367,34 @@ randP=randPosDefMat **See also**: `randP` ([`randPosDefMat`](@ref)). - ## Examples - # (1) - using LinearAlgebra, Plots, PosDefManifold - n=3 - U=randU(n) - # in Q we will write two matrices, - # the unregularized and regularized matrix side by side - Q=Matrix{Float64}(undef, n, n*2) - P=ℍ(U*Diagonal(randn(n).^2)*U') # generate a real 3x3 positive matrix - for i=1:n, j=1:n Q[i, j]=P[i, j] end - regularize!(P, SNR=5) - for i=1:n, j=1:n Q[i, j+n]=P[i, j] end # the regularized matrix is on the right - heatmap(Matrix(Q), yflip=true, c=:bluesreds) - - # (2) - 𝐏=[ℍ(U*Diagonal(randn(3).^2)*U') for i=1:5] # 5 real 3x3 positive matrices - regularize!(𝐏, SNR=1000) - - ## Run a test - using LinearAlgebra - 𝐏=randP(10, 100, SNR=1000); # 100 real Hermitian matrices - signalVar=sum(tr(P) for P in 𝐏); - regularize!(𝐏, SNR=1000); - signalPlusNoiseVar=sum(tr(P) for P in 𝐏); - output_snr=signalVar/(signalPlusNoiseVar-signalVar) - # output_snr should be approx. 
equal to 1000 + **Examples** +```julia +# (1) +using LinearAlgebra, Plots, PosDefManifold +n=3 +U=randU(n) +# in Q we will write two matrices, +# the unregularized and regularized matrix side by side +Q=Matrix{Float64}(undef, n, n*2) +P=ℍ(U*Diagonal(randn(n).^2)*U') # generate a real 3x3 positive matrix +for i=1:n, j=1:n Q[i, j]=P[i, j] end +regularize!(P, SNR=5) +for i=1:n, j=1:n Q[i, j+n]=P[i, j] end # the regularized matrix is on the right +heatmap(Matrix(Q), yflip=true, c=:bluesreds) + +# (2) +𝐏=[ℍ(U*Diagonal(randn(3).^2)*U') for i=1:5] # 5 real 3x3 positive matrices +regularize!(𝐏, SNR=1000) + +## Run a test +using LinearAlgebra +𝐏=randP(10, 100, SNR=1000); # 100 real Hermitian matrices +signalVar=sum(tr(P) for P in 𝐏); +regularize!(𝐏, SNR=1000); +signalPlusNoiseVar=sum(tr(P) for P in 𝐏); +output_snr=signalVar/(signalPlusNoiseVar-signalVar) +# output_snr should be approx. equal to 1000 +``` """ function regularize!(P::ℍ; @@ -415,12 +428,15 @@ end If ``X`` is wide or square (r<=c) return ``XX^H/c``. If ``X`` is tall (r>c) return ``X^HX/r``. - ## Examples - using PosDefManifold - X=randn(5, 150); - G=gram(X) # => G=X*X'/150 - X=randn(100, 2); - F=gram(X); # => G=X'*X/100 + **Examples** +```julia +using PosDefManifold +X=randn(5, 150); +G=gram(X) # => G=X*X'/150 +X=randn(100, 2); +F=gram(X); # => G=X'*X/100 +``` + """ function gram(X::𝕄{T}) where T<:RealOrComplex (r, c)=size(X) @@ -439,23 +455,26 @@ trade(P::ℍ{T}) where T<:RealOrComplex `P` must be flagged by julia as `Hermitian`. See [typecasting matrices](@ref). 
- ### Examples - using PosDefManifold - P=randP(3) - t, d=trade(P) # equivalent to (t, d)=trade(P) - - # TraDe plot - using Plots - k=100 - n=10 - 𝐏=randP(n, k, SNR=1000); # 100 real Hermitian matrices - x=Vector{Float64}(undef, k) - y=Vector{Float64}(undef, k) - for i=1:k - x[i], y[i] = trade(𝐏[i]) - end - x=log.(x./n) - y=log.(y) - plot(x, y, seriestype=:scatter) + **Examples** +```julia +using PosDefManifold +P=randP(3) +t, d=trade(P) # equivalent to (t, d)=trade(P) +
+# TraDe plot
+using Plots
+k=100
+n=10
+𝐏=randP(n, k, SNR=1000); # 100 real Hermitian matrices
+x=Vector{Float64}(undef, k)
+y=Vector{Float64}(undef, k)
+for i=1:k
+    x[i], y[i] = trade(𝐏[i])
+end
+x=log.(x./n)
+y=log.(y)
+plot(x, y, seriestype=:scatter)
+```
+
 """ trade(P::ℍ{T}) where T<:RealOrComplex = (tr(P), det(P)) diff --git a/src/statistics.jl b/src/statistics.jl index c375cec..73d0bfd 100644 --- a/src/statistics.jl +++ b/src/statistics.jl @@ -1,7 +1,7 @@ # Unit statistics.jl, part of PosDefManifold Package for julia language # # MIT License -# Copyright (c) 2019, Marco Congedo, CNRS, Grenobe, France: +# Copyright (c) 2019-21, Marco Congedo, CNRS, Grenoble, France: # https://sites.google.com/site/marcocongedo/home # # DESCRIPTION @@ -33,9 +33,11 @@ `` p_i=\\frac{\\textrm{e}^{c_i}}{\\sum_{i=1}^{k}\\textrm{e}^{c_i}} ``. - ## Examples - χ=[1.0, 2.3, 0.4, 5.0] - π=softmax(χ) + **Examples** +```julia +χ=[1.0, 2.3, 0.4, 5.0] +π=softmax(χ) +``` """ softmax(χ::Vector{T}) where T<:Real = exp.(χ) ./ 𝚺(exp.(χ)) @@ -54,14 +56,16 @@ softmax(χ::Vector{T}) where T<:Real = exp.(χ) ./ 𝚺(exp.(χ)) is in unit *statistics.jl*, while the code for all the others is in unit *riemannianGeometry.jl*. - ## Examples - using PosDefManifold - # Generate 10 random numbers distributed as a chi-square with 2 df. 
- ν=[randχ²(2) for i=1:10] - arithmetic_mean=mean(Euclidean, ν) - geometric_mean=mean(Fisher, ν) - harmonic_mean=mean(invEuclidean, ν) - harmonic_mean<=geometric_mean<=arithmetic_mean # AGH inequality + **Examples** +```julia +using PosDefManifold +# Generate 10 random numbers distributed as a chi-square with 2 df. +ν=[randχ²(2) for i=1:10] +arithmetic_mean=mean(Euclidean, ν) +geometric_mean=mean(Fisher, ν) +harmonic_mean=mean(invEuclidean, ν) +harmonic_mean<=geometric_mean<=arithmetic_mean # AGH inequality +``` """ function mean(metric::Metric, ν::Vector{T}) where T<:RealOrComplex @@ -104,13 +108,15 @@ end If `corrected` is `true`, then the sum is scaled with ``k-1``, whereas if it is `false` the sum is scaled with ``k``. - ## Examples - using PosDefManifold - # Generate 10 random numbers distributed as a chi-square with 2 df. - ν=[randχ²(2) for i=1:10] - arithmetic_sd=std(Euclidean, ν) # mean not provided - geometric_mean=mean(Fisher, ν) - geometric_sd=std(Fisher, ν, mean=geometric_mean) # mean provided + **Examples** +```julia +using PosDefManifold +# Generate 10 random numbers distributed as a chi-square with 2 df. +ν=[randχ²(2) for i=1:10] +arithmetic_sd=std(Euclidean, ν) # mean not provided +geometric_mean=mean(Fisher, ν) +geometric_sd=std(Fisher, ν, mean=geometric_mean) # mean provided +``` """ function std(metric::Metric, ν::Vector{T}; diff --git a/src/test.jl b/src/test.jl index d72fd35..ce3404f 100644 --- a/src/test.jl +++ b/src/test.jl @@ -1,7 +1,7 @@ # Unit test.jl, part of PosDefManifold Package for julia language # # MIT License -# Copyright (c) 2019, Marco Congedo, CNRS, Grenobe, France: +# Copyright (c) 2019-21, Marco Congedo, CNRS, Grenoble, France: # https://sites.google.com/site/marcocongedo/home # # DESCRIPTION