From cacbf40b382d155756e890b3f49c1a6579de92ce Mon Sep 17 00:00:00 2001
From: Michael Reed <18372368+chakravala@users.noreply.github.com>
Date: Sat, 15 Jun 2019 10:53:05 -0400
Subject: [PATCH] transitioned to AbstractTensors v0.1.6 operations

---
 Project.toml   |  2 ++
 src/algebra.jl | 49 +++++++++++++++++++++++++++++++++++++++----------
 2 files changed, 41 insertions(+), 10 deletions(-)

diff --git a/Project.toml b/Project.toml
index cf96532..73ee733 100644
--- a/Project.toml
+++ b/Project.toml
@@ -16,7 +16,9 @@ StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"

 [compat]
 julia = "1"
+Reduce = "1.1"
 DirectSum = "0.2.3"
+AbstractTensors = "0.1.6"

 [extras]
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
diff --git a/src/algebra.jl b/src/algebra.jl
index 7016213..5b57c08 100644
--- a/src/algebra.jl
+++ b/src/algebra.jl
@@ -2,9 +2,9 @@
 # This file is part of Grassmann.jl. It is licensed under the GPL license
 # Grassmann Copyright (C) 2019 Michael Reed

-import Base: +, -, *, ^, /, inv
+import Base: +, -, *, ^, /, inv, <, >, <<, >>, >>>
 import AbstractLattices: ∧, ∨, dist
-import AbstractTensors: ⊗
+import AbstractTensors: ⊗, ⊛, ⊙, ⊠, ⨼, ⨽, ⋆
 import DirectSum: dualcheck, tangent, hasinforigin, hasorigininf
 export tangent

@@ -100,6 +100,10 @@ end
     return out
 end

+# Hodge star ★
+
+const complementright = ⋆
+
 ## complement

 export complementleft, complementright, ⋆
@@ -117,10 +121,6 @@ for side ∈ (:left,:right)
     end
 end

-# Hodge star ★
-
-const ⋆ = complementright
-
 ## reverse

 import Base: reverse, conj, ~
@@ -177,7 +177,7 @@ end

 ## exterior product

-export ∧, ∨
+export ∧, ∨, ⊗

 @pure function ∧(a::Basis{V},b::Basis{V}) where V
     A,B = bits(a), bits(b)
@@ -201,6 +201,10 @@ end
 @inline ∧(a::TensorAlgebra{V},b::UniformScaling{T}) where {V,T<:Field} = a∧V(b)
 @inline ∧(a::UniformScaling{T},b::TensorAlgebra{V}) where {V,T<:Field} = V(a)∧b

+for op ∈ (:⊗,:^)
+    @eval $op(a::TensorAlgebra{V},b::TensorAlgebra{V}) where V = a∧b
+end
+
 ## regressive product: (L = grade(a) + grade(b); (-1)^(L*(L-ndims(V)))*⋆(⋆(a)∧⋆(b)))

 @pure function ∨(a::Basis{V},b::Basis{V}) where V
@@ -221,6 +225,8 @@ end
 @inline ∨(a::TensorAlgebra{V},b::UniformScaling{T}) where {V,T<:Field} = a∨V(b)
 @inline ∨(a::UniformScaling{T},b::TensorAlgebra{V}) where {V,T<:Field} = V(a)∨b

+Base.:&(a::TensorAlgebra{V},b::TensorAlgebra{V}) where V = a∨b
+
 ## interior product: a ∨ ⋆(b)

 import LinearAlgebra: dot, ⋅
@@ -240,6 +246,12 @@ function dot(a::X,b::Y) where {X<:TensorTerm{V},Y<:TensorTerm{V}} where V
     return SValue{V}(typeof(V) <: Signature ? (g ? -v : v) : g*v,Basis{V}(C))
 end

+export ⨼, ⨽
+
+⨼(a::TensorAlgebra{V},b::TensorAlgebra{V}) where V = ⋆(a)∨b
+<(a::TensorAlgebra{V},b::TensorAlgebra{V}) where V = ⋆(a)∨b
+>(a::TensorAlgebra{V},b::TensorAlgebra{V}) where V = dot(a,b)
+
 ## cross product

 import LinearAlgebra: cross
@@ -277,6 +289,13 @@ function ⊠(x...)
     return out/F
 end

+<<(a::TensorAlgebra{V},b::TensorAlgebra{V}) where V = a⊙b
+>>(a::TensorAlgebra{V},b::TensorAlgebra{V}) where V = a⊠b
+
+## sandwich product
+
+>>>(x::TensorAlgebra{V},y::TensorAlgebra{V}) where V = x * y * ~x
+
 ### Product Algebra Constructor

 function generate_product_algebra(Field=Field,VEC=:mvec,MUL=:*,ADD=:+,SUB=:-,CONJ=:conj)
@@ -949,12 +968,20 @@ end

 @pure inv(b::Basis) = parityreverse(grade(b)) ? -1*b : b
 for Value ∈ MSV
-    @eval function inv(b::$Value{V,G,B,T}) where {V,G,B,T}
-        $Value{V,G,B}((parityreverse(G) ? -one(T) : one(T))/value(b))
+    @eval begin
+        function inv(b::$Value{V,G,B,T}) where {V,G,B,T}
+            $Value{V,G,B}((parityreverse(G) ? -one(T) : one(T))/value(b))
+        end
+        rem(b::$Value{V,G,B,T},m) where {V,G,B,T} = $Value{V,G,B}(rem(value(b),m))
+        div(b::$Value{V,G,B,T},m) where {V,G,B,T} = $Value{V,G,B}(div(value(b),m))
     end
 end
 for Blade ∈ MSB
-    @eval inv(a::$Blade) = (A=~a; A/(A⋅a))
+    @eval begin
+        inv(a::$Blade) = (A=~a; A/(A⋅a))
+        rem(a::$Blade{T,V,G},m) where {T,V,G} = $Blade{T,V,G}(rem.(value(a),m))
+        div(a::$Blade{T,V,G},m) where {T,V,G} = $Blade{T,V,G}(div.(value(a),m))
+    end
 end
 for Term ∈ (:TensorTerm,MSB...,:MultiVector,:MultiGrade)
     @eval begin
@@ -963,6 +990,8 @@ for Term ∈ (:TensorTerm,MSB...,:MultiVector,:MultiGrade)
     @eval begin
         @pure /(a::$Term,b::UniformScaling) = a*inv(vectorspace(a)(b))
     end
 end
+rem(a::MultiVector{T,V},m) where {T,V} = MultiVector{T,V}(rem.(value(a),m))
+div(a::MultiVector{T,V},m) where {T,V} = MultiVector{T,V}(div.(value(a),m))

 ## exponential & logarithm function
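
Usage note (not part of the patch): all of the new Base operator overloads above apply only when both arguments belong to the same TensorAlgebra{V}. The sketch below is illustrative only; it assumes the basis"+++" string macro from the Grassmann.jl README, and a and b are hypothetical sample elements. Each comment restates the corresponding definition introduced in this diff.

    using Grassmann    # assumed: exports basis"..." along with ∧, ∨, ⊗, ⋆
    basis"+++"         # assumed: defines V and the basis blades v1, v2, v3, v12, ...

    a = v1 + 2v2       # hypothetical sample vector
    b = v12            # hypothetical sample bivector

    a ⊗ b              # ⊗ (and ^) fall back to the exterior product a ∧ b
    a & b              # & is the regressive product a ∨ b
    a < b              # < (and ⨼) give the left contraction ⋆(a) ∨ b
    a > b              # > forwards to the interior product dot(a,b)
    a >> b             # >> is a ⊠ b (and << is a ⊙ b)
    a >>> b            # sandwich product, defined here as a * b * ~a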