-
Notifications
You must be signed in to change notification settings - Fork 117
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Move NDTensors module into ITensors module
- Loading branch information
Showing
48 changed files
with
8,406 additions
and
39 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,7 +1,3 @@ | ||
[deps] | ||
BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" | ||
ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5" | ||
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf" | ||
|
||
[compat] | ||
NDTensors = "= 0.1.32" |
4 changes: 0 additions & 4 deletions
4
src/ContractionSequenceOptimization/ContractionSequenceOptimization.jl
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,10 +1,186 @@ | ||
# NDTensors: low-level tensor storage backend (dense, diagonal, block sparse,
# combiner) being moved into the ITensors package by this commit.
module NDTensors

using Base.Threads
using Compat
using Dictionaries
using Random
using LinearAlgebra
using StaticArrays
using HDF5
using Requires
using Strided
using TimerOutputs
using TupleTools

using Base: @propagate_inbounds, ReshapedArray

using Base.Cartesian: @nexprs

using Base.Threads: @spawn

#####################################
# Imports and exports
#
include("exports.jl")
include("imports.jl")

#####################################
# DenseTensor and DiagTensor
#
# Definitions to move to NDTensors
include("aliasstyle.jl")
include("similar.jl")
include("tupletools.jl")
include("dims.jl")
include("tensorstorage.jl")
include("tensor.jl")
include("contraction_logic.jl")
include("dense.jl")
include("symmetric.jl")
include("linearalgebra.jl")
include("diag.jl")
include("combiner.jl")
include("truncate.jl")
include("svd.jl")

#####################################
# BlockSparseTensor
#
include("blocksparse/blockdims.jl")
include("blocksparse/block.jl")
include("blocksparse/blockoffsets.jl")
include("blocksparse/blocksparse.jl")
include("blocksparse/blocksparsetensor.jl")
include("blocksparse/diagblocksparse.jl")
include("blocksparse/combiner.jl")
include("blocksparse/linearalgebra.jl")

#####################################
# Empty
#
include("empty.jl")

#####################################
# Deprecations
#
include("deprecated.jl")

#####################################
# A global timer used with TimerOutputs.jl
#

const timer = TimerOutput()

#####################################
# Optional block sparse multithreading
#

include("blas_get_num_threads.jl")

# Global toggle for threaded block sparse contractions; flipped by
# `enable_threaded_blocksparse`/`disable_threaded_blocksparse` below.
const _using_threaded_blocksparse = Ref(false)

# Builds the shared docstring text for the three public threading functions,
# parameterized on the module name so it reads correctly after the move
# into ITensors.
function enable_threaded_blocksparse_docstring(module_name)
  return """
  $(module_name).enable_threaded_blocksparse()
  $(module_name).disable_threaded_blocksparse()
  Enable or disable block sparse multithreading.
  Returns the current state of `$(module_name).using_threaded_blocksparse()`, i.e. `true` if threaded block sparse was previously enabled, and `false` if threaded block sparse was previously disabled. This is helpful for turning block sparse threading on or off temporarily. For example:
  ```julia
  using_threaded_blocksparse = $(module_name).enable_threaded_blocksparse()
  # Run code that you want to be threaded
  if !using_threaded_blocksparse
    $(module_name).disable_threaded_blocksparse()
  end
  ```
  Note that you need to start Julia with multiple threads. For example, to start Julia with 4 threads, you can use any of the following:
  ```
  \$ julia --threads=4
  \$ julia -t 4
  \$ JULIA_NUM_THREADS=4 julia
  ```
  In addition, we have found that it is best to disable `BLAS` and `Strided` multithreading when using block sparse multithreading. You can do that with the commands `using LinearAlgebra; BLAS.set_num_threads(1)` and `$(module_name).Strided.disable_threads()`.
  See also: `$(module_name).enable_threaded_blocksparse`, `$(module_name).disable_threaded_blocksparse`, `$(module_name).using_threaded_blocksparse`.
  """
end

# Turns block sparse threading on, warning about configurations that are
# known to hurt performance (single Julia thread, multi-threaded BLAS or
# Strided). Returns the *previous* state so callers can restore it.
function _enable_threaded_blocksparse()
  current_using_threaded_blocksparse = using_threaded_blocksparse()
  if !current_using_threaded_blocksparse
    if Threads.nthreads() == 1
      println(
        "WARNING: You are trying to enable block sparse multithreading, but you have started Julia with only a single thread. You can start Julia with `N` threads with `julia -t N`, and check the number of threads Julia can use with `Threads.nthreads()`. Your system has $(Sys.CPU_THREADS) threads available to use, which you can determine by running `Sys.CPU_THREADS`.\n",
      )
    end
    if blas_get_num_threads() > 1 && Threads.nthreads() > 1
      println(
        "WARNING: You are enabling block sparse multithreading, but BLAS $(BLAS.vendor()) is currently set to use $(blas_get_num_threads()) threads. When using block sparse multithreading, we recommend setting BLAS to use only a single thread, otherwise you may see suboptimal performance. You can set it with `using LinearAlgebra; BLAS.set_num_threads(1)`.\n",
      )
    end
    if Strided.get_num_threads() > 1
      println(
        "WARNING: You are enabling block sparse multithreading, but Strided.jl is currently set to use $(Strided.get_num_threads()) threads for performing dense tensor permutations. When using block sparse multithreading, we recommend setting Strided.jl to use only a single thread, otherwise you may see suboptimal performance. You can set it with `NDTensors.Strided.disable_threads()` and see the current number of threads it is using with `NDTensors.Strided.get_num_threads()`.\n",
      )
    end
    _using_threaded_blocksparse[] = true
  end
  return current_using_threaded_blocksparse
end

# Turns block sparse threading off; returns the previous state.
function _disable_threaded_blocksparse()
  current_using_threaded_blocksparse = using_threaded_blocksparse()
  if current_using_threaded_blocksparse
    _using_threaded_blocksparse[] = false
  end
  return current_using_threaded_blocksparse
end

"""
$(enable_threaded_blocksparse_docstring(@__MODULE__))
"""
using_threaded_blocksparse() = _using_threaded_blocksparse[]

"""
$(enable_threaded_blocksparse_docstring(@__MODULE__))
"""
enable_threaded_blocksparse() = _enable_threaded_blocksparse()

"""
$(enable_threaded_blocksparse_docstring(@__MODULE__))
"""
disable_threaded_blocksparse() = _disable_threaded_blocksparse()

#####################################
# Optional TBLIS contraction backend
#

# Global toggle for the TBLIS backend; set automatically in `__init__`
# when the TBLIS package is loaded (via Requires.jl).
const _using_tblis = Ref(false)

using_tblis() = _using_tblis[]

function enable_tblis()
  _using_tblis[] = true
  return nothing
end

function disable_tblis()
  _using_tblis[] = false
  return nothing
end

# `storage` forwards to `store`; presumably an alias introduced for the
# ITensors-facing API — TODO confirm against tensor.jl.
storage(T::Tensor) = store(T)
# Conditionally load optional contraction backends when the corresponding
# packages are present (Requires.jl hooks run at module load time).
function __init__()
  @require TBLIS = "48530278-0828-4a49-9772-0f3830dfa1e9" begin
    enable_tblis()
    include("tblis.jl")
  end
  @require Octavian = "6fd5a793-0b7e-452c-907f-f8bfe9c57db4" begin
    include("octavian.jl")
  end
end

#function NDTensors.blockdims(inds::IndexSet, block::Tuple)
#  return ntuple(i -> blockdim(inds,block,i), ValLength(block))
#end
end # module NDTensors
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
""" | ||
AliasStyle | ||
A trait that determines the aliasing behavior of a constructor or function, | ||
for example whether or not a function or constructor might return an alias | ||
of one of the inputs (i.e. the output shares memory with one of the inputs, | ||
such that modifying the output also modifies the input or vice versa). | ||
See also [`AllowAlias`](@ref) and [`NeverAlias`](@ref). | ||
""" | ||
abstract type AliasStyle end | ||
|
||
""" | ||
AllowAlias | ||
Singleton type used in a constructor or function indicating | ||
that the constructor or function may return an alias of the input data when | ||
possible, i.e. the output may share data with the input. For a constructor | ||
`T(AllowAlias(), args...)`, this would act like `Base.convert(T, args...)`. | ||
See also [`AliasStyle`](@ref) and [`NeverAlias`](@ref). | ||
""" | ||
struct AllowAlias <: AliasStyle end | ||
|
||
""" | ||
NeverAlias | ||
Singleton type used in a constructor or function indicating | ||
that the constructor or function will never return an alias of the input data, | ||
i.e. the output will never share data with one of the inputs. | ||
See also [`AliasStyle`](@ref) and [`AllowAlias`](@ref). | ||
""" | ||
struct NeverAlias <: AliasStyle end |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,40 @@ | ||
|
||
# | ||
# blas_get_num_threads() | ||
# Get the number of BLAS threads | ||
# This can be replaced by BLAS.get_num_threads() in Julia v1.6 | ||
# | ||
|
||
# Like `BLAS.vendor`, but fills in a guess for some cases where the
# vendor would be reported as `:unknown`: on macOS an unknown vendor is
# assumed to be Apple's vecLib/Accelerate BLAS.
function guess_vendor()
  vendor = BLAS.vendor()
  if vendor == :unknown && Sys.isapple()
    vendor = :osxblas
  end
  return vendor
end
|
||
# Parse the environment variable named `key` as an `Int`, returning
# `nothing` when it is unset or not a valid integer.
function _tryparse_env_int(key)
  return tryparse(Int, get(ENV, key, ""))
end
|
||
# Public entry point: number of threads the loaded BLAS library is
# configured to use, or `nothing` if it cannot be determined.
# Can be replaced by `BLAS.get_num_threads()` in Julia v1.6.
function blas_get_num_threads()::Union{Int,Nothing}
  return _get_num_threads()
end
|
||
# Determine how many threads the loaded BLAS library is using, dispatching
# on the (guessed) vendor since each library exposes the setting
# differently. Returns `nothing` when the vendor is unknown or the value
# cannot be read; `maxlog = 1` keeps the warnings from repeating.
function _get_num_threads(; _blas=guess_vendor())::Union{Int,Nothing}
  if _blas === :openblas || _blas === :openblas64
    # `@blasfunc` appends the ILP64 (`64_`) suffix when required.
    return Int(ccall((BLAS.@blasfunc(openblas_get_num_threads), BLAS.libblas), Cint, ()))
  elseif _blas === :mkl
    return Int(ccall((:mkl_get_max_threads, BLAS.libblas), Cint, ()))
  elseif _blas === :osxblas
    # Apple's vecLib is configured via an environment variable rather
    # than a C API.
    key = "VECLIB_MAXIMUM_THREADS"
    nt = _tryparse_env_int(key)
    if nt === nothing
      # Fall through to the generic warning/`nothing` return below.
      @warn "Failed to read environment variable $key" maxlog = 1
    else
      return nt
    end
  else
    @assert _blas === :unknown
  end
  @warn "Could not get number of BLAS threads. Returning `nothing` instead." maxlog = 1
  return nothing
end
Oops, something went wrong.