Skip to content

Commit

Permalink
Merge pull request #178 from slimgroup/cuda-ctx
Browse files Browse the repository at this point in the history
Init openacc ctx at init to avoid cuda conflicts
  • Loading branch information
mloubout committed Mar 30, 2023
2 parents ba767e3 + dca34c9 commit 783c142
Show file tree
Hide file tree
Showing 5 changed files with 12 additions and 2 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "JUDI"
uuid = "f3b833dc-6b2e-5b9c-b940-873ed6319979"
authors = ["Philipp Witte, Mathias Louboutin"]
version = "3.2.2"
version = "3.2.3"

This comment has been minimized.

Copy link
@mloubout

mloubout Mar 30, 2023

Author Member

[deps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
Expand Down
5 changes: 5 additions & 0 deletions src/JUDI.jl
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,11 @@ function __init__()
# Initialize lock at session start
PYLOCK[] = ReentrantLock()

if get(ENV, "DEVITO_PLATFORM", "") == "nvidiaX"
@info "Initializing openacc/openmp offloading"
devito_model(Model((21, 21, 21), (10., 10., 10.), (0., 0., 0.), randn(Float32, 21, 21, 21)), Options())
end

@require Zygote="e88e6eb3-aa80-5325-afca-941959d7151f" begin
Zygote.unbroadcast(x::AbstractArray, x̄::LazyPropagation) = Zygote.unbroadcast(x, eval_prop(x̄))
end
Expand Down
4 changes: 3 additions & 1 deletion src/TimeModeling/LinearOperators/basics.jl
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,10 @@ setindex!(S::AbstractSize, v, I::Symbol) = setindex!(S.dims, v, I)
# Forward iteration of an AbstractSize to its underlying `dims` collection.
iterate(S::AbstractSize) = iterate(S.dims)
iterate(S::AbstractSize, state) = iterate(S.dims, state)

# Allow `i < S` comparisons by comparing `i` against the total sample count.
Base.isless(i::Int64, a::AbstractSize) = isless(i, nsamples(a))

# Numeric conversion: an AbstractSize converts to its total number of samples.
convert(::Type{T}, S::AbstractSize) where T<:Number = convert(T, nsamples(S))
(::Type{T})(S::AbstractSize) where T<:Number = convert(T, nsamples(S))
# NOTE(review): this diff narrows the constructor's T from Number to
# Union{Integer, AbstractFloat}; both the removed and added lines appear here.
(::Type{T})(S::AbstractSize) where T<:Union{Integer, AbstractFloat} = convert(T, nsamples(S))
# Make range construction `i:S` work by first converting the size to type T.
(c::Colon)(i::T, S::AbstractSize) where T = c(i, T(S))

Base.keys(S::AbstractSize) = keys(S.dims)
Expand Down
1 change: 1 addition & 0 deletions src/TimeModeling/Preconditioners/ModelPreconditioners.jl
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ end
# Apply the top-mute operator to the wrapped data, rebuilding the container type.
matvec(D::TopMute{T, N}, x::PhysicalParameter{T}) where {T, N} = PhysicalParameter(x, matvec(D, x.data))
# Mute each source's weights independently, one entry per source.
matvec(D::TopMute{T, N}, x::judiWeights{T}) where {T, N} = judiWeights{T}(x.nsrc, [matvec(D, x.data[s]) for s=1:x.nsrc])
# Plain vectors are reshaped to the water-bottom grid shape (trailing dim free),
# muted, then flattened back.
matvec(D::TopMute{T, N}, x::Vector{T}) where {T, N} = vec(matvec(D, reshape(x, size(D.wb)..., :)))
# Transpose application falls through to matvec (operator acts as its own transpose here).
matvec_T(D::TopMute{T, N}, x) where {T, N} = matvec(D, x)

# Real diagonal operator
conj(I::TopMute{T, N}) where {T, N} = I
Expand Down
2 changes: 2 additions & 0 deletions src/TimeModeling/Types/ModelStructure.jl
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ function similar(bc::Broadcast.Broadcasted{Broadcast.ArrayStyle{PhysicalParamete
PhysicalParameter(Ad, A.d, A.o)
end

# `similar` without an explicit eltype: default to the element type of the first
# PhysicalParameter found among the broadcast arguments.
similar(bc::Broadcast.Broadcasted{Broadcast.ArrayStyle{PhysicalParameter}}) = similar(bc, eltype(find_pm(bc)))

"`A = find_pm(As)` returns the first PhysicalParameter among the arguments."
# Recurse into nested broadcasted expressions via their argument tuples.
find_pm(bc::Base.Broadcast.Broadcasted) = find_pm(bc.args)
find_pm(args::Tuple) = find_pm(find_pm(args[1]), Base.tail(args))
Expand Down

1 comment on commit 783c142

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Registration pull request created: JuliaRegistries/General/80641

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v3.2.3 -m "<description of version>" 783c142c79e006c261c7cb834ce040e428b6617f
git push origin v3.2.3

Please sign in to comment.