5 changes: 4 additions & 1 deletion Project.toml
@@ -32,6 +32,7 @@ AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c"
ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
DLFP8Types = "f4c16678-4a16-415b-82ef-ed337c5d6c7c"
FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
Float8s = "81dfefd7-55b0-40c6-a251-db853704e186"
GPUCompiler = "61eb1bfa-7361-4325-ad38-22787b887f55"
KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c"
@@ -54,6 +55,7 @@ ReactantAbstractFFTsExt = "AbstractFFTs"
ReactantArrayInterfaceExt = "ArrayInterface"
ReactantCUDAExt = ["CUDA", "GPUCompiler", "KernelAbstractions", "LLVM"]
ReactantDLFP8TypesExt = "DLFP8Types"
+ReactantFillArraysExt = "FillArrays"
ReactantFloat8sExt = "Float8s"
ReactantKernelAbstractionsExt = "KernelAbstractions"
ReactantMPIExt = "MPI"
@@ -77,6 +79,7 @@ Downloads = "1.6"
EnumX = "1"
Enzyme = "0.13.49"
EnzymeCore = "0.8.11"
FillArrays = "1.13"
Float8s = "0.1"
Functors = "0.5"
GPUArraysCore = "0.2"
@@ -103,9 +106,9 @@ Scratch = "1.2"
Sockets = "1.10"
SpecialFunctions = "2.4"
Statistics = "1.10"
unzip_jll = "6"
YaoBlocks = "0.13, 0.14"
julia = "1.10"
unzip_jll = "6"

[extras]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
121 changes: 121 additions & 0 deletions ext/ReactantFillArraysExt.jl
@@ -0,0 +1,121 @@
module ReactantFillArraysExt

using Reactant: Reactant, TracedUtils, TracedRNumber, Ops, Sharding, unwrapped_eltype
using ReactantCore: ReactantCore
using FillArrays: FillArrays, AbstractFill, Fill, Ones, Zeros, OneElement
using GPUArraysCore: @allowscalar

# Tracing
Reactant._parent_type(T::Type{<:AbstractFill}) = T
Reactant._parent_type(T::Type{<:OneElement}) = T

for AT in (Fill, Ones, Zeros)
    @eval Base.@nospecializeinfer function Reactant.traced_type_inner(
        @nospecialize(FA::Type{$(AT){T,N,Axes}}),
        seen,
        mode::Reactant.TraceMode,
        @nospecialize(track_numbers::Type),
        @nospecialize(sharding),
        @nospecialize(runtime)
    ) where {T,N,Axes}
        # T will be a number so we need to trace it
        return $(AT){
            Reactant.traced_type_inner(T, seen, mode, Number, sharding, runtime),N,Axes
        }
    end
end

Base.@nospecializeinfer function Reactant.make_tracer(
    seen, @nospecialize(prev::Fill{T,N,Axes}), @nospecialize(path), mode; kwargs...
) where {T,N,Axes}
    return Fill(
        Reactant.make_tracer(
            seen, prev.value, (path..., 1), mode; kwargs..., track_numbers=Number
        ),
        prev.axes,
    )
end

Base.@nospecializeinfer function Reactant.make_tracer(
    seen,
    @nospecialize(prev::Ones{T,N,Axes}),
    @nospecialize(path),
    mode;
    @nospecialize(sharding = Sharding.NoSharding()),
    @nospecialize(runtime = nothing),
    kwargs...,
) where {T,N,Axes}
    return Ones(
        Reactant.traced_type_inner(T, seen, mode, Number, sharding, runtime), prev.axes
    )
end

Base.@nospecializeinfer function Reactant.make_tracer(
    seen,
    @nospecialize(prev::Zeros{T,N,Axes}),
    @nospecialize(path),
    mode;
    @nospecialize(sharding = Sharding.NoSharding()),
    @nospecialize(runtime = nothing),
    kwargs...,
) where {T,N,Axes}
    return Zeros(
        Reactant.traced_type_inner(T, seen, mode, Number, sharding, runtime), prev.axes
    )
end

Base.@nospecializeinfer function Reactant.traced_type_inner(
    @nospecialize(FA::Type{OneElement{T,N,I,A}}),
    seen,
    mode::Reactant.TraceMode,
    @nospecialize(track_numbers::Type),
    @nospecialize(sharding),
    @nospecialize(runtime)
) where {T,N,I,A}
    # T will be a number so we need to trace it
    return OneElement{
        Reactant.traced_type_inner(T, seen, mode, Number, sharding, runtime),N,I,A
    }
end

Base.@nospecializeinfer function Reactant.make_tracer(
    seen, @nospecialize(prev::OneElement{T,N,I,A}), @nospecialize(path), mode; kwargs...
) where {T,N,I,A}
    return OneElement(
        Reactant.make_tracer(
            seen, prev.val, (path..., 1), mode; kwargs..., track_numbers=Number
        ),
        prev.ind,
        prev.axes,
    )
end

# Materialize into a dense array
function ReactantCore.materialize_traced_array(x::Fill{T}) where {T}
    return TracedUtils.broadcast_to_size(
        TracedUtils.promote_to(TracedRNumber{unwrapped_eltype(T)}, x.value), size(x)
    )
end

function ReactantCore.materialize_traced_array(x::Ones{T}) where {T}
    return TracedUtils.broadcast_to_size(unwrapped_eltype(T)(1), size(x))
end

function ReactantCore.materialize_traced_array(x::Zeros{T}) where {T}
    return TracedUtils.broadcast_to_size(unwrapped_eltype(T)(0), size(x))
end

function ReactantCore.materialize_traced_array(x::OneElement{T}) where {T}
    y = TracedUtils.broadcast_to_size(unwrapped_eltype(T)(0), size(x))
    @allowscalar setindex!(y, x.val, x.ind...)
    return y
end

# Overload `similar` so these lazy fill types don't fall back to slow generic paths
for AT in (Fill, Ones, Zeros, OneElement)
    @eval function Base.similar(x::$AT{<:TracedRNumber}, ::Type{T}, dims::Dims) where {T}
        return TracedUtils.broadcast_to_size(unwrapped_eltype(T)(0), dims)
    end
end

end
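
Note (illustrative, not part of the diff): the extension is exercised roughly as in the integration test added below. A minimal sketch, assuming broadcasting on a traced fill routes through the `materialize_traced_array` overloads above:

    using Reactant, FillArrays

    x = Ones(Float32, 4, 5)        # lazy fill; no dense storage
    rx = Reactant.to_rarray(x)     # traced via the make_tracer overloads above
    f(a) = sum(2 .* a)             # broadcasting materializes the fill into a traced array
    @jit f(rx)                     # the fill compiles down to a constant broadcast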
2 changes: 1 addition & 1 deletion src/Compiler.jl
@@ -288,7 +288,7 @@ function create_result(
    sym = Symbol("result", var_idx[])
    var_idx[] += 1

-   @assert haskey(result_stores, path)
+   @assert haskey(result_stores, path) "Expected $(path) in $(keys(result_stores))"
    restore = result_stores[path]
    delete!(result_stores, path)
    if path_to_shard_info !== nothing && haskey(path_to_shard_info, path)
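
The added message turns a bare assertion failure into something actionable. For example (plain Julia, values hypothetical), the failure would read roughly:

    result_stores = Dict((:result, 1) => :a)
    path = (:result, 2)
    @assert haskey(result_stores, path) "Expected $(path) in $(keys(result_stores))"
    # AssertionError: Expected (:result, 2) in [(:result, 1)]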
1 change: 1 addition & 0 deletions src/ConcreteRArray.jl
@@ -83,6 +83,7 @@ for T in Base.uniontypes(ReactantPrimitive)
end

function Base.convert(::Type{T}, x::AbstractConcreteNumber) where {T<:Number}
+   T == typeof(x) && return x
    return convert(T, to_number(x))
end

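The new first line short-circuits identity conversions. Roughly (assuming `ConcreteRNumber`'s scalar constructor):

    x = Reactant.ConcreteRNumber(1.0f0)
    convert(typeof(x), x)    # returns x directly, skipping the host round-trip
    convert(Float64, x)      # still converts via to_number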
9 changes: 9 additions & 0 deletions src/Reactant.jl
@@ -42,11 +42,20 @@ function ancestor(T::Type{<:AbstractArray})
        p_T == T && return T
        return ancestor(p_T)
    end
+   if applicable(_parent_type, T)
+       p_T = _parent_type(T)
+       p_T == T && return T
+       return ancestor(p_T)
+   end
    @warn "`Adapt.parent_type` is not implemented for $(T). Assuming $T isn't a wrapped \
        array." maxlog = 1
    return T
end

+# A lot of packages don't define `Adapt.parent_type`. We use `_parent_type` as a way to
+# define the parent type of an array without type-piracy.
+function _parent_type end
include("accelerators/Accelerators.jl")

using .Accelerators.TPU: has_tpu
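Illustrative use (hypothetical wrapper, not in the diff): a package can opt its wrapper type into `ancestor` without pirating `Adapt.parent_type`:

    struct MyWrapper{T,N,P<:AbstractArray{T,N}} <: AbstractArray{T,N}
        data::P
    end

    # ancestor(MyWrapper{...}) now recurses into P instead of warning
    Reactant._parent_type(::Type{MyWrapper{T,N,P}}) where {T,N,P} = P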
4 changes: 3 additions & 1 deletion src/xla/PJRT/Buffer.jl
@@ -167,7 +167,9 @@ function XLA.buffer_on_cpu(buffer::Buffer)
end

function XLA.to_host(buffer::Buffer, data, sharding)
-   GC.@preserve buffer begin
+   @assert data !== C_NULL
+   @assert buffer.buffer !== C_NULL
+   GC.@preserve buffer data begin
        @ccall MLIR.API.mlir_c.BufferToHost(
            buffer.buffer::Ptr{Cvoid}, data::Ptr{Cvoid}
        )::Cvoid
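Why `data` joins the preserve list: the raw pointer handed to the C call does not root the underlying Julia object, so the GC could otherwise free or move it mid-call. The general pattern (plain Julia, unrelated buffer for illustration):

    arr = Vector{Float32}(undef, 16)
    GC.@preserve arr begin
        ptr = pointer(arr)    # holding a Ptr does not keep `arr` alive
        # any ccall writing through ptr must stay inside this block
    end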
1 change: 1 addition & 0 deletions test/Project.toml
@@ -6,6 +6,7 @@ DLFP8Types = "f4c16678-4a16-415b-82ef-ed337c5d6c7c"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341"
FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
Float8s = "81dfefd7-55b0-40c6-a251-db853704e186"
Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
29 changes: 29 additions & 0 deletions test/integration/fillarrays.jl
@@ -0,0 +1,29 @@
using Reactant, Test, FillArrays

fn(x, y) = (2 .* x .- 3) * y'

@testset "Fill" begin
x = Fill(2.0f0, 4, 5)
rx = Reactant.to_rarray(x)

@test @jit(fn(rx, rx)) ≈ fn(x, x)

@testset "Ones" begin
y = Ones(Float32, 4, 5)
ry = Reactant.to_rarray(y)
@test @jit(fn(rx, ry)) ≈ fn(x, y)
end

@testset "Zeros" begin
y = Zeros(Float32, 4, 5)
ry = Reactant.to_rarray(y)
@test @jit(fn(rx, ry)) ≈ fn(x, y)
end
end

@testset "OneElement" begin
x = OneElement(3.4f0, (3, 4), (32, 32))
rx = Reactant.to_rarray(x)

@test @jit(fn(rx, rx)) ≈ fn(x, x)
end
1 change: 1 addition & 0 deletions test/runtests.jl
@@ -50,6 +50,7 @@ const REACTANT_TEST_GROUP = lowercase(get(ENV, "REACTANT_TEST_GROUP", "all"))
    @safetestset "Random" include("integration/random.jl")
    @safetestset "Python" include("integration/python.jl")
    @safetestset "Optimisers" include("integration/optimisers.jl")
+   @safetestset "FillArrays" include("integration/fillarrays.jl")
end

if REACTANT_TEST_GROUP == "all" || REACTANT_TEST_GROUP == "neural_networks"