Skip to content

Commit

Permalink
Fix softmax tests (#629)
Browse files Browse the repository at this point in the history
  • Loading branch information
pxl-th committed May 14, 2024
1 parent ac43df6 commit ace0a2e
Show file tree
Hide file tree
Showing 3 changed files with 36 additions and 18 deletions.
6 changes: 5 additions & 1 deletion src/runtime/memory/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,16 @@ function alloc_or_retry!(f, isfailed; stream::HIP.HIPStream)

if isfailed(res)
pool = HIP.memory_pool(stream.device)
hard_limit = AMDGPU.hard_memory_limit()
limit_str = hard_limit == typemax(UInt64) ?
"none" : Base.format_bytes(hard_limit)

error("""
Failed to successfully execute function and free resources for it.
Reporting current memory usage:
- HIP pool used: $(Base.format_bytes(HIP.used_memory(pool))).
- HIP pool reserved: $(Base.format_bytes(HIP.reserved_memory(pool))).
- Hard memory limit: $(Base.format_bytes(AMDGPU.hard_memory_limit())).
- Hard memory limit: $limit_str.
""")
end
return res
Expand Down
10 changes: 6 additions & 4 deletions src/stats.jl
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
"""
info()
Returns a tuple of two integers, indicating respectively the free and total amount of memory
(in bytes) available for allocation on the device.
Returns a tuple of two integers, indicating respectively the free and total
amount of memory (in bytes) available for allocation on the device.
"""
function info()
free_ref = Ref{Csize_t}()
Expand All @@ -14,14 +14,16 @@ end
"""
free()
Returns the free amount of memory (in bytes), available for allocation on the device.
Returns the free amount of memory (in bytes),
available for allocation on the device.
"""
free() = info()[1]

"""
total()
Returns the total amount of memory (in bytes), available for allocation on the device.
Returns the total amount of memory (in bytes),
available for allocation on the device.
"""
total() = info()[2]

Expand Down
38 changes: 25 additions & 13 deletions test/dnn/softmax.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,32 +6,44 @@
((5, 5, 5), (1, 2)), ((5, 5, 5), (1, 3)),
((5, 5, 5, 5), (2, 3)), ((5, 5, 5, 5), (2, 4)),
]
if T == Float16
x, dy = ones(T, sz), ones(T, sz) # Really low precision.
else
x, dy = randn(T, sz), randn(T, sz)
end
x, dy = randn(T, sz), randn(T, sz)
xd, dyd = ROCArray(x), ROCArray(dy)

# Regular softmax.

y = NNlib.softmax(x; dims)
yd = MIOpen.softmax(xd; dims)
@test Array(yd) ≈ y atol=atol
if T == Float16
@test !any(isnan.(Array(yd)))
else
y = NNlib.softmax(x; dims)
@test Array(yd) ≈ y atol=atol
end

dx = NNlib.∇softmax_data(dy, y; dims)
dxd = MIOpen.∇softmax(dyd, yd; dims)
@test Array(dxd) ≈ dx atol=atol
if T == Float16
@test !any(isnan.(Array(dxd)))
else
dx = NNlib.∇softmax_data(dy, y; dims)
@test Array(dxd) ≈ dx atol=atol
end

# Log softmax.

y = NNlib.logsoftmax(x; dims)
yd = MIOpen.logsoftmax(xd; dims)
@test Array(yd) ≈ y atol=atol
if T == Float16
@test !any(isnan.(Array(yd)))
else
y = NNlib.logsoftmax(x; dims)
@test Array(yd) ≈ y atol=atol
end

dx = NNlib.∇logsoftmax_data(dy, y; dims)
dxd = MIOpen.∇logsoftmax(dyd, yd; dims)
@test Array(dxd) ≈ dx atol=atol
if T == Float16
@test !any(isnan.(Array(dxd)))
else
dx = NNlib.∇logsoftmax_data(dy, y; dims)
@test Array(dxd) ≈ dx atol=atol
end
end
end
end

0 comments on commit ace0a2e

Please sign in to comment.