Error when trying to use Flux #491

@racinmat

Description

I'm trying to reproduce the basic example from https://fluxml.ai/Flux.jl/stable/models/overview/ using pyjulia. Computing the loss works fine, but training crashes:

import julia
julia.install()
from julia import Julia
Julia(compiled_modules=False)
from julia import Flux

def actual(x): return 4*x + 2

# PyCall turns the nested list into a 1x5 Matrix{Int64} on the Julia side
x_train, x_test = [list(range(0, 5))], [list(range(6, 10))]
y_train, y_test = list(map(actual, x_train[0])), list(map(actual, x_test[0]))

model = Flux.Dense(1, 1)
model(x_train)

def loss(x, y): return Flux.Losses.mse(model(x), y)
loss(x_train, y_train)  # evaluating the loss works fine

parameters = Flux.params(model)
data = [(x_train, y_train)]
Flux.train_b(loss, parameters, data, Flux.ADAM())  # pyjulia's name for Flux.train!; this crashes

Everything runs as expected except for the last line:

RuntimeError: <PyCall.jlwrap (in a Julia function called from Python)
JULIA: Compiling Tuple{typeof(PyCall._pycall!), PyObject, PyObject, Tuple{Matrix{Int64}, Vector{Int64}}, Int64, Ptr{Nothing}}: try/catch is not supported.
Stacktrace:
  [1] error(s::String)
    @ Base ./error.jl:33
  [2] instrument(ir::IRTools.Inner.IR)
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/reverse.jl:121
  [3] #Primal#20
    @ /opt/julia/packages/Zygote/cCyLF/src/compiler/reverse.jl:202 [inlined]
  [4] Zygote.Adjoint(ir::IRTools.Inner.IR; varargs::Nothing, normalise::Bool)
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/reverse.jl:315
  [5] _generate_pullback_via_decomposition(T::Type)
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/emit.jl:101
  [6] #s5902#1220
    @ /opt/julia/packages/Zygote/cCyLF/src/compiler/interface2.jl:28 [inlined]
  [7] var"#s5902#1220"(::Any, ctx::Any, f::Any, args::Any)
    @ Zygote ./none:0
  [8] (::Core.GeneratedFunctionStub)(::Any, ::Vararg{Any})
    @ Core ./boot.jl:580
  [9] _pullback
    @ /opt/julia/packages/PyCall/7a7w0/src/pyfncall.jl:11 [inlined]
 [10] _pullback(::Zygote.Context, ::typeof(PyCall._pycall!), ::PyObject, ::PyObject, ::Tuple{Matrix{Int64}, Vector{Int64}}, ::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/interface2.jl:0
 [11] _pullback
    @ /opt/julia/packages/PyCall/7a7w0/src/pyfncall.jl:86 [inlined]
 [12] _pullback(::Zygote.Context, ::PyCall.var"##_#114", ::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, ::PyObject, ::Matrix{Int64}, ::Vector{Int64})
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/interface2.jl:0
 [13] _apply
    @ ./boot.jl:814 [inlined]
 [14] adjoint
    @ /opt/julia/packages/Zygote/cCyLF/src/lib/lib.jl:200 [inlined]
 [15] _pullback
    @ /opt/julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
 [16] _pullback
    @ /opt/julia/packages/PyCall/7a7w0/src/pyfncall.jl:86 [inlined]
 [17] _pullback(::Zygote.Context, ::PyObject, ::Matrix{Int64}, ::Vector{Int64})
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/interface2.jl:0
 [18] _apply(::Function, ::Vararg{Any})
    @ Core ./boot.jl:814
 [19] adjoint
    @ /opt/julia/packages/Zygote/cCyLF/src/lib/lib.jl:200 [inlined]
 [20] _pullback
    @ /opt/julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
 [21] _pullback
    @ /opt/julia/packages/Flux/qAdFM/src/optimise/train.jl:110 [inlined]
 [22] _pullback(::Zygote.Context, ::Flux.Optimise.var"#39#45"{PyObject, Tuple{Matrix{Int64}, Vector{Int64}}})
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/interface2.jl:0
 [23] pullback(f::Function, ps::Zygote.Params)
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/interface.jl:352
 [24] gradient(f::Function, args::Zygote.Params)
    @ Zygote /opt/julia/packages/Zygote/cCyLF/src/compiler/interface.jl:75
 [25] macro expansion
    @ /opt/julia/packages/Flux/qAdFM/src/optimise/train.jl:109 [inlined]
 [26] macro expansion
    @ /opt/julia/packages/Juno/n6wyj/src/progress.jl:134 [inlined]
 [27] train!(loss::PyObject, ps::Zygote.Params, data::Vector{Tuple{Matrix{Int64}, Vector{Int64}}}, opt::Flux.Optimise.ADAM; cb::Flux.Optimise.var"#40#46")
    @ Flux.Optimise /opt/julia/packages/Flux/qAdFM/src/optimise/train.jl:107
 [28] train!(loss::PyObject, ps::Zygote.Params, data::Vector{Tuple{Matrix{Int64}, Vector{Int64}}}, opt::Flux.Optimise.ADAM)
    @ Flux.Optimise /opt/julia/packages/Flux/qAdFM/src/optimise/train.jl:105
 [29] invokelatest(::Any, ::Any, ::Vararg{Any}; kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ Base ./essentials.jl:716
 [30] invokelatest(::Any, ::Any, ::Vararg{Any})
    @ Base ./essentials.jl:714
 [31] _pyjlwrap_call(f::Function, args_::Ptr{PyCall.PyObject_struct}, kw_::Ptr{PyCall.PyObject_struct})
    @ PyCall /opt/julia/packages/PyCall/7a7w0/src/callback.jl:28
 [32] pyjlwrap_call(self_::Ptr{PyCall.PyObject_struct}, args_::Ptr{PyCall.PyObject_struct}, kw_::Ptr{PyCall.PyObject_struct})
    @ PyCall /opt/julia/packages/PyCall/7a7w0/src/callback.jl:44>

How should I use Flux from Python and define the loss function in a differentiable way?
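From the traceback, it looks like Zygote refuses to instrument PyCall._pycall! because it contains try/catch, so a loss defined as a Python function apparently cannot be differentiated at all. Would moving the model and the loss to the Julia side avoid this? A minimal sketch of what I have in mind (not verified; the Main-based setup is just my guess):

from julia import Julia
Julia(compiled_modules=False)
from julia import Main, Flux

# Build the model in Julia's Main module so the Julia loss below can see it.
Main.model = Flux.Dense(1, 1)

# Define the loss as a Julia function; Zygote should be able to instrument
# this, since it never calls back into Python.
Main.eval("loss(x, y) = Flux.Losses.mse(model(x), y)")

x_train = [list(range(0, 5))]                # 1x5 Matrix{Int64} on the Julia side
y_train = [[4 * x + 2 for x in x_train[0]]]  # 1x5, matching the model output shape

parameters = Flux.params(Main.model)
data = [(x_train, y_train)]
# pyjulia exposes Flux.train! as Flux.train_b
Flux.train_b(Main.loss, parameters, data, Flux.ADAM())

The key difference is that train! would then receive a Julia function, so the whole pullback stays in Julia.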
