diff --git a/base/loading.jl b/base/loading.jl index 5da5cc931421f..4ec5df16092d7 100644 --- a/base/loading.jl +++ b/base/loading.jl @@ -260,7 +260,7 @@ struct LoadingCache identified::Dict{String, Union{Nothing, Tuple{PkgId, String}}} located::Dict{Tuple{PkgId, Union{String, Nothing}}, Union{Tuple{String, String}, Nothing}} end -const LOADING_CACHE = Ref{Union{LoadingCache, Nothing}}(nothing) +const LOADING_CACHE = Ref{Union{LoadingCache, Nothing}}(nothing) # n.b.: all access to and through this are protected by require_lock LoadingCache() = LoadingCache(load_path(), Dict(), Dict(), Dict(), Set(), Dict(), Dict(), Dict()) @@ -302,10 +302,12 @@ end # Used by Pkg but not used in loading itself function find_package(arg) # ::Union{Nothing,String} + @lock require_lock begin pkgenv = identify_package_env(arg) pkgenv === nothing && return nothing pkg, env = pkgenv return locate_package(pkg, env) + end end # is there a better/faster ground truth? @@ -332,6 +334,7 @@ is also returned, except when the identity is not identified. 
""" identify_package_env(where::Module, name::String) = identify_package_env(PkgId(where), name) function identify_package_env(where::PkgId, name::String) + assert_havelock(require_lock) cache = LOADING_CACHE[] if cache !== nothing pkg_env = get(cache.identified_where, (where, name), missing) @@ -364,6 +367,7 @@ function identify_package_env(where::PkgId, name::String) return pkg_env end function identify_package_env(name::String) + assert_havelock(require_lock) cache = LOADING_CACHE[] if cache !== nothing pkg_env = get(cache.identified, name, missing) @@ -426,11 +430,12 @@ julia> using LinearAlgebra julia> Base.identify_package(LinearAlgebra, "Pkg") # Pkg is not a dependency of LinearAlgebra ``` """ -identify_package(where::Module, name::String) = _nothing_or_first(identify_package_env(where, name)) -identify_package(where::PkgId, name::String) = _nothing_or_first(identify_package_env(where, name)) -identify_package(name::String) = _nothing_or_first(identify_package_env(name)) +identify_package(where::Module, name::String) = @lock require_lock _nothing_or_first(identify_package_env(where, name)) +identify_package(where::PkgId, name::String) = @lock require_lock _nothing_or_first(identify_package_env(where, name)) +identify_package(name::String) = @lock require_lock _nothing_or_first(identify_package_env(name)) function locate_package_env(pkg::PkgId, stopenv::Union{String, Nothing}=nothing)::Union{Nothing,Tuple{String,String}} + assert_havelock(require_lock) cache = LOADING_CACHE[] if cache !== nothing pathenv = get(cache.located, (pkg, stopenv), missing) @@ -508,7 +513,7 @@ julia> Base.locate_package(pkg) ``` """ function locate_package(pkg::PkgId, stopenv::Union{String, Nothing}=nothing)::Union{Nothing,String} - _nothing_or_first(locate_package_env(pkg, stopenv)) + @lock require_lock _nothing_or_first(locate_package_env(pkg, stopenv)) end """ @@ -1824,51 +1829,60 @@ function show(io::IO, it::ImageTarget) end # should sync with the types of arguments of 
`stale_cachefile` -const StaleCacheKey = Tuple{PkgId, UInt128, String, String} +const StaleCacheKey = Tuple{PkgId, UInt128, String, String, Bool, CacheFlags} -function compilecache_path(pkg::PkgId; +function compilecache_freshest_path(pkg::PkgId; ignore_loaded::Bool=false, stale_cache::Dict{StaleCacheKey,Bool}=Dict{StaleCacheKey, Bool}(), cachepath_cache::Dict{PkgId, Vector{String}}=Dict{PkgId, Vector{String}}(), - cachepaths::Vector{String}=get!(() -> find_all_in_cache_path(pkg), cachepath_cache, pkg), + cachepaths::Vector{String}=get(() -> find_all_in_cache_path(pkg), cachepath_cache, pkg), sourcepath::Union{String,Nothing}=Base.locate_package(pkg), flags::CacheFlags=CacheFlags()) - path = nothing isnothing(sourcepath) && error("Cannot locate source for $(repr("text/plain", pkg))") - for path_to_try in cachepaths - staledeps = stale_cachefile(sourcepath, path_to_try; ignore_loaded, requested_flags=flags) - if staledeps === true - continue - end - staledeps, _, _ = staledeps::Tuple{Vector{Any}, Union{Nothing, String}, UInt128} - # finish checking staledeps module graph - for dep in staledeps - dep isa Module && continue - modpath, modkey, modbuild_id = dep::Tuple{String, PkgId, UInt128} - modpaths = get!(() -> find_all_in_cache_path(modkey), cachepath_cache, modkey) - for modpath_to_try in modpaths::Vector{String} - stale_cache_key = (modkey, modbuild_id, modpath, modpath_to_try)::StaleCacheKey - if get!(() -> stale_cachefile(stale_cache_key...; ignore_loaded, requested_flags=flags) === true, - stale_cache, stale_cache_key) - continue + try_build_ids = UInt128[UInt128(0)] + if !ignore_loaded + let loaded = get(loaded_precompiles, pkg, nothing) + if loaded !== nothing + for mod in loaded # try these in reverse original load order to see if one is already valid + pushfirst!(try_build_ids, module_build_id(mod)) end - @goto check_next_dep end - @goto check_next_path - @label check_next_dep end - try - # update timestamp of precompilation file so that it is the first 
to be tried by code loading - touch(path_to_try) - catch ex - # file might be read-only and then we fail to update timestamp, which is fine - ex isa IOError || rethrow() + end + for build_id in try_build_ids + for path_to_try in cachepaths + staledeps = stale_cachefile(pkg, build_id, sourcepath, path_to_try; ignore_loaded, requested_flags=flags) + if staledeps === true + continue + end + staledeps, _, _ = staledeps::Tuple{Vector{Any}, Union{Nothing, String}, UInt128} + # finish checking staledeps module graph + for dep in staledeps + dep isa Module && continue + modpath, modkey, modbuild_id = dep::Tuple{String, PkgId, UInt128} + modpaths = get(() -> find_all_in_cache_path(modkey), cachepath_cache, modkey) + for modpath_to_try in modpaths::Vector{String} + stale_cache_key = (modkey, modbuild_id, modpath, modpath_to_try, ignore_loaded, flags)::StaleCacheKey + if get!(() -> stale_cachefile(modkey, modbuild_id, modpath, modpath_to_try; ignore_loaded, requested_flags=flags) === true, + stale_cache, stale_cache_key) + continue + end + @goto check_next_dep + end + @goto check_next_path + @label check_next_dep + end + try + # update timestamp of precompilation file so that it is the first to be tried by code loading + touch(path_to_try) + catch ex + # file might be read-only and then we fail to update timestamp, which is fine + ex isa IOError || rethrow() + end + return path_to_try + @label check_next_path end - path = path_to_try - break - @label check_next_path end - return path end """ @@ -1884,14 +1898,8 @@ fresh julia session specify `ignore_loaded=true`. !!! compat "Julia 1.10" This function requires at least Julia 1.10. 
""" -function isprecompiled(pkg::PkgId; - ignore_loaded::Bool=false, - stale_cache::Dict{StaleCacheKey,Bool}=Dict{StaleCacheKey, Bool}(), - cachepath_cache::Dict{PkgId, Vector{String}}=Dict{PkgId, Vector{String}}(), - cachepaths::Vector{String}=get!(() -> find_all_in_cache_path(pkg), cachepath_cache, pkg), - sourcepath::Union{String,Nothing}=Base.locate_package(pkg), - flags::CacheFlags=CacheFlags()) - path = compilecache_path(pkg; ignore_loaded, stale_cache, cachepath_cache, cachepaths, sourcepath, flags) +function isprecompiled(pkg::PkgId; ignore_loaded::Bool=false) + path = compilecache_freshest_path(pkg; ignore_loaded) return !isnothing(path) end @@ -1905,7 +1913,7 @@ associated cache is relocatable. This function requires at least Julia 1.11. """ function isrelocatable(pkg::PkgId) - path = compilecache_path(pkg) + path = compilecache_freshest_path(pkg) isnothing(path) && return false io = open(path, "r") try @@ -1925,6 +1933,23 @@ function isrelocatable(pkg::PkgId) return true end +function parse_cache_buildid(cachepath::String) + f = open(cachepath, "r") + try + checksum = isvalid_cache_header(f) + iszero(checksum) && throw(ArgumentError("Incompatible header in cache file $cachefile.")) + flags = read(f, UInt8) + n = read(f, Int32) + n == 0 && error("no module defined in $cachefile") + skip(f, n) # module name + uuid = UUID((read(f, UInt64), read(f, UInt64))) # pkg UUID + build_id = (UInt128(checksum) << 64) | read(f, UInt64) + return build_id, uuid + finally + close(f) + end +end + # search for a precompile cache file to load, after some various checks function _tryrequire_from_serialized(modkey::PkgId, build_id::UInt128) assert_havelock(require_lock) @@ -2682,8 +2707,19 @@ function __require_prelocked(pkg::PkgId, env) try if !generating_output() && !parallel_precompile_attempted && !disable_parallel_precompile && @isdefined(Precompilation) parallel_precompile_attempted = true - Precompilation.precompilepkgs([pkg]; _from_loading=true, ignore_loaded=false) - 
return + precompiled = Precompilation.precompilepkgs([pkg]; _from_loading=true, ignore_loaded=false) + # precompiled returns either nothing, indicating it needs serial precompile, + # or the entry(ies) that it found would be best to load (possibly because it just created it) + # or an empty set of entries (indicating the precompile should be skipped) + if precompiled !== nothing + isempty(precompiled) && return PrecompilableError() # oops, Precompilation forgot to report what this might actually be + local cachefile = precompiled[1] + local ocachefile = nothing + if JLOptions().use_pkgimages == 1 + ocachefile = ocachefile_from_cachefile(cachefile) + end + return cachefile, ocachefile + end end triggers = get(EXT_PRIMED, pkg, nothing) loadable_exts = nothing diff --git a/base/precompilation.jl b/base/precompilation.jl index a6be5d4c59e83..aa51fc54c9d77 100644 --- a/base/precompilation.jl +++ b/base/precompilation.jl @@ -529,7 +529,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, default_num_tasks = Sys.iswindows() ?
div(Sys.CPU_THREADS::Int, 2) + 1 : Sys.CPU_THREADS::Int + 1 default_num_tasks = min(default_num_tasks, 16) # limit for better stability on shared resource systems - num_tasks = parse(Int, get(ENV, "JULIA_NUM_PRECOMPILE_TASKS", string(default_num_tasks))) + num_tasks = max(1, something(tryparse(Int, get(ENV, "JULIA_NUM_PRECOMPILE_TASKS", string(default_num_tasks))), 1)) parallel_limiter = Base.Semaphore(num_tasks) # suppress precompilation progress messages when precompiling for loading packages, except during interactive sessions @@ -726,9 +726,9 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, pkg in serial_deps && continue # skip serial deps as we don't have their dependency graph if scan_pkg!(stack, could_be_cycle, cycles, pkg, direct_deps) push!(circular_deps, pkg) - for pkg_config in keys(was_processed) + for (pkg_config, evt) in was_processed # notify all to allow skipping - pkg_config[1] == pkg && notify(was_processed[pkg_config]) + pkg_config[1] == pkg && notify(evt) end end end @@ -795,7 +795,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, print_lock = io.io isa Base.LibuvStream ? 
io.io.lock::ReentrantLock : ReentrantLock() first_started = Base.Event() printloop_should_exit = Ref{Bool}(!fancyprint) # exit print loop immediately if not fancy printing - interrupted_or_done = Base.Event() + interrupted_or_done = Ref{Bool}(false) ansi_moveup(n::Int) = string("\e[", n, "A") ansi_movecol1 = "\e[1G" @@ -809,13 +809,21 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, interrupted = Ref(false) function handle_interrupt(err, in_printloop::Bool) - notify(interrupted_or_done) - in_printloop || wait(t_print) # wait to let the print loop cease first + if err isa InterruptException + # record that this interrupted_or_done was from InterruptException + interrupted[] = true + end + interrupted_or_done[] = true + # notify all Event sources + for (pkg_config, evt) in was_processed + notify(evt) + end + notify(first_started) + in_printloop || wait(t_print) # Wait to let the print loop cease first. This makes the printing incorrect, so we shouldn't wait here, but we do anyways. 
if err isa InterruptException @lock print_lock begin println(io, " Interrupted: Exiting precompilation...", ansi_cleartoendofline) end - interrupted[] = true return true else return false @@ -865,7 +873,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, t_print = @async begin try wait(first_started) - (isempty(pkg_queue) || interrupted_or_done.set) && return + (isempty(pkg_queue) || interrupted_or_done[]) && return @lock print_lock begin if target[] !== nothing printpkgstyle(io, :Precompiling, target[]) @@ -887,7 +895,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, @lock print_lock begin term_size = displaysize(io)::Tuple{Int, Int} num_deps_show = max(term_size[1] - 3, 2) # show at least 2 deps - pkg_queue_show = if !interrupted_or_done.set && length(pkg_queue) > num_deps_show + pkg_queue_show = if !interrupted_or_done[] && length(pkg_queue) > num_deps_show last(pkg_queue, num_deps_show) else pkg_queue @@ -915,7 +923,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, elseif haskey(failed_deps, pkg_config) string(color_string(" ✗ ", Base.error_color()), name) elseif was_recompiled[pkg_config] - !loaded && interrupted_or_done.set && continue + !loaded && interrupted_or_done[] && continue loaded || @async begin # keep successful deps visible for short period sleep(1); filter!(!isequal(pkg_config), pkg_queue) @@ -944,28 +952,36 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, last_length = length(pkg_queue_show) n_print_rows = count("\n", str_) print(io, str_) - printloop_should_exit[] = interrupted_or_done.set && final_loop - final_loop = interrupted_or_done.set # ensures one more loop to tidy last task after finish + printloop_should_exit[] = interrupted_or_done[] && final_loop + final_loop = interrupted_or_done[] # ensures one more loop to tidy last task after finish i += 1 printloop_should_exit[] || print(io, ansi_moveup(n_print_rows), ansi_movecol1) end wait(t) end catch 
err + # For debugging: + # println("Task failed $err") + # Base.display_error(ErrorException(""), Base.catch_backtrace()) handle_interrupt(err, true) || rethrow() finally fancyprint && print(io, ansi_enablecursor) end end + tasks = Task[] if !_from_loading - Base.LOADING_CACHE[] = Base.LoadingCache() + @lock Base.require_lock begin + Base.LOADING_CACHE[] = Base.LoadingCache() + end end @debug "precompile: starting precompilation loop" direct_deps project_deps ## precompilation loop for (pkg, deps) in direct_deps - cachepaths = get!(() -> Base.find_all_in_cache_path(pkg), cachepath_cache, pkg) + cachepaths = Base.find_all_in_cache_path(pkg) + freshpaths = String[] + cachepath_cache[pkg] = freshpaths sourcepath = Base.locate_package(pkg) single_requested_pkg = length(requested_pkgs) == 1 && (pkg in requested_pkgids || pkg.name in pkg_names) @@ -987,9 +1003,16 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, loaded = warn_loaded && haskey(Base.loaded_modules, pkg) for dep in deps # wait for deps to finish wait(was_processed[(dep,config)]) + if interrupted_or_done[] + return + end end circular = pkg in circular_deps - is_stale = !Base.isprecompiled(pkg; ignore_loaded, stale_cache, cachepath_cache, cachepaths, sourcepath, flags=cacheflags) + freshpath = Base.compilecache_freshest_path(pkg; ignore_loaded, stale_cache, cachepath_cache, cachepaths, sourcepath, flags=cacheflags) + is_stale = freshpath === nothing + if !is_stale + push!(freshpaths, freshpath) + end if !circular && is_stale Base.acquire(parallel_limiter) is_project_dep = pkg in project_deps @@ -999,21 +1022,19 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, std_pipe = Base.link_pipe!(Pipe(); reader_supports_async=true, writer_supports_async=true) t_monitor = @async monitor_std(pkg_config, std_pipe; single_requested_pkg) - name = describe_pkg(pkg, is_project_dep, is_serial_dep, flags, cacheflags) - @lock print_lock begin - if !fancyprint && isempty(pkg_queue) - 
printpkgstyle(io, :Precompiling, something(target[], "packages...")) - end - end - push!(pkg_queue, pkg_config) - started[pkg_config] = true - fancyprint && notify(first_started) - if interrupted_or_done.set - notify(was_processed[pkg_config]) - Base.release(parallel_limiter) - return - end try + name = describe_pkg(pkg, is_project_dep, is_serial_dep, flags, cacheflags) + @lock print_lock begin + if !fancyprint && isempty(pkg_queue) + printpkgstyle(io, :Precompiling, something(target[], "packages...")) + end + end + push!(pkg_queue, pkg_config) + started[pkg_config] = true + fancyprint && notify(first_started) + if interrupted_or_done[] + return + end # for extensions, any extension in our direct dependencies is one we have a right to load # for packages, we may load any extension (all possible triggers are accounted for above) loadable_exts = haskey(ext_to_parent, pkg) ? filter((dep)->haskey(ext_to_parent, dep), direct_deps[pkg]) : nothing @@ -1026,7 +1047,18 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, else # allows processes to wait if another process is precompiling a given package to # a functionally identical package cache (except for preferences, which may differ) - t = @elapsed ret = precompile_pkgs_maybe_cachefile_lock(io, print_lock, fancyprint, pkg_config, pkgspidlocked, hascolor, parallel_limiter, ignore_loaded) do + t = @elapsed ret = precompile_pkgs_maybe_cachefile_lock(io, print_lock, fancyprint, pkg_config, pkgspidlocked, hascolor, parallel_limiter) do + # refresh and double-check the search now that we have global lock + if interrupted_or_done[] + return ErrorException("canceled") + end + cachepaths = Base.find_all_in_cache_path(pkg) + local freshpath = Base.compilecache_freshest_path(pkg; ignore_loaded, stale_cache, cachepath_cache, cachepaths, sourcepath, flags=cacheflags) + local is_stale = freshpath === nothing + if !is_stale + push!(freshpaths, freshpath) + return nothing # returning nothing indicates another process 
did the recompile + end logcalls === nothing || @lock print_lock begin Base.@logmsg logcalls "Precompiling $(repr("text/plain", pkg))" end @@ -1034,7 +1066,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, flags, cacheflags, loadable_exts) end end - if ret isa Base.PrecompilableError + if ret isa Exception push!(precomperr_deps, pkg_config) !fancyprint && @lock print_lock begin println(io, _timing_string(t), color_string(" ? ", Base.warn_color()), name) @@ -1043,11 +1075,17 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, !fancyprint && @lock print_lock begin println(io, _timing_string(t), color_string(" ✓ ", loaded ? Base.warn_color() : :green), name) end - was_recompiled[pkg_config] = true + if ret !== nothing + was_recompiled[pkg_config] = true + cachefile, _ = ret::Tuple{String, Union{Nothing, String}} + push!(freshpaths, cachefile) + build_id, _ = Base.parse_cache_buildid(cachefile) + stale_cache_key = (pkg, build_id, sourcepath, cachefile, ignore_loaded, cacheflags)::StaleCacheKey + stale_cache[stale_cache_key] = false + end end loaded && (n_loaded[] += 1) catch err - # @show err close(std_pipe.in) # close pipe to end the std output monitor wait(t_monitor) if err isa ErrorException || (err isa ArgumentError && startswith(err.msg, "Invalid header in cache file")) @@ -1074,28 +1112,33 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, # For debugging: # println("Task failed $err_outer") # Base.display_error(ErrorException(""), Base.catch_backtrace())# logging doesn't show here - handle_interrupt(err_outer, false) || rethrow() - notify(was_processed[pkg_config]) - finally - filter!(!istaskdone, tasks) - length(tasks) == 1 && notify(interrupted_or_done) + handle_interrupt(err_outer, false) + rethrow() end end - Base.errormonitor(task) # interrupts are handled separately so ok to watch for other errors like this push!(tasks, task) end end - isempty(tasks) && notify(interrupted_or_done) try - 
wait(interrupted_or_done) + waitall(tasks; failfast=false, throw=false) + interrupted_or_done[] = true catch err + # For debugging: + # println("Task failed $err") + # Base.display_error(ErrorException(""), Base.catch_backtrace())# logging doesn't show here handle_interrupt(err, false) || rethrow() finally - Base.LOADING_CACHE[] = nothing + try + waitall(tasks; failfast=false, throw=false) + finally + @lock Base.require_lock begin + Base.LOADING_CACHE[] = nothing + end + end end notify(first_started) # in cases of no-op or !fancyprint fancyprint && wait(t_print) - quick_exit = !all(istaskdone, tasks) || interrupted[] # if some not finished internal error is likely + quick_exit = any(t -> !istaskdone(t) || istaskfailed(t), tasks) || interrupted[] # all should have finished (to avoid memory corruption) seconds_elapsed = round(Int, (time_ns() - time_start) / 1e9) ndeps = count(values(was_recompiled)) if ndeps > 0 || !isempty(failed_deps) || (quick_exit && !isempty(std_outputs)) @@ -1154,12 +1197,14 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, end end end - let str=str - @lock print_lock begin - println(io, str) - end + @lock print_lock begin + println(io, str) end - quick_exit && return + if interrupted[] + # done cleanup, now ensure caller aborts too + throw(InterruptException()) + end + quick_exit && return Vector{String}[] err_str = IOBuffer() n_direct_errs = 0 for (pkg_config, err) in failed_deps @@ -1197,7 +1242,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, end end end - nothing + return collect(String, Iterators.flatten((v for (pkgid, v) in cachepath_cache if pkgid in requested_pkgids))) end _timing_string(t) = string(lpad(round(t * 1e3, digits = 1), 9), " ms") @@ -1213,7 +1258,7 @@ function _color_string(cstr::String, col::Union{Int64, Symbol}, hascolor) end # Can be merged with `maybe_cachefile_lock` in loading? 
-function precompile_pkgs_maybe_cachefile_lock(f, io::IO, print_lock::ReentrantLock, fancyprint::Bool, pkg_config, pkgspidlocked, hascolor, parallel_limiter::Base.Semaphore, ignore_loaded::Bool) +function precompile_pkgs_maybe_cachefile_lock(f, io::IO, print_lock::ReentrantLock, fancyprint::Bool, pkg_config, pkgspidlocked, hascolor, parallel_limiter::Base.Semaphore) if !(isdefined(Base, :mkpidlock_hook) && isdefined(Base, :trymkpidlock_hook) && Base.isdefined(Base, :parse_pidfile_hook)) return f() end @@ -1240,13 +1285,8 @@ function precompile_pkgs_maybe_cachefile_lock(f, io::IO, print_lock::ReentrantLo try # wait until the lock is available @invokelatest Base.mkpidlock_hook(() -> begin - # double-check in case the other process crashed or the lock expired - if Base.isprecompiled(pkg; ignore_loaded, flags=cacheflags) # don't use caches for this as the env state will have changed - return nothing # returning nothing indicates a process waited for another - else - delete!(pkgspidlocked, pkg_config) - Base.acquire(f, parallel_limiter) # precompile - end + delete!(pkgspidlocked, pkg_config) + Base.acquire(f, parallel_limiter) end, pidfile; stale_age) finally diff --git a/test/loading.jl b/test/loading.jl index 4060d3aed6c8f..4d440bbc8baac 100644 --- a/test/loading.jl +++ b/test/loading.jl @@ -1023,7 +1023,7 @@ end write(joinpath(tmp, "Env1", "Manifest.toml"), """ """) # Package in current env not present in manifest - pkg, env = Base.identify_package_env("Baz") + pkg, env = @lock Base.require_lock Base.identify_package_env("Baz") @test Base.locate_package(pkg, env) === nothing finally copy!(LOAD_PATH, old_load_path) @@ -1722,7 +1722,8 @@ end Base64_key = Base.PkgId(Base.UUID("2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"), "Base64") oldBase64 = Base.unreference_module(Base64_key) cc = Base.compilecache(Base64_key) - @test Base.isprecompiled(Base64_key, cachepaths=String[cc[1]]) + sourcepath = Base.locate_package(Base64_key) + @test Base.stale_cachefile(Base64_key, UInt128(0), 
sourcepath, cc[1]) !== true empty!(DEPOT_PATH) Base.require_stdlib(Base64_key) push!(DEPOT_PATH, depot_path) diff --git a/test/precompile.jl b/test/precompile.jl index f78318ec645f2..656f53e6c13b7 100644 --- a/test/precompile.jl +++ b/test/precompile.jl @@ -690,10 +690,10 @@ precompile_test_harness(false) do dir Base.require(Main, :FooBar2) error("the \"break me\" test failed") catch exc - isa(exc, Base.Precompilation.PkgPrecompileError) || rethrow() - occursin("Failed to precompile FooBar2", exc.msg) || rethrow() - # The LoadError is printed to stderr in the precompilepkgs worker and captured in the PkgPrecompileError msg - occursin("LoadError: break me", exc.msg) || rethrow() + isa(exc, LoadError) || rethrow() + exc = exc.error + isa(exc, ErrorException) || rethrow() + "break me" == exc.msg || rethrow() end # Test that trying to eval into closed modules during precompilation is an error @@ -709,7 +709,9 @@ precompile_test_harness(false) do dir try Base.require(Main, :FooBar3) catch exc - isa(exc, Base.Precompilation.PkgPrecompileError) || rethrow() + isa(exc, LoadError) || rethrow() + exc = exc.error + isa(exc, ErrorException) || rethrow() occursin("Evaluation into the closed module `Base` breaks incremental compilation", exc.msg) || rethrow() end end @@ -2145,9 +2147,6 @@ precompile_test_harness("Test flags") do load_path @test cacheflags.check_bounds == 2 @test cacheflags.opt_level == 3 end - id = Base.identify_package("TestFlags") - @test Base.isprecompiled(id, ;flags=modified_flags) - @test !Base.isprecompiled(id, ;flags=current_flags) end if Base.get_bool_env("CI", false) && (Sys.ARCH === :x86_64 || Sys.ARCH === :aarch64)