-
-
Notifications
You must be signed in to change notification settings - Fork 251
/
PlatformEngines.jl
680 lines (608 loc) · 23.6 KB
/
PlatformEngines.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
# This file is a part of Julia. License is MIT: https://julialang.org/license
# Content in this file is extracted from BinaryProvider.jl, see LICENSE.method
module PlatformEngines
using SHA, Downloads, Tar
import ...Pkg: Pkg, TOML, pkg_server, depots1, can_fancyprint, stderr_f
using ..MiniProgressBars
using Base.BinaryPlatforms, p7zip_jll
export verify, unpack, package, download_verify_unpack
# Guards the lazy one-time initialization of `EXE7Z` below.
const EXE7Z_LOCK = ReentrantLock()
# Cached path of the 7z executable; resolved on first use by `exe7z`/`find7z`.
const EXE7Z = Ref{String}()
"""
    exe7z() -> Cmd

Return a `Cmd` that runs a 7z binary. Prefers the `p7zip_jll` wrapper when
that JLL is available; otherwise locates a bundled/system `7z` once via
`find7z` (under `EXE7Z_LOCK`) and caches its path in `EXE7Z`.
"""
function exe7z()
    # The JLL wrapper already knows where its own binary lives.
    p7zip_jll.is_available() && return p7zip_jll.p7zip()
    return lock(EXE7Z_LOCK) do
        isassigned(EXE7Z) || (EXE7Z[] = find7z())
        Cmd([EXE7Z[]])
    end
end
"""
    find7z() -> String

Locate a `7z` executable: first relative to the Julia installation
(`Sys.BINDIR/../libexec`, then `Sys.BINDIR` itself), then on the `PATH`
via `Sys.which`. Throws an error when no binary can be found.
"""
function find7z()
    exe = Sys.iswindows() ? "7z.exe" : "7z"
    for reldir in (joinpath("..", "libexec"), ".")
        candidate = normpath(Sys.BINDIR::String, reldir, exe)
        isfile(candidate) && return candidate
    end
    found = Sys.which(exe)
    found === nothing || return found
    error("7z binary not found")
end
# A URL counts as "secure" when it uses HTTPS, or when it targets
# 127.0.0.1/localhost over any scheme (loopback traffic never leaves the host).
function is_secure_url(url::AbstractString)
    return occursin(r"^(https://|\w+://(127\.0\.0\.1|localhost)(:\d+)?($|/))"i, url)
end
"""
    get_server_dir(url, server = pkg_server()) -> Union{String, Nothing}

Map a package-server `url` to the local directory under `<depot>/servers/`
where per-server state (e.g. `auth.toml`) is stored. Returns `nothing` when
no server is configured, when `url` is not the server or a path beneath it,
when the server value cannot be parsed, or when no depot path is available.
"""
function get_server_dir(
    url :: AbstractString,
    server :: Union{AbstractString, Nothing} = pkg_server(),
)
    server === nothing && return
    # `url` must be the server itself or a resource underneath it.
    url == server || startswith(url, "$server/") || return
    m = match(r"^\w+://([^\\/]+)(?:$|/)", server)
    if m === nothing
        @warn "malformed Pkg server value" server
        return
    end
    isempty(Base.DEPOT_PATH) && return
    # Sanitize the host[:port] part into a valid cross-platform directory name.
    # (fixed: the original list contained '/' twice)
    invalid_filename_chars = (':', '/', '<', '>', '"', '\\', '|', '?', '*')
    dir = join(replace(c -> c in invalid_filename_chars ? '_' : c, collect(String(m[1]))))
    return joinpath(depots1(), "servers", dir)
end
const AUTH_ERROR_HANDLERS = Pair{Union{String, Regex},Any}[]
# Walk the registered auth-error handlers and invoke the first one whose URL
# pattern matches `url`. If that handler reports the error as handled AND asks
# for a retry, re-attempt `get_auth_header`; in every other case return `nothing`.
function handle_auth_error(url, err; verbose::Bool = false)
    handled = false
    should_retry = false
    for (scheme, handler) in AUTH_ERROR_HANDLERS
        if occursin(scheme, url)
            handled, should_retry = handler(url, pkg_server(), err)
            handled && break
        end
    end
    if handled && should_retry
        return get_auth_header(url; verbose = verbose)
    end
    return nothing
end
"""
register_auth_error_handler(urlscheme::Union{AbstractString, Regex}, f)
Registers `f` as the topmost handler for failures in package server authentication.
A handler is only invoked if `occursin(urlscheme, url)` is true (where `url` is the URL Pkg
is currently trying to download.)
`f` must be a function that takes three input arguments `(url, pkgserver, err)`, where `url` is the
URL currently being downloaded, `pkgserver = Pkg.pkg_server()` the current package server, and
`err` is one of `no-auth-file`, `insecure-connection`, `malformed-file`, `no-access-token`,
`no-refresh-key` or `insecure-refresh-url`.
The handler `f` needs to return a tuple of `Bool`s `(handled, should_retry)`. If `handled` is `false`,
the next handler in the stack will be called, otherwise handling terminates; `get_auth_header` is called again if `should_retry`
is `true`.
`register_auth_error_handler` returns a zero-arg function that can be called to deregister the handler.
"""
function register_auth_error_handler(urlscheme::Union{AbstractString, Regex}, @nospecialize(f))
unique!(pushfirst!(AUTH_ERROR_HANDLERS, urlscheme => f))
return () -> deregister_auth_error_handler(urlscheme, f)
end
"""
deregister_auth_error_handler(urlscheme::Union{AbstractString, Regex}, f)
Removes `f` from the stack of authentication error handlers.
"""
function deregister_auth_error_handler(urlscheme::Union{String, Regex}, @nospecialize(f))
filter!(handler -> !(handler.first == urlscheme && handler.second === f), AUTH_ERROR_HANDLERS)
return nothing
end
"""
    get_auth_header(url::AbstractString; verbose::Bool = false)

Produce an `"Authorization" => "Bearer …"` header pair for requests to `url`,
reading the token from `<server_dir>/auth.toml`. Returns `nothing` (possibly
after consulting the registered auth-error handlers) when no usable auth file
exists or the connection is insecure. If the stored token is near expiry and
the auth file carries `refresh_url`/`refresh_token` fields, the token is
refreshed over HTTPS and the auth file is atomically rewritten.
"""
function get_auth_header(url::AbstractString; verbose::Bool = false)
    server_dir = get_server_dir(url)
    server_dir === nothing && return
    auth_file = joinpath(server_dir, "auth.toml")
    isfile(auth_file) || return handle_auth_error(url, "no-auth-file"; verbose=verbose)
    # TODO: check for insecure auth file permissions
    # Never send bearer tokens over a connection that could be eavesdropped.
    if !is_secure_url(url)
        @warn "refusing to send auth info over insecure connection" url=url
        return handle_auth_error(url, "insecure-connection"; verbose=verbose)
    end
    # parse the auth file
    auth_info = try
        TOML.parsefile(auth_file)
    catch err
        @error "malformed auth file" file=auth_file err=err
        return handle_auth_error(url, "malformed-file"; verbose=verbose)
    end
    # check for an auth token
    if !haskey(auth_info, "access_token")
        @warn "auth file without access_token field" file=auth_file
        return handle_auth_error(url, "no-access-token"; verbose=verbose)
    end
    auth_token = auth_info["access_token"]::String
    auth_header = "Authorization" => "Bearer $auth_token"
    # handle token expiration and refresh
    # `expires_at` is an absolute epoch time; take the tighter of the two
    # optional bounds (`expires_at` absolute, `expires_in` relative to the
    # auth file's mtime).
    expires_at = Inf
    if haskey(auth_info, "expires_at")
        expires_at = min(expires_at, Float64(auth_info["expires_at"])::Float64)
    end
    if haskey(auth_info, "expires_in")
        expires_at = min(expires_at, mtime(auth_file) + Float64(auth_info["expires_in"])::Float64)
    end
    # if token is good until ten minutes from now, use it
    time_now = time()
    if expires_at ≥ time_now + 10*60 # ten minutes
        return auth_header
    end
    if !haskey(auth_info, "refresh_url") || !haskey(auth_info, "refresh_token")
        if expires_at ≤ time_now
            @warn "expired auth without refresh keys" file=auth_file
        end
        # try it anyway since we can't refresh
        return something(handle_auth_error(url, "no-refresh-key"; verbose=verbose), auth_header)
    end
    refresh_url = auth_info["refresh_url"]::String
    if !is_secure_url(refresh_url)
        @warn "ignoring insecure auth refresh URL" url=refresh_url
        return something(handle_auth_error(url, "insecure-refresh-url"; verbose=verbose), auth_header)
    end
    verbose && @info "Refreshing expired auth token..." file=auth_file
    # Download the refreshed auth file to a temp path so a failed/partial
    # refresh never clobbers the existing auth.toml.
    tmp = tempname()
    refresh_token = auth_info["refresh_token"]::String
    refresh_auth = "Authorization" => "Bearer $refresh_token"
    try download(refresh_url, tmp, auth_header=refresh_auth, verbose=verbose)
    catch err
        @warn "token refresh failure" file=auth_file url=refresh_url err=err
        rm(tmp, force=true)
        return handle_auth_error(url, "token-refresh-failed"; verbose=verbose)
    end
    auth_info = try TOML.parsefile(tmp)
    catch err
        @warn "discarding malformed auth file" url=refresh_url err=err
        rm(tmp, force=true)
        return something(handle_auth_error(url, "malformed-file"; verbose=verbose), auth_header)
    end
    if !haskey(auth_info, "access_token")
        # Redact the refresh token before logging the rejected file contents.
        if haskey(auth_info, "refresh_token")
            auth_info["refresh_token"] = "*"^64
        end
        @warn "discarding auth file without access token" auth=auth_info
        rm(tmp, force=true)
        return something(handle_auth_error(url, "no-access-token"; verbose=verbose), auth_header)
    end
    if haskey(auth_info, "expires_in")
        expires_in = auth_info["expires_in"]
        if expires_in isa Number
            expires_at = floor(time_now + Float64(expires_in)::Float64)
            # overwrite expires_at (avoids clock skew issues)
            auth_info["expires_at"] = expires_at
        end
    end
    # Persist the (possibly rewritten) refreshed auth info, then atomically
    # replace the old auth file.
    let auth_info = auth_info
        open(tmp, write=true) do io
            TOML.print(io, auth_info, sorted=true)
        end
    end
    mv(tmp, auth_file, force=true)
    access_token = auth_info["access_token"]::String
    return "Authorization" => "Bearer $access_token"
end
# based on information in this post:
# https://github.community/t5/GitHub-Actions/Have-the-CI-environment-variable-set-by-default/m-p/32358/highlight/true#M1097
# Environment variables checked by `get_metadata_headers` to report (in
# anonymized t/f/n/o state form) whether Pkg appears to be running under CI.
const CI_VARIABLES = [
    "APPVEYOR",
    "CI",
    "CI_SERVER",
    "CIRCLECI",
    "CONTINUOUS_INTEGRATION",
    "GITHUB_ACTIONS",
    "GITLAB_CI",
    "JULIA_CI",
    "JULIA_PKGEVAL",
    "JULIA_REGISTRYCI_AUTOMERGE",
    "TF_BUILD",
    "TRAVIS",
]
"""
    get_metadata_headers(url::AbstractString) -> Vector{Pair{String,String}}

Build the metadata headers Pkg sends to its package server: protocol version,
server name, Julia version, host platform triplet, anonymized CI-variable
states, interactivity flag, and any user-supplied `JULIA_PKG_SERVER_*`
headers. Returns an empty vector when `url` is not served by the configured
package server.
"""
function get_metadata_headers(url::AbstractString)
    headers = Pair{String,String}[]
    server = pkg_server()
    server_dir = get_server_dir(url, server)
    # Only send metadata to the configured package server, never to other hosts.
    server_dir === nothing && return headers
    push!(headers, "Julia-Pkg-Protocol" => "1.0")
    push!(headers, "Julia-Pkg-Server" => server)
    push!(headers, "Julia-Version" => string(VERSION))
    system = triplet(HostPlatform())
    push!(headers, "Julia-System" => system)
    # Report each CI variable as a state letter, not its raw value:
    # n = not set, t = truthy, f = falsy, o = other.
    ci_info = String[]
    for var in CI_VARIABLES
        val = get(ENV, var, nothing)
        state = val === nothing ? "n" :
            lowercase(val) in ("true", "t", "1", "yes", "y") ? "t" :
            lowercase(val) in ("false", "f", "0", "no", "n") ? "f" : "o"
        push!(ci_info, "$var=$state")
    end
    push!(headers, "Julia-CI-Variables" => join(ci_info, ';'))
    push!(headers, "Julia-Interactive" => string(isinteractive()))
    # Custom headers: `JULIA_PKG_SERVER_FOO_BAR=val` becomes `Julia-Foo-Bar: val`.
    for (key, val) in ENV
        m = match(r"^JULIA_PKG_SERVER_([A-Z0-9_]+)$"i, key)
        m === nothing && continue
        val = strip(val)
        isempty(val) && continue
        words = split(m.captures[1], '_', keepempty=false)
        isempty(words) && continue
        hdr = "Julia-" * join(map(titlecase, words), '-')
        # Never let a user-supplied header override one of the standard ones above.
        any(hdr == k for (k, v) in headers) && continue
        push!(headers, hdr => val)
    end
    return headers
end
"""
    download(url, dest; verbose=false, headers=[], auth_header=nothing, io=stderr_f())

Download `url` to the file `dest` via `Downloads.download`, attaching the
package-server auth header (looked up automatically when `auth_header` is
`nothing`) plus Pkg's metadata headers. When `verbose` is set and `io`
supports fancy printing, a progress bar is rendered to `io`.

NOTE: the caller-supplied `headers` vector is mutated (headers are `push!`ed
onto it).
"""
function download(
    url::AbstractString,
    dest::AbstractString;
    verbose::Bool = false,
    headers::Vector{Pair{String,String}} = Pair{String,String}[],
    auth_header::Union{Pair{String,String}, Nothing} = nothing,
    io::IO=stderr_f()
)
    if auth_header === nothing
        auth_header = get_auth_header(url, verbose=verbose)
    end
    if auth_header !== nothing
        push!(headers, auth_header)
    end
    for header in get_metadata_headers(url)
        push!(headers, header)
    end
    do_fancy = verbose && can_fancyprint(io)
    progress = if do_fancy
        # `bar` is assigned inside this `if`, but `if` does not introduce a new
        # scope in Julia, so it is also visible in the `finally` below (only
        # reached there when `do_fancy` is true, i.e. when it was assigned).
        bar = MiniProgressBar(header="Downloading", color=Base.info_color())
        start_progress(io, bar)
        # `let bar=bar` pins the binding so the closure captures it cleanly.
        let bar=bar
            (total, now) -> begin
                bar.max = total
                bar.current = now
                show_progress(io, bar)
            end
        end
    else
        # No-op progress callback when not printing fancily.
        (total, now) -> nothing
    end
    try
        Downloads.download(url, dest; headers, progress)
    finally
        # Always tear the progress bar down, even if the download throws.
        do_fancy && end_progress(io, bar)
    end
end
"""
download_verify(
url::AbstractString,
hash::Union{AbstractString, Nothing},
dest::AbstractString;
verbose::Bool = false,
force::Bool = false,
quiet_download::Bool = false,
)
Download file located at `url`, verify it matches the given `hash`, and throw
an error if anything goes wrong. If `dest` already exists, just verify it. If
`force` is set to `true`, overwrite the given file if it exists but does not
match the given `hash`.
This method returns `true` if the file was downloaded successfully, `false`
if an existing file was removed due to the use of `force`, and throws an error
if `force` is not set and the already-existent file fails verification, or if
`force` is set, verification fails, and then verification fails again after
redownloading the file.
If `quiet_download` is set to `false`, this method will print to
stdout when downloading a new file. If it is set to `true` (default, and `verbose` is
set to `false`) the downloading process will be completely silent. If
`verbose` is set to `true`, messages about integrity verification will be
printed in addition to messages regarding downloading.
"""
function download_verify(
url::AbstractString,
hash::Union{AbstractString, Nothing},
dest::AbstractString;
verbose::Bool = false,
force::Bool = false,
quiet_download::Bool = false,
)
# Whether the file existed in the first place
file_existed = false
if isfile(dest)
file_existed = true
if verbose
@info("Destination file $(dest) already exists, verifying...")
end
# verify download, if it passes, return happy. If it fails, (and
# `force` is `true`, re-download!)
if hash !== nothing && verify(dest, hash; verbose=verbose)
return true
elseif !force
error("Verification failed, not overwriting $(dest)")
end
end
# Make sure the containing folder exists
mkpath(dirname(dest))
# Download the file, optionally continuing
f = retry(delays = fill(1.0, 3)) do
try
download(url, dest; verbose=verbose || !quiet_download)
catch err
@debug "download and verify failed" url dest err
rethrow()
end
end
f()
if hash !== nothing && !verify(dest, hash; verbose=verbose)
# If the file already existed, it's possible the initially downloaded chunk
# was bad. If verification fails after downloading, auto-delete the file
# and start over from scratch.
if file_existed
if verbose
@info("Continued download didn't work, restarting from scratch")
end
Base.rm(dest; force=true)
# Download and verify from scratch
download(url, dest; verbose=verbose || !quiet_download)
if hash !== nothing && !verify(dest, hash; verbose=verbose)
error("Verification failed. Download does not match expected hash")
end
else
# If it didn't verify properly and we didn't resume, something is
# very wrong and we must complain mightily.
error("Verification failed. Download does not match expected hash")
end
end
# If the file previously existed, this means we removed it (due to `force`)
# and redownloaded, so return `false`. If it didn't exist, then this means
# that we successfully downloaded it, so return `true`.
return !file_existed
end
# TODO: can probably delete this, only affects tests
"""
    copy_symlinks() -> Union{Bool, Nothing}

Tri-state override for `Tar.extract`'s `copy_symlinks` keyword, driven by the
`BINARYPROVIDER_COPYDEREF` environment variable: `true`/`false` when the
variable holds a recognized truthy/falsy word (case-insensitive), `nothing`
otherwise.
"""
function copy_symlinks()
    setting = lowercase(get(ENV, "BINARYPROVIDER_COPYDEREF", ""))
    setting in ("true", "t", "yes", "y", "1") && return true
    setting in ("false", "f", "no", "n", "0") && return false
    return nothing
end
"""
    unpack(tarball_path, dest; verbose=false)

Decompress `tarball_path` through 7z and extract the resulting tar stream
into the directory `dest`.
"""
function unpack(
    tarball_path::AbstractString,
    dest::AbstractString;
    verbose::Bool = false,
)
    decompress = `$(exe7z()) x $tarball_path -so`
    return Tar.extract(decompress, dest, copy_symlinks = copy_symlinks())
end
"""
package(src_dir::AbstractString, tarball_path::AbstractString)
Compress `src_dir` into a tarball located at `tarball_path`.
"""
function package(src_dir::AbstractString, tarball_path::AbstractString; io=stderr_f())
rm(tarball_path, force=true)
cmd = `$(exe7z()) a -si -tgzip -mx9 $tarball_path`
open(pipeline(cmd, stdout=devnull, stderr=io), write=true) do io
Tar.create(src_dir, io)
end
end
"""
download_verify_unpack(
url::AbstractString,
hash::Union{AbstractString, Nothing},
dest::AbstractString;
tarball_path = nothing,
ignore_existence::Bool = false,
force::Bool = false,
verbose::Bool = false,
quiet_download::Bool = false,
io::IO=stderr,
)
Helper method to download tarball located at `url`, verify it matches the
given `hash`, then unpack it into folder `dest`. In general, the method
`install()` should be used to download and install tarballs into a `Prefix`;
this method should only be used if the extra functionality of `install()` is
undesired.
If `tarball_path` is specified, the given `url` will be downloaded to
`tarball_path`, and it will not be removed after downloading and verification
is complete. If it is not specified, the tarball will be downloaded to a
temporary location, and removed after verification is complete.
If `force` is specified, a verification failure will cause `tarball_path` to be
deleted (if it exists), the `dest` folder to be removed (if it exists) and the
tarball to be redownloaded and reverified. If the verification check is failed
a second time, an exception is raised. If `force` is not specified, a
verification failure will result in an immediate raised exception.
If `ignore_existence` is set, the tarball is unpacked even if the destination
directory already exists.
Returns `true` if a tarball was actually unpacked, `false` if nothing was
changed in the destination prefix.
"""
function download_verify_unpack(
url::AbstractString,
hash::Union{AbstractString, Nothing},
dest::AbstractString;
tarball_path = nothing,
ignore_existence::Bool = false,
force::Bool = false,
verbose::Bool = false,
quiet_download::Bool = false,
io::IO=stderr_f(),
)
# First, determine whether we should keep this tarball around
remove_tarball = false
if tarball_path === nothing
remove_tarball = true
function url_ext(url)
url = basename(url)
# Chop off urlparams
qidx = findfirst(isequal('?'), url)
if qidx !== nothing
url = url[1:qidx]
end
# Try to detect extension
dot_idx = findlast(isequal('.'), url)
if dot_idx === nothing
return nothing
end
return url[dot_idx+1:end]
end
# If extension of url contains a recognized extension, use it, otherwise use ".gz"
ext = url_ext(url)
if !(ext in ["tar", "gz", "tgz", "bz2", "xz"])
ext = "gz"
end
# Work around windows limitations regarding tempname()
tarball_path = "$(tempname())-download.$(ext)"
tries = 0
while isfile(tarball_path) && tries < 100
tarball_path = "$(tempname())-download.$(ext)"
tries += 1
end
if tries >= 100
error("Unable to generate unused tempname! Clean up your temporary folder $(dirname(tempname())) and try again.")
end
end
# Download the tarball; if it already existed and we needed to remove it
# then we should remove the unpacked path as well
should_delete = !download_verify(url, hash, tarball_path;
force=force, verbose=verbose, quiet_download=quiet_download)
if should_delete
if verbose
@info("Removing dest directory $(dest) as source tarball changed")
end
Base.rm(dest; recursive=true, force=true)
end
# If the destination path already exists, don't bother to unpack
if !ignore_existence && isdir(dest)
if verbose
@info("Destination directory $(dest) already exists, returning")
end
# Signify that we didn't do any unpacking
return false
end
try
if verbose
@info("Unpacking $(tarball_path) into $(dest)...")
end
open(`$(exe7z()) x $tarball_path -so`) do io
Tar.extract(io, dest, copy_symlinks = copy_symlinks())
end
finally
if remove_tarball
Base.rm(tarball_path)
# Remove cached tarball hash, if it exists.
Base.rm(string(tarball_path, ".sha256"); force=true)
end
end
# Signify that we did some unpacking!
return true
end
"""
verify(path::AbstractString, hash::AbstractString;
verbose::Bool = false, report_cache_status::Bool = false)
Given a file `path` and a `hash`, calculate the SHA256 of the file and compare
it to `hash`. This method caches verification results in a `"\$(path).sha256"`
file to accelerate reverification of files that have been previously verified.
If no `".sha256"` file exists, a full verification will be done and the file
will be created, with the calculated hash being stored within the `".sha256"`
file. If a `".sha256"` file does exist, its contents are checked to ensure
that the hash contained within matches the given `hash` parameter, and its
modification time shows that the file located at `path` has not been modified
since the last verification.
If `report_cache_status` is set to `true`, then the return value will be a
`Symbol` giving a granular status report on the state of the hash cache, in
addition to the `true`/`false` signifying whether verification completed
successfully.
"""
function verify(path::AbstractString, hash::AbstractString; verbose::Bool = false,
report_cache_status::Bool = false, hash_path::AbstractString="$(path).sha256")
# Check hash string format
if !occursin(r"^[0-9a-f]{64}$"i, hash)
msg = "Hash value must be 64 hexadecimal characters (256 bits), "
if !isascii(hash)
msg *= "given hash value is non-ASCII"
elseif occursin(r"^[0-9a-f]*$"i, hash)
msg *= "given hash value has the wrong length ($(length(hash)))"
else
msg *= "given hash value contains non-hexadecimal characters"
end
msg *= ": $(repr(hash))"
error(msg)
end
hash = lowercase(hash)
# Check to see if the hash cache is consistent
status = :hash_consistent
# First, it must exist
if isfile(hash_path)
# Next, it must contain the same hash as what we're verifying against
if read(hash_path, String) == hash
# Next, it must be no older than the actual path
if stat(hash_path).mtime >= stat(path).mtime
# If all of that is true, then we're good!
if verbose
@info("Hash cache is consistent, returning true")
end
status = :hash_cache_consistent
# If we're reporting our status, then report it!
if report_cache_status
return true, status
else
return true
end
else
if verbose
@info("File has been modified, hash cache invalidated")
end
status = :file_modified
end
else
if verbose
@info("Verification hash mismatch, hash cache invalidated")
end
status = :hash_cache_mismatch
end
else
if verbose
@info("No hash cache found")
end
status = :hash_cache_missing
end
calc_hash = open(path) do file
bytes2hex(sha256(file))
end
@assert occursin(r"^[0-9a-f]{64}$", calc_hash)
if verbose
@info("Calculated hash $calc_hash for file $path")
end
if calc_hash != hash
msg = "Hash Mismatch!\n"
msg *= " Expected sha256: $hash\n"
msg *= " Calculated sha256: $calc_hash"
@error(msg)
if report_cache_status
return false, :hash_mismatch
else
return false
end
end
# Try to save a hash cache if everything worked out fine
try
open(hash_path, "w") do file
write(file, hash)
end
catch e
if isa(e, InterruptException)
rethrow(e)
end
if verbose
@warn("Unable to create hash cache file $(hash_path)")
end
end
if report_cache_status
return true, status
else
return true
end
end
# Verify the git-tree-sha1 hash of a compressed archive.
function verify_archive_tree_hash(tar_gz::AbstractString, expected_hash::Base.SHA1)
    # This can fail because unlike sha256 verification of the downloaded
    # tarball, tree hash verification requires that the file can i) be
    # decompressed and ii) is a proper archive.
    local calc_hash
    try
        calc_hash = Base.SHA1(open(Tar.tree_hash, `$(exe7z()) x $tar_gz -so`))
    catch err
        @warn "unable to decompress and read archive" exception=err
        return false
    end
    calc_hash == expected_hash && return true
    @warn "tarball content does not match expected git-tree-sha1"
    return false
end
end # module PlatformEngines