Unify URL/URLs parameter code across http_archive, http_file, http_jar
Hi wonderful Bazel folks,

I--like probably everyone else--make heavy use of the (super handy) http repository rules.

While using them, I'd noticed that the docs for the URL attributes they have in common had started drifting. For example, http_file's urls doc was mostly the same as http_archive's, except that it forgot to specify that the URLs are tried in the order they're listed. http_jar's docs were much less complete. And http_file alone was missing the url parameter, which seems a shame, since most of the time people only want to supply one authoritative source. When I looked at the code, it became clear that this was because the implementations had been copy-pasted and were being updated separately.
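
To make the missing piece concrete, here's roughly what declaring a single authoritative source with http_file looks like once the url attribute exists there too (a minimal sketch; the repository name, URL, and checksum below are placeholders):

```
# Hypothetical WORKSPACE snippet; name, URL, and sha256 are placeholders.
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")

http_file(
    name = "example_tool",
    url = "https://example.com/tool/v1.2.3/tool.bin",  # one authoritative source
    sha256 = "0000000000000000000000000000000000000000000000000000000000000000",
    executable = True,
)
```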

So I spent a few minutes cleaning things up and making these commonly used features a little more consistent, fixing the issues above. Hopefully that's welcome.
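
As one example of the now-consistent behavior, the shared urls doc spells out that URLs are tried in order until one succeeds, so a setup like this (hypothetical mirror and upstream URLs, placeholder checksum) hits the local mirror first and only falls back to upstream if needed:

```
# Hypothetical WORKSPACE snippet; both URLs and the sha256 are placeholders.
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
    name = "some_dep",
    urls = [
        "https://mirror.corp.example/some_dep-1.0.tar.gz",  # tried first
        "https://upstream.example.org/some_dep-1.0.tar.gz",  # fallback
    ],
    sha256 = "0000000000000000000000000000000000000000000000000000000000000000",
)
```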

I'd love your thoughtful consideration and review!

Thanks, wonderful Bazel folks, for all you do!

Cheers,
Chris
(ex-Googler)

P.S. I've granted edit access via the "allow edits by maintainers" box, so if there are things you'd like changed, feel free to just make them directly!

Closes bazelbuild#15408.

PiperOrigin-RevId: 449225740
cpsauer authored and Copybara-Service committed May 17, 2022
1 parent e575800 commit d273cb6
100 changes: 45 additions & 55 deletions tools/build_defs/repo/http.bzl
@@ -41,6 +41,38 @@ load(
)

# Shared between http_jar, http_file and http_archive.

_URL_DOC = """A URL to a file that will be made available to Bazel.
This must be a file, http or https URL. Redirections are followed.
Authentication is not supported.
More flexibility can be achieved by the urls parameter that allows
to specify alternative URLs to fetch from."""

_URLS_DOC = """A list of URLs to a file that will be made available to Bazel.
Each entry must be a file, http or https URL. Redirections are followed.
Authentication is not supported.
URLs are tried in order until one succeeds, so you should list local mirrors first.
If all downloads fail, the rule will fail."""

def _get_all_urls(ctx):
    """Returns all urls provided via the url or urls attributes.
    Also checks that at least one url is provided."""
    if not ctx.attr.url and not ctx.attr.urls:
        fail("At least one of url and urls must be provided")

    all_urls = []
    if ctx.attr.urls:
        all_urls = ctx.attr.urls
    if ctx.attr.url:
        all_urls = [ctx.attr.url] + all_urls

    return all_urls

_AUTH_PATTERN_DOC = """An optional dict mapping host names to custom authorization patterns.
If a URL's host name is present in this dict the value will be used as a pattern when
@@ -84,17 +116,10 @@ def _get_auth(ctx, urls):

def _http_archive_impl(ctx):
    """Implementation of the http_archive rule."""
    if not ctx.attr.url and not ctx.attr.urls:
        fail("At least one of url and urls must be provided")
    if ctx.attr.build_file and ctx.attr.build_file_content:
        fail("Only one of build_file and build_file_content can be provided.")

    all_urls = []
    if ctx.attr.urls:
        all_urls = ctx.attr.urls
    if ctx.attr.url:
        all_urls = [ctx.attr.url] + all_urls

    all_urls = _get_all_urls(ctx)
    auth = _get_auth(ctx, all_urls)

    download_info = ctx.download_and_extract(
@@ -138,9 +163,10 @@ def _http_file_impl(ctx):
    download_path = ctx.path("file/" + downloaded_file_path)
    if download_path in forbidden_files or not str(download_path).startswith(str(repo_root)):
        fail("'%s' cannot be used as downloaded_file_path in http_file" % ctx.attr.downloaded_file_path)
    auth = _get_auth(ctx, ctx.attr.urls)
    all_urls = _get_all_urls(ctx)
    auth = _get_auth(ctx, all_urls)
    download_info = ctx.download(
        ctx.attr.urls,
        all_urls,
        "file/" + downloaded_file_path,
        ctx.attr.sha256,
        ctx.attr.executable,
@@ -173,11 +199,7 @@ filegroup(

def _http_jar_impl(ctx):
    """Implementation of the http_jar rule."""
    all_urls = []
    if ctx.attr.urls:
        all_urls = ctx.attr.urls
    if ctx.attr.url:
        all_urls = [ctx.attr.url] + all_urls
    all_urls = _get_all_urls(ctx)
    auth = _get_auth(ctx, all_urls)
    downloaded_file_name = ctx.attr.downloaded_file_name
    download_info = ctx.download(
@@ -192,28 +214,8 @@ def _http_jar_impl(ctx):
    return update_attrs(ctx.attr, _http_jar_attrs.keys(), {"sha256": download_info.sha256})

_http_archive_attrs = {
    "url": attr.string(
        doc =
            """A URL to a file that will be made available to Bazel.
This must be a file, http or https URL. Redirections are followed.
Authentication is not supported.
This parameter is to simplify the transition from the native http_archive
rule. More flexibility can be achieved by the urls parameter that allows
to specify alternative URLs to fetch from.
""",
    ),
    "urls": attr.string_list(
        doc =
            """A list of URLs to a file that will be made available to Bazel.
Each entry must be a file, http or https URL. Redirections are followed.
Authentication is not supported.
URLs are tried in order until one succeeds, so you should list local mirrors first.
If all downloads fail, the rule will fail.""",
    ),
    "url": attr.string(doc = _URL_DOC),
    "urls": attr.string_list(doc = _URLS_DOC),
    "sha256": attr.string(
        doc = """The expected SHA-256 of the file downloaded.
@@ -393,7 +395,7 @@ Examples:
http_archive(
name = "my_ssl",
urls = ["http://example.com/openssl.zip"],
url = "http://example.com/openssl.zip",
sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
build_file = "@//:openssl.BUILD",
)
@@ -426,13 +428,8 @@ If specified and non-empty, bazel will not take the archive from cache,
unless it was added to the cache by a request with the same canonical id.
""",
    ),
    "urls": attr.string_list(
        mandatory = True,
        doc = """A list of URLs to a file that will be made available to Bazel.
Each entry must be a file, http or https URL. Redirections are followed.
Authentication is not supported.""",
    ),
    "url": attr.string(doc = _URL_DOC),
    "urls": attr.string_list(doc = _URLS_DOC),
    "netrc": attr.string(
        doc = "Location of the .netrc file to use for authentication",
    ),
@@ -458,7 +455,7 @@ Examples:
http_file(
name = "my_deb",
urls = ["http://example.com/package.deb"],
url = "http://example.com/package.deb",
sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
)
```
@@ -478,15 +475,8 @@ If specified and non-empty, bazel will not take the archive from cache,
unless it was added to the cache by a request with the same canonical id.
""",
    ),
    "url": attr.string(
        doc =
            "The URL to fetch the jar from. It must end in `.jar`.",
    ),
    "urls": attr.string_list(
        doc =
            "A list of URLS the jar can be fetched from. They have to end " +
            "in `.jar`.",
    ),
    "url": attr.string(doc = _URL_DOC + "\n\nThe URL must end in `.jar`."),
    "urls": attr.string_list(doc = _URLS_DOC + "\n\nAll URLs must end in `.jar`."),
    "netrc": attr.string(
        doc = "Location of the .netrc file to use for authentication",
    ),