From 65aaf1ba27e2a2105e8ee03d518ba38ff852df31 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Mon, 17 Mar 2025 19:20:26 -0400 Subject: [PATCH 01/13] feat(landoscript): initial landoscript code with support for version bump action This adds the rough structure for landoscript as well as implementing the `version_bump` action (necessary to make it practical to test the initial code). --- .taskcluster.yml | 1 + landoscript/MANIFEST.in | 9 + landoscript/docker.d/init_worker.sh | 2 + landoscript/docker.d/worker.yml | 11 + landoscript/pyproject.toml | 2 + landoscript/requirements/base.in | 6 + landoscript/requirements/base.txt | 1174 +++++++++++++++++ landoscript/requirements/local.in | 3 + landoscript/requirements/local.txt | 44 + landoscript/requirements/test.in | 7 + landoscript/requirements/test.txt | 113 ++ landoscript/setup.py | 17 + landoscript/src/landoscript/__init__.py | 0 .../src/landoscript/actions/__init__.py | 0 .../src/landoscript/actions/version_bump.py | 143 ++ .../data/landoscript_task_schema.json | 320 +++++ landoscript/src/landoscript/errors.py | 5 + landoscript/src/landoscript/lando.py | 76 ++ landoscript/src/landoscript/script.py | 104 ++ landoscript/src/landoscript/util/__init__.py | 0 landoscript/src/landoscript/util/diffs.py | 14 + landoscript/tests/conftest.py | 41 + landoscript/tests/data/test_private_key.pem | 27 + landoscript/tests/test_version_bump.py | 676 ++++++++++ maintenance/pin.sh | 2 + taskcluster/kinds/docker-image/kind.yml | 5 + taskcluster/kinds/push-image/kind.yml | 1 + taskcluster/kinds/tox/kind.yml | 6 + tox.ini | 7 + 29 files changed, 2816 insertions(+) create mode 100644 landoscript/MANIFEST.in create mode 100755 landoscript/docker.d/init_worker.sh create mode 100644 landoscript/docker.d/worker.yml create mode 100644 landoscript/pyproject.toml create mode 100644 landoscript/requirements/base.in create mode 100644 landoscript/requirements/base.txt create mode 100644 landoscript/requirements/local.in create mode 100644 
landoscript/requirements/local.txt create mode 100644 landoscript/requirements/test.in create mode 100644 landoscript/requirements/test.txt create mode 100644 landoscript/setup.py create mode 100644 landoscript/src/landoscript/__init__.py create mode 100644 landoscript/src/landoscript/actions/__init__.py create mode 100644 landoscript/src/landoscript/actions/version_bump.py create mode 100644 landoscript/src/landoscript/data/landoscript_task_schema.json create mode 100644 landoscript/src/landoscript/errors.py create mode 100644 landoscript/src/landoscript/lando.py create mode 100644 landoscript/src/landoscript/script.py create mode 100644 landoscript/src/landoscript/util/__init__.py create mode 100644 landoscript/src/landoscript/util/diffs.py create mode 100644 landoscript/tests/conftest.py create mode 100644 landoscript/tests/data/test_private_key.pem create mode 100644 landoscript/tests/test_version_bump.py diff --git a/.taskcluster.yml b/.taskcluster.yml index ea2952ed4..0867e3377 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -81,6 +81,7 @@ tasks: - bouncerscript - githubscript # - iscript (iscript has special release via ronin_puppet repo) + - landoscript - pushapkscript - pushflatpakscript - pushmsixscript diff --git a/landoscript/MANIFEST.in b/landoscript/MANIFEST.in new file mode 100644 index 000000000..72bc6d40e --- /dev/null +++ b/landoscript/MANIFEST.in @@ -0,0 +1,9 @@ +include setup.py +include landoscript/data/* + +recursive-include src * + +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] +recursive-exclude requirements * +recursive-exclude tests * diff --git a/landoscript/docker.d/init_worker.sh b/landoscript/docker.d/init_worker.sh new file mode 100755 index 000000000..483b9e68d --- /dev/null +++ b/landoscript/docker.d/init_worker.sh @@ -0,0 +1,2 @@ +#!/bin/bash +set -o errexit -o pipefail diff --git a/landoscript/docker.d/worker.yml b/landoscript/docker.d/worker.yml new file mode 100644 index 000000000..d8f8a5892 --- /dev/null 
+++ b/landoscript/docker.d/worker.yml @@ -0,0 +1,11 @@ +work_dir: { "$eval": "WORK_DIR" } +artifact_dir: { "$eval": "ARTIFACTS_DIR" } +verbose: { "$eval": "VERBOSE == 'true'" } +lando_api: { "$eval": "LANDO_API" } +# maps the repo+branch portion of lando URLs to github repo information +# TODO: switch this to lookup in lando when that API exists +lando_name_to_github_repo: + autoland: + owner: mozilla-firefox + repo: firefox + branch: autoland diff --git a/landoscript/pyproject.toml b/landoscript/pyproject.toml new file mode 100644 index 000000000..9a91009c6 --- /dev/null +++ b/landoscript/pyproject.toml @@ -0,0 +1,2 @@ +[tool.coverage.run] +branch = true diff --git a/landoscript/requirements/base.in b/landoscript/requirements/base.in new file mode 100644 index 000000000..e46d914ea --- /dev/null +++ b/landoscript/requirements/base.in @@ -0,0 +1,6 @@ +aiohttp +async-timeout +gql +mozilla-version +scriptworker +yarl diff --git a/landoscript/requirements/base.txt b/landoscript/requirements/base.txt new file mode 100644 index 000000000..055bd3fe2 --- /dev/null +++ b/landoscript/requirements/base.txt @@ -0,0 +1,1174 @@ +# SHA1:351fe9debb87f5ae11df3160c740236641413bbc +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +aiohappyeyeballs==2.6.1 \ + --hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \ + --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 + # via aiohttp +aiohttp==3.11.14 \ + --hash=sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73 \ + --hash=sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e \ + --hash=sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3 \ + --hash=sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0 \ + --hash=sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f \ + 
--hash=sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881 \ + --hash=sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de \ + --hash=sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5 \ + --hash=sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a \ + --hash=sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0 \ + --hash=sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45 \ + --hash=sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6 \ + --hash=sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647 \ + --hash=sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b \ + --hash=sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06 \ + --hash=sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948 \ + --hash=sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3 \ + --hash=sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c \ + --hash=sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307 \ + --hash=sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77 \ + --hash=sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be \ + --hash=sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3 \ + --hash=sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b \ + --hash=sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff \ + --hash=sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228 \ + --hash=sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce \ + --hash=sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186 \ + --hash=sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f \ + --hash=sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d \ + 
--hash=sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1 \ + --hash=sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a \ + --hash=sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c \ + --hash=sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49 \ + --hash=sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f \ + --hash=sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91 \ + --hash=sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc \ + --hash=sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1 \ + --hash=sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a \ + --hash=sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d \ + --hash=sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa \ + --hash=sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058 \ + --hash=sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28 \ + --hash=sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831 \ + --hash=sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f \ + --hash=sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50 \ + --hash=sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b \ + --hash=sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda \ + --hash=sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e \ + --hash=sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f \ + --hash=sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db \ + --hash=sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1 \ + --hash=sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3 \ + --hash=sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09 \ + 
--hash=sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990 \ + --hash=sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff \ + --hash=sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db \ + --hash=sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9 \ + --hash=sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734 \ + --hash=sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0 \ + --hash=sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0 \ + --hash=sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628 \ + --hash=sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6 \ + --hash=sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654 \ + --hash=sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f \ + --hash=sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7 \ + --hash=sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc \ + --hash=sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534 \ + --hash=sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3 \ + --hash=sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a \ + --hash=sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1 \ + --hash=sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c \ + --hash=sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3 \ + --hash=sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33 \ + --hash=sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d \ + --hash=sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa \ + --hash=sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618 \ + --hash=sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914 \ + 
--hash=sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b \ + --hash=sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325 \ + --hash=sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965 \ + --hash=sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3 + # via + # -r requirements/base.in + # scriptworker + # taskcluster +aiomemoizettl==0.0.3 \ + --hash=sha256:07a6becac60f6cd2604b9f2b73bcd9a50079a0b7b55e2a4e45b1eec5a3ea9659 \ + --hash=sha256:0a80d2dc765e545263f515363b6700ec8cf86fa3968b529f56390b28e34f743d + # via scriptworker +aiosignal==1.3.2 \ + --hash=sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5 \ + --hash=sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54 + # via aiohttp +anyio==4.9.0 \ + --hash=sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028 \ + --hash=sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c + # via gql +appdirs==1.4.4 \ + --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ + --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 + # via taskcluster-taskgraph +arrow==1.3.0 \ + --hash=sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80 \ + --hash=sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85 + # via + # cookiecutter + # isoduration + # scriptworker +async-timeout==5.0.1 \ + --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ + --hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 + # via + # -r requirements/base.in + # taskcluster +attrs==25.3.0 \ + --hash=sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3 \ + --hash=sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b + # via + # aiohttp + # jsonschema + # mozilla-version + # referencing +backoff==2.2.1 \ + 
--hash=sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba \ + --hash=sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8 + # via gql +binaryornot==0.4.4 \ + --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ + --hash=sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4 + # via cookiecutter +certifi==2025.1.31 \ + --hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \ + --hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe + # via requests +cffi==1.17.1 \ + --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ + --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ + --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ + --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ + --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ + --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ + --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ + --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ + --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ + --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ + --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ + --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ + --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ + --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ + --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ + --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ + 
--hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ + --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ + --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ + --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ + --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ + --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ + --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ + --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ + --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ + --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ + --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ + --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ + --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ + --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ + --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ + --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ + --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ + --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ + --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ + --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ + --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ + --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ + --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ + --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ + 
--hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ + --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ + --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ + --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ + --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ + --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ + --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ + --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ + --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ + --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ + --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ + --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ + --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ + --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ + --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ + --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ + --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ + --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ + --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ + --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ + --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ + --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ + --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ + --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ + 
--hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ + --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ + --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b + # via cryptography +chardet==5.2.0 \ + --hash=sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7 \ + --hash=sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970 + # via binaryornot +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + 
--hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + 
--hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + 
--hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + 
--hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via cookiecutter +cookiecutter==2.6.0 \ + --hash=sha256:a54a8e37995e4ed963b3e82831072d1ad4b005af736bb17b99c2cbd9d41b6e2d \ + --hash=sha256:db21f8169ea4f4fdc2408d48ca44859349de2647fbe494a9d6c3edfc0542c21c + # via taskcluster-taskgraph +cryptography==44.0.2 \ + --hash=sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390 \ + --hash=sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41 \ + --hash=sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688 \ + --hash=sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5 \ + --hash=sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1 \ + --hash=sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d \ + --hash=sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7 \ + --hash=sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843 \ + --hash=sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5 \ + --hash=sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c \ + --hash=sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a \ + --hash=sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79 \ + --hash=sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6 \ + --hash=sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181 \ + --hash=sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4 \ + --hash=sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5 \ + 
--hash=sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562 \ + --hash=sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639 \ + --hash=sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922 \ + --hash=sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3 \ + --hash=sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d \ + --hash=sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471 \ + --hash=sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd \ + --hash=sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa \ + --hash=sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb \ + --hash=sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699 \ + --hash=sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb \ + --hash=sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa \ + --hash=sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0 \ + --hash=sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23 \ + --hash=sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9 \ + --hash=sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615 \ + --hash=sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea \ + --hash=sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7 \ + --hash=sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308 + # via + # pyjwt + # scriptworker +dictdiffer==0.9.0 \ + --hash=sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578 \ + --hash=sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595 + # via scriptworker +fqdn==1.5.1 \ + --hash=sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f \ + --hash=sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014 + # 
via jsonschema +frozenlist==1.5.0 \ + --hash=sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e \ + --hash=sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf \ + --hash=sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6 \ + --hash=sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a \ + --hash=sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d \ + --hash=sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f \ + --hash=sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28 \ + --hash=sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b \ + --hash=sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9 \ + --hash=sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2 \ + --hash=sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec \ + --hash=sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2 \ + --hash=sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c \ + --hash=sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336 \ + --hash=sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4 \ + --hash=sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d \ + --hash=sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b \ + --hash=sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c \ + --hash=sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10 \ + --hash=sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08 \ + --hash=sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942 \ + --hash=sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8 \ + --hash=sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f \ + 
--hash=sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10 \ + --hash=sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5 \ + --hash=sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6 \ + --hash=sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21 \ + --hash=sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c \ + --hash=sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d \ + --hash=sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923 \ + --hash=sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608 \ + --hash=sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de \ + --hash=sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17 \ + --hash=sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0 \ + --hash=sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f \ + --hash=sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641 \ + --hash=sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c \ + --hash=sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a \ + --hash=sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0 \ + --hash=sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9 \ + --hash=sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab \ + --hash=sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f \ + --hash=sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3 \ + --hash=sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a \ + --hash=sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784 \ + --hash=sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604 \ + --hash=sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d \ + 
--hash=sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5 \ + --hash=sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03 \ + --hash=sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e \ + --hash=sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953 \ + --hash=sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee \ + --hash=sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d \ + --hash=sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817 \ + --hash=sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3 \ + --hash=sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039 \ + --hash=sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f \ + --hash=sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9 \ + --hash=sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf \ + --hash=sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76 \ + --hash=sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba \ + --hash=sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171 \ + --hash=sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb \ + --hash=sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439 \ + --hash=sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631 \ + --hash=sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972 \ + --hash=sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d \ + --hash=sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869 \ + --hash=sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9 \ + --hash=sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411 \ + --hash=sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723 \ + 
--hash=sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2 \ + --hash=sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b \ + --hash=sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99 \ + --hash=sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e \ + --hash=sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840 \ + --hash=sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3 \ + --hash=sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb \ + --hash=sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3 \ + --hash=sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0 \ + --hash=sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca \ + --hash=sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45 \ + --hash=sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e \ + --hash=sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f \ + --hash=sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5 \ + --hash=sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307 \ + --hash=sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e \ + --hash=sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2 \ + --hash=sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778 \ + --hash=sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a \ + --hash=sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30 \ + --hash=sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a + # via + # aiohttp + # aiosignal +github3-py==4.0.1 \ + --hash=sha256:30d571076753efc389edc7f9aaef338a4fcb24b54d8968d5f39b1342f45ddd36 \ + --hash=sha256:a89af7de25650612d1da2f0609622bcdeb07ee8a45a1c06b2d16a05e4234e753 + # via scriptworker +giturlparse==0.12.0 
\ + --hash=sha256:412b74f2855f1da2fefa89fd8dde62df48476077a72fc19b62039554d27360eb \ + --hash=sha256:c0fff7c21acc435491b1779566e038757a205c1ffdcb47e4f81ea52ad8c3859a + # via mozilla-repo-urls +gql==3.5.2 \ + --hash=sha256:07e1325b820c8ba9478e95de27ce9f23250486e7e79113dbb7659a442dc13e74 \ + --hash=sha256:c830ffc38b3997b2a146317b27758305ab3d0da3bde607b49f34e32affb23ba2 + # via -r requirements/base.in +graphql-core==3.2.4 \ + --hash=sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0 \ + --hash=sha256:acbe2e800980d0e39b4685dd058c2f4042660b89ebca38af83020fd872ff1264 + # via gql +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via + # anyio + # jsonschema + # requests + # yarl +immutabledict==4.2.1 \ + --hash=sha256:c56a26ced38c236f79e74af3ccce53772827cef5c3bce7cab33ff2060f756373 \ + --hash=sha256:d91017248981c72eb66c8ff9834e99c2f53562346f23e7f51e7a5ebcf66a3bcc + # via scriptworker +isoduration==20.11.0 \ + --hash=sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9 \ + --hash=sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042 + # via jsonschema +jinja2==3.1.6 \ + --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ + --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 + # via cookiecutter +json-e==4.8.0 \ + --hash=sha256:51ead93962912d701c6f1a6a0b27cc34bb2cb8397a82affd8adb2401898e27ea \ + --hash=sha256:91a50ba4e1a9e6d40c36c0601d68acda9ae44ca2817525e09938b2c82ce23572 + # via + # scriptworker + # taskcluster-taskgraph +jsonpointer==3.0.0 \ + --hash=sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942 \ + --hash=sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef + # via jsonschema +jsonschema[format-nongpl]==4.23.0 \ + 
--hash=sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4 \ + --hash=sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566 + # via scriptworker +jsonschema-specifications==2024.10.1 \ + --hash=sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272 \ + --hash=sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf + # via jsonschema +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==3.0.2 \ + --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ + --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ + --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ + --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ + --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ + --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ + --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ + --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ + --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ + --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ + 
--hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ + --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ + --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ + --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ + --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ + --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ + --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ + --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ + --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ + --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ + --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ + --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ + --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ + --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ + --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ + --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ + --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ + 
--hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ + --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ + --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ + --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ + --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 + # via jinja2 +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +mohawk==1.1.0 \ + 
--hash=sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09 \ + --hash=sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723 + # via taskcluster +mozilla-repo-urls==0.1.1 \ + --hash=sha256:30510d3519479aa70211145d0ac9cf6e2fadcb8d30fa3b196bb957bd773502ba \ + --hash=sha256:7364da790751db2a060eb45adbf1d7db89a145ed279ba235f3425db9dd255915 + # via taskcluster-taskgraph +mozilla-version==3.1.0 \ + --hash=sha256:3a9463ebcf2249dc8bcf504e246b6b5977c902dfa819de31602e10bce032ed93 \ + --hash=sha256:f798e716da9063608a0b49ca1ec0a51b73ac810c3cc8a4bcc2c461df902b147c + # via -r requirements/base.in +multidict==6.2.0 \ + --hash=sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8 \ + --hash=sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844 \ + --hash=sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d \ + --hash=sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2 \ + --hash=sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331 \ + --hash=sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48 \ + --hash=sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29 \ + --hash=sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c \ + --hash=sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460 \ + --hash=sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b \ + --hash=sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191 \ + --hash=sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49 \ + --hash=sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd \ + --hash=sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc \ + --hash=sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b \ + --hash=sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b \ + 
--hash=sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1 \ + --hash=sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90 \ + --hash=sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f \ + --hash=sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86 \ + --hash=sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc \ + --hash=sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de \ + --hash=sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf \ + --hash=sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7 \ + --hash=sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16 \ + --hash=sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349 \ + --hash=sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2 \ + --hash=sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98 \ + --hash=sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e \ + --hash=sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a \ + --hash=sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e \ + --hash=sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2 \ + --hash=sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b \ + --hash=sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7 \ + --hash=sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081 \ + --hash=sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0 \ + --hash=sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d \ + --hash=sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e \ + --hash=sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80 \ + --hash=sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530 \ + 
--hash=sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44 \ + --hash=sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633 \ + --hash=sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c \ + --hash=sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27 \ + --hash=sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a \ + --hash=sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872 \ + --hash=sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac \ + --hash=sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a \ + --hash=sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626 \ + --hash=sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133 \ + --hash=sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8 \ + --hash=sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f \ + --hash=sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46 \ + --hash=sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d \ + --hash=sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df \ + --hash=sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932 \ + --hash=sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d \ + --hash=sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02 \ + --hash=sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d \ + --hash=sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4 \ + --hash=sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf \ + --hash=sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd \ + --hash=sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2 \ + --hash=sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1 \ + 
--hash=sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e \ + --hash=sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb \ + --hash=sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151 \ + --hash=sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e \ + --hash=sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3 \ + --hash=sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c \ + --hash=sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de \ + --hash=sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a \ + --hash=sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af \ + --hash=sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1 \ + --hash=sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025 \ + --hash=sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44 \ + --hash=sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a \ + --hash=sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88 \ + --hash=sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656 \ + --hash=sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d \ + --hash=sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e \ + --hash=sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547 \ + --hash=sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4 \ + --hash=sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1 \ + --hash=sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd \ + --hash=sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2 \ + --hash=sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc \ + --hash=sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf \ + 
--hash=sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3 \ + --hash=sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817 \ + --hash=sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019 \ + --hash=sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87 + # via + # aiohttp + # yarl +propcache==0.3.0 \ + --hash=sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e \ + --hash=sha256:03acd9ff19021bd0567582ac88f821b66883e158274183b9e5586f678984f8fe \ + --hash=sha256:03c091bb752349402f23ee43bb2bff6bd80ccab7c9df6b88ad4322258d6960fc \ + --hash=sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829 \ + --hash=sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863 \ + --hash=sha256:119e244ab40f70a98c91906d4c1f4c5f2e68bd0b14e7ab0a06922038fae8a20f \ + --hash=sha256:11ae6a8a01b8a4dc79093b5d3ca2c8a4436f5ee251a9840d7790dccbd96cb649 \ + --hash=sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6 \ + --hash=sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c \ + --hash=sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a \ + --hash=sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c \ + --hash=sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545 \ + --hash=sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e \ + --hash=sha256:26a67e5c04e3119594d8cfae517f4b9330c395df07ea65eab16f3d559b7068fe \ + --hash=sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075 \ + --hash=sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57 \ + --hash=sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf \ + --hash=sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d \ + --hash=sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc \ + 
--hash=sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0 \ + --hash=sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1 \ + --hash=sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64 \ + --hash=sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340 \ + --hash=sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db \ + --hash=sha256:46ed02532cb66612d42ae5c3929b5e98ae330ea0f3900bc66ec5f4862069519b \ + --hash=sha256:49ea05212a529c2caffe411e25a59308b07d6e10bf2505d77da72891f9a05641 \ + --hash=sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626 \ + --hash=sha256:507c5357a8d8b4593b97fb669c50598f4e6cccbbf77e22fa9598aba78292b4d7 \ + --hash=sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92 \ + --hash=sha256:58e6d2a5a7cb3e5f166fd58e71e9a4ff504be9dc61b88167e75f835da5764d07 \ + --hash=sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e \ + --hash=sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787 \ + --hash=sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a \ + --hash=sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810 \ + --hash=sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d \ + --hash=sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0 \ + --hash=sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b \ + --hash=sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043 \ + --hash=sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3 \ + --hash=sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7 \ + --hash=sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d \ + --hash=sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf \ + --hash=sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138 \ + 
--hash=sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c \ + --hash=sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d \ + --hash=sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46 \ + --hash=sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6 \ + --hash=sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa \ + --hash=sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e \ + --hash=sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05 \ + --hash=sha256:7cedd25e5f678f7738da38037435b340694ab34d424938041aa630d8bac42663 \ + --hash=sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f \ + --hash=sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c \ + --hash=sha256:8526b0941ec5a40220fc4dfde76aed58808e2b309c03e9fa8e2260083ef7157f \ + --hash=sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7 \ + --hash=sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f \ + --hash=sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7 \ + --hash=sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9 \ + --hash=sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667 \ + --hash=sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86 \ + --hash=sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51 \ + --hash=sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0 \ + --hash=sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a \ + --hash=sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c \ + --hash=sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568 \ + --hash=sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af \ + --hash=sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25 \ + 
--hash=sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5 \ + --hash=sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe \ + --hash=sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf \ + --hash=sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9 \ + --hash=sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf \ + --hash=sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767 \ + --hash=sha256:be90c94570840939fecedf99fa72839aed70b0ced449b415c85e01ae67422c90 \ + --hash=sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c \ + --hash=sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d \ + --hash=sha256:bf4298f366ca7e1ad1d21bbb58300a6985015909964077afd37559084590c929 \ + --hash=sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e \ + --hash=sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32 \ + --hash=sha256:cd54895e4ae7d32f1e3dd91261df46ee7483a735017dc6f987904f194aa5fd14 \ + --hash=sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8 \ + --hash=sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b \ + --hash=sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc \ + --hash=sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa \ + --hash=sha256:df03cd88f95b1b99052b52b1bb92173229d7a674df0ab06d2b25765ee8404bce \ + --hash=sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b \ + --hash=sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e \ + --hash=sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf \ + --hash=sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9 \ + --hash=sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac \ + --hash=sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f \ + 
--hash=sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374 \ + --hash=sha256:ee25f1ac091def37c4b59d192bbe3a206298feeb89132a470325bf76ad122a1e \ + --hash=sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d \ + --hash=sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e \ + --hash=sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121 \ + --hash=sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5 \ + --hash=sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54 + # via + # aiohttp + # yarl +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc + # via cffi +pygments==2.19.1 \ + --hash=sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f \ + --hash=sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c + # via rich +pyjwt[crypto]==2.10.1 \ + --hash=sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953 \ + --hash=sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb + # via github3-py +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 + # via + # arrow + # github3-py + # taskcluster +python-slugify==8.0.4 \ + --hash=sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8 \ + --hash=sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856 + # via cookiecutter +pyyaml==6.0.2 \ + --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ + --hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \ + --hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \ + 
--hash=sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e \ + --hash=sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133 \ + --hash=sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5 \ + --hash=sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484 \ + --hash=sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee \ + --hash=sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5 \ + --hash=sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68 \ + --hash=sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a \ + --hash=sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf \ + --hash=sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99 \ + --hash=sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8 \ + --hash=sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 \ + --hash=sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19 \ + --hash=sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc \ + --hash=sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a \ + --hash=sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1 \ + --hash=sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317 \ + --hash=sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c \ + --hash=sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631 \ + --hash=sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d \ + --hash=sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652 \ + --hash=sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5 \ + --hash=sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e \ + --hash=sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b \ + 
--hash=sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8 \ + --hash=sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476 \ + --hash=sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706 \ + --hash=sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563 \ + --hash=sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237 \ + --hash=sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b \ + --hash=sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083 \ + --hash=sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180 \ + --hash=sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425 \ + --hash=sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e \ + --hash=sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f \ + --hash=sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725 \ + --hash=sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183 \ + --hash=sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab \ + --hash=sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 \ + --hash=sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725 \ + --hash=sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e \ + --hash=sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5 \ + --hash=sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d \ + --hash=sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290 \ + --hash=sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 \ + --hash=sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed \ + --hash=sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4 \ + --hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \ + 
--hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \ + --hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4 + # via + # cookiecutter + # scriptworker + # taskcluster-taskgraph +redo==3.0.0 \ + --hash=sha256:52a14200004d6708924a547b31b7d1c717cb36b944f3a5c7b176e0d61ab81eef \ + --hash=sha256:66905396b2882577fa4bf7edb90fee081db2b98992d303f12e3f898ac7f7bd56 + # via taskcluster-taskgraph +referencing==0.36.2 \ + --hash=sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa \ + --hash=sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0 + # via + # jsonschema + # jsonschema-specifications +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # cookiecutter + # github3-py + # taskcluster + # taskcluster-taskgraph +rfc3339-validator==0.1.4 \ + --hash=sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b \ + --hash=sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa + # via jsonschema +rfc3986-validator==0.1.1 \ + --hash=sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9 \ + --hash=sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055 + # via jsonschema +rich==13.9.4 \ + --hash=sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098 \ + --hash=sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90 + # via cookiecutter +rpds-py==0.23.1 \ + --hash=sha256:09cd7dbcb673eb60518231e02874df66ec1296c01a4fcd733875755c02014b19 \ + --hash=sha256:0f3288930b947cbebe767f84cf618d2cbe0b13be476e749da0e6a009f986248c \ + --hash=sha256:0fced9fd4a07a1ded1bac7e961ddd9753dd5d8b755ba8e05acba54a21f5f1522 \ + --hash=sha256:112b8774b0b4ee22368fec42749b94366bd9b536f8f74c3d4175d4395f5cbd31 \ + --hash=sha256:11dd60b2ffddba85715d8a66bb39b95ddbe389ad2cfcf42c833f1bcde0878eaf 
\ + --hash=sha256:178f8a60fc24511c0eb756af741c476b87b610dba83270fce1e5a430204566a4 \ + --hash=sha256:1b08027489ba8fedde72ddd233a5ea411b85a6ed78175f40285bd401bde7466d \ + --hash=sha256:1bf5be5ba34e19be579ae873da515a2836a2166d8d7ee43be6ff909eda42b72b \ + --hash=sha256:1ed7de3c86721b4e83ac440751329ec6a1102229aa18163f84c75b06b525ad7e \ + --hash=sha256:1eedaaccc9bb66581d4ae7c50e15856e335e57ef2734dbc5fd8ba3e2a4ab3cb6 \ + --hash=sha256:243241c95174b5fb7204c04595852fe3943cc41f47aa14c3828bc18cd9d3b2d6 \ + --hash=sha256:26bb3e8de93443d55e2e748e9fd87deb5f8075ca7bc0502cfc8be8687d69a2ec \ + --hash=sha256:271fa2184cf28bdded86bb6217c8e08d3a169fe0bbe9be5e8d96e8476b707122 \ + --hash=sha256:28358c54fffadf0ae893f6c1050e8f8853e45df22483b7fff2f6ab6152f5d8bf \ + --hash=sha256:285019078537949cecd0190f3690a0b0125ff743d6a53dfeb7a4e6787af154f5 \ + --hash=sha256:2893d778d4671ee627bac4037a075168b2673c57186fb1a57e993465dbd79a93 \ + --hash=sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed \ + --hash=sha256:2c6ae11e6e93728d86aafc51ced98b1658a0080a7dd9417d24bfb955bb09c3c2 \ + --hash=sha256:2cfa07c346a7ad07019c33fb9a63cf3acb1f5363c33bc73014e20d9fe8b01cdd \ + --hash=sha256:35d5631ce0af26318dba0ae0ac941c534453e42f569011585cb323b7774502a5 \ + --hash=sha256:3614d280bf7aab0d3721b5ce0e73434acb90a2c993121b6e81a1c15c665298ac \ + --hash=sha256:3902df19540e9af4cc0c3ae75974c65d2c156b9257e91f5101a51f99136d834c \ + --hash=sha256:3aaf141d39f45322e44fc2c742e4b8b4098ead5317e5f884770c8df0c332da70 \ + --hash=sha256:3d8abf7896a91fb97e7977d1aadfcc2c80415d6dc2f1d0fca5b8d0df247248f3 \ + --hash=sha256:3e77febf227a1dc3220159355dba68faa13f8dca9335d97504abf428469fb18b \ + --hash=sha256:3e9212f52074fc9d72cf242a84063787ab8e21e0950d4d6709886fb62bcb91d5 \ + --hash=sha256:3ee9d6f0b38efb22ad94c3b68ffebe4c47865cdf4b17f6806d6c674e1feb4246 \ + --hash=sha256:4233df01a250b3984465faed12ad472f035b7cd5240ea3f7c76b7a7016084495 \ + --hash=sha256:4263320ed887ed843f85beba67f8b2d1483b5947f2dc73a8b068924558bfeace \ + 
--hash=sha256:4ab923167cfd945abb9b51a407407cf19f5bee35001221f2911dc85ffd35ff4f \ + --hash=sha256:4caafd1a22e5eaa3732acb7672a497123354bef79a9d7ceed43387d25025e935 \ + --hash=sha256:50fb62f8d8364978478b12d5f03bf028c6bc2af04082479299139dc26edf4c64 \ + --hash=sha256:55ff4151cfd4bc635e51cfb1c59ac9f7196b256b12e3a57deb9e5742e65941ad \ + --hash=sha256:5b98b6c953e5c2bda51ab4d5b4f172617d462eebc7f4bfdc7c7e6b423f6da957 \ + --hash=sha256:5c9ff044eb07c8468594d12602291c635da292308c8c619244e30698e7fc455a \ + --hash=sha256:5e9c206a1abc27e0588cf8b7c8246e51f1a16a103734f7750830a1ccb63f557a \ + --hash=sha256:5fb89edee2fa237584e532fbf78f0ddd1e49a47c7c8cfa153ab4849dc72a35e6 \ + --hash=sha256:633462ef7e61d839171bf206551d5ab42b30b71cac8f10a64a662536e057fdef \ + --hash=sha256:66f8d2a17e5838dd6fb9be6baaba8e75ae2f5fa6b6b755d597184bfcd3cb0eba \ + --hash=sha256:6959bb9928c5c999aba4a3f5a6799d571ddc2c59ff49917ecf55be2bbb4e3722 \ + --hash=sha256:698a79d295626ee292d1730bc2ef6e70a3ab135b1d79ada8fde3ed0047b65a10 \ + --hash=sha256:721f9c4011b443b6e84505fc00cc7aadc9d1743f1c988e4c89353e19c4a968ee \ + --hash=sha256:72e680c1518733b73c994361e4b06441b92e973ef7d9449feec72e8ee4f713da \ + --hash=sha256:75307599f0d25bf6937248e5ac4e3bde5ea72ae6618623b86146ccc7845ed00b \ + --hash=sha256:754fba3084b70162a6b91efceee8a3f06b19e43dac3f71841662053c0584209a \ + --hash=sha256:759462b2d0aa5a04be5b3e37fb8183615f47014ae6b116e17036b131985cb731 \ + --hash=sha256:7938c7b0599a05246d704b3f5e01be91a93b411d0d6cc62275f025293b8a11ce \ + --hash=sha256:7b77e07233925bd33fc0022b8537774423e4c6680b6436316c5075e79b6384f4 \ + --hash=sha256:7e5413d2e2d86025e73f05510ad23dad5950ab8417b7fc6beaad99be8077138b \ + --hash=sha256:7f3240dcfa14d198dba24b8b9cb3b108c06b68d45b7babd9eefc1038fdf7e707 \ + --hash=sha256:7f9682a8f71acdf59fd554b82b1c12f517118ee72c0f3944eda461606dfe7eb9 \ + --hash=sha256:8d67beb6002441faef8251c45e24994de32c4c8686f7356a1f601ad7c466f7c3 \ + --hash=sha256:9441af1d25aed96901f97ad83d5c3e35e6cd21a25ca5e4916c82d7dd0490a4fa \ + 
--hash=sha256:98b257ae1e83f81fb947a363a274c4eb66640212516becaff7bef09a5dceacaa \ + --hash=sha256:9e9f3a3ac919406bc0414bbbd76c6af99253c507150191ea79fab42fdb35982a \ + --hash=sha256:a1c66e71ecfd2a4acf0e4bd75e7a3605afa8f9b28a3b497e4ba962719df2be57 \ + --hash=sha256:a1e17d8dc8e57d8e0fd21f8f0f0a5211b3fa258b2e444c2053471ef93fe25a00 \ + --hash=sha256:a20cb698c4a59c534c6701b1c24a968ff2768b18ea2991f886bd8985ce17a89f \ + --hash=sha256:a970bfaf130c29a679b1d0a6e0f867483cea455ab1535fb427566a475078f27f \ + --hash=sha256:a98f510d86f689fcb486dc59e6e363af04151e5260ad1bdddb5625c10f1e95f8 \ + --hash=sha256:a9d3b728f5a5873d84cba997b9d617c6090ca5721caaa691f3b1a78c60adc057 \ + --hash=sha256:ad76f44f70aac3a54ceb1813ca630c53415da3a24fd93c570b2dfb4856591017 \ + --hash=sha256:ae28144c1daa61366205d32abd8c90372790ff79fc60c1a8ad7fd3c8553a600e \ + --hash=sha256:b03a8d50b137ee758e4c73638b10747b7c39988eb8e6cd11abb7084266455165 \ + --hash=sha256:b5a96fcac2f18e5a0a23a75cd27ce2656c66c11c127b0318e508aab436b77428 \ + --hash=sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c \ + --hash=sha256:b79f5ced71efd70414a9a80bbbfaa7160da307723166f09b69773153bf17c590 \ + --hash=sha256:b91cceb5add79ee563bd1f70b30896bd63bc5f78a11c1f00a1e931729ca4f1f4 \ + --hash=sha256:b92f5654157de1379c509b15acec9d12ecf6e3bc1996571b6cb82a4302060447 \ + --hash=sha256:c04ca91dda8a61584165825907f5c967ca09e9c65fe8966ee753a3f2b019fe1e \ + --hash=sha256:c1f8afa346ccd59e4e5630d5abb67aba6a9812fddf764fd7eb11f382a345f8cc \ + --hash=sha256:c5334a71f7dc1160382d45997e29f2637c02f8a26af41073189d79b95d3321f1 \ + --hash=sha256:c617d7453a80e29d9973b926983b1e700a9377dbe021faa36041c78537d7b08c \ + --hash=sha256:c632419c3870507ca20a37c8f8f5352317aca097639e524ad129f58c125c61c6 \ + --hash=sha256:c6760211eee3a76316cf328f5a8bd695b47b1626d21c8a27fb3b2473a884d597 \ + --hash=sha256:c698d123ce5d8f2d0cd17f73336615f6a2e3bdcedac07a1291bb4d8e7d82a05a \ + --hash=sha256:c76b32eb2ab650a29e423525e84eb197c45504b1c1e6e17b6cc91fcfeb1a4b1d \ + 
--hash=sha256:c8f7e90b948dc9dcfff8003f1ea3af08b29c062f681c05fd798e36daa3f7e3e8 \ + --hash=sha256:c9e799dac1ffbe7b10c1fd42fe4cd51371a549c6e108249bde9cd1200e8f59b4 \ + --hash=sha256:cafa48f2133d4daa028473ede7d81cd1b9f9e6925e9e4003ebdf77010ee02f35 \ + --hash=sha256:ce473a2351c018b06dd8d30d5da8ab5a0831056cc53b2006e2a8028172c37ce5 \ + --hash=sha256:d31ed4987d72aabdf521eddfb6a72988703c091cfc0064330b9e5f8d6a042ff5 \ + --hash=sha256:d550d7e9e7d8676b183b37d65b5cd8de13676a738973d330b59dc8312df9c5dc \ + --hash=sha256:d6adb81564af0cd428910f83fa7da46ce9ad47c56c0b22b50872bc4515d91966 \ + --hash=sha256:d6f6512a90bd5cd9030a6237f5346f046c6f0e40af98657568fa45695d4de59d \ + --hash=sha256:d7031d493c4465dbc8d40bd6cafefef4bd472b17db0ab94c53e7909ee781b9ef \ + --hash=sha256:d9f75a06ecc68f159d5d7603b734e1ff6daa9497a929150f794013aa9f6e3f12 \ + --hash=sha256:db7707dde9143a67b8812c7e66aeb2d843fe33cc8e374170f4d2c50bd8f2472d \ + --hash=sha256:e0397dd0b3955c61ef9b22838144aa4bef6f0796ba5cc8edfc64d468b93798b4 \ + --hash=sha256:e0df046f2266e8586cf09d00588302a32923eb6386ced0ca5c9deade6af9a149 \ + --hash=sha256:e14f86b871ea74c3fddc9a40e947d6a5d09def5adc2076ee61fb910a9014fb35 \ + --hash=sha256:e5963ea87f88bddf7edd59644a35a0feecf75f8985430124c253612d4f7d27ae \ + --hash=sha256:e768267cbe051dd8d1c5305ba690bb153204a09bf2e3de3ae530de955f5b5580 \ + --hash=sha256:e9cb79ecedfc156c0692257ac7ed415243b6c35dd969baa461a6888fc79f2f07 \ + --hash=sha256:ed6f011bedca8585787e5082cce081bac3d30f54520097b2411351b3574e1219 \ + --hash=sha256:f3429fb8e15b20961efca8c8b21432623d85db2228cc73fe22756c6637aa39e7 \ + --hash=sha256:f35eff113ad430b5272bbfc18ba111c66ff525828f24898b4e146eb479a2cdda \ + --hash=sha256:f3a6cb95074777f1ecda2ca4fa7717caa9ee6e534f42b7575a8f0d4cb0c24013 \ + --hash=sha256:f7356a6da0562190558c4fcc14f0281db191cdf4cb96e7604c06acfcee96df15 \ + --hash=sha256:f88626e3f5e57432e6191cd0c5d6d6b319b635e70b40be2ffba713053e5147dd \ + --hash=sha256:fad784a31869747df4ac968a351e070c06ca377549e4ace94775aaa3ab33ee06 \ + 
--hash=sha256:fc869af5cba24d45fb0399b0cfdbcefcf6910bf4dee5d74036a57cf5264b3ff4 \ + --hash=sha256:fee513135b5a58f3bb6d89e48326cd5aa308e4bcdf2f7d59f67c861ada482bf8 + # via + # jsonschema + # referencing +scriptworker==60.7.1 \ + --hash=sha256:a5c0e6e87ecc4df9ca1eccd7bb246e3f892b33ac2852d02825e7de417d24d9d1 \ + --hash=sha256:ed2dcad392744654ec36d210ebc943f40d219d457769ac3b6123775309a4e037 + # via -r requirements/base.in +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via + # mohawk + # python-dateutil + # rfc3339-validator +slugid==2.0.0 \ + --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \ + --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c + # via + # taskcluster + # taskcluster-taskgraph +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc + # via anyio +taskcluster==83.3.0 \ + --hash=sha256:0e882ca827109c32d9feea39232fbbc68451fb238618cf9c45c1e5053a845cc5 \ + --hash=sha256:e32531a13bd5c6652b1ab8f7a7f917dedb9a63a1ae9af0ed8a83498afa036a85 + # via scriptworker +taskcluster-taskgraph==14.0.0 \ + --hash=sha256:04be28c01ea244930d25a0d0474aa8109c2d9273b74c628ab447b4356858d60f \ + --hash=sha256:512463517429c323c1834e3164606d44bc55837e6afab56dd8d97f3abad892d2 + # via scriptworker +taskcluster-urls==13.0.1 \ + --hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \ + --hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \ + --hash=sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b + # via + # taskcluster + # taskcluster-taskgraph +text-unidecode==1.3 \ + --hash=sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8 \ + 
--hash=sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93 + # via python-slugify +types-python-dateutil==2.9.0.20241206 \ + --hash=sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb \ + --hash=sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53 + # via arrow +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 + # via + # anyio + # referencing +uri-template==1.3.0 \ + --hash=sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7 \ + --hash=sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363 + # via jsonschema +uritemplate==4.1.1 \ + --hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \ + --hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e + # via github3-py +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +voluptuous==0.15.2 \ + --hash=sha256:016348bc7788a9af9520b1764ebd4de0df41fe2138ebe9e06fa036bf86a65566 \ + --hash=sha256:6ffcab32c4d3230b4d2af3a577c87e1908a714a11f6f95570456b1849b0279aa + # via taskcluster-taskgraph +webcolors==24.11.1 \ + --hash=sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9 \ + --hash=sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6 + # via jsonschema +yarl==1.18.3 \ + --hash=sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba \ + --hash=sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193 \ + --hash=sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318 \ + --hash=sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee \ + 
--hash=sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e \ + --hash=sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1 \ + --hash=sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a \ + --hash=sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186 \ + --hash=sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1 \ + --hash=sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50 \ + --hash=sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640 \ + --hash=sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb \ + --hash=sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8 \ + --hash=sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc \ + --hash=sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5 \ + --hash=sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58 \ + --hash=sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2 \ + --hash=sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393 \ + --hash=sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24 \ + --hash=sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b \ + --hash=sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910 \ + --hash=sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c \ + --hash=sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272 \ + --hash=sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed \ + --hash=sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1 \ + --hash=sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04 \ + --hash=sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d \ + --hash=sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5 \ + 
--hash=sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d \ + --hash=sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889 \ + --hash=sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae \ + --hash=sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b \ + --hash=sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c \ + --hash=sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576 \ + --hash=sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34 \ + --hash=sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477 \ + --hash=sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990 \ + --hash=sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2 \ + --hash=sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512 \ + --hash=sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069 \ + --hash=sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a \ + --hash=sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6 \ + --hash=sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0 \ + --hash=sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8 \ + --hash=sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb \ + --hash=sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa \ + --hash=sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8 \ + --hash=sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e \ + --hash=sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e \ + --hash=sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985 \ + --hash=sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8 \ + --hash=sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1 \ + 
--hash=sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5 \ + --hash=sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690 \ + --hash=sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10 \ + --hash=sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789 \ + --hash=sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b \ + --hash=sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca \ + --hash=sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e \ + --hash=sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5 \ + --hash=sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59 \ + --hash=sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9 \ + --hash=sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8 \ + --hash=sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db \ + --hash=sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde \ + --hash=sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7 \ + --hash=sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb \ + --hash=sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3 \ + --hash=sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6 \ + --hash=sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285 \ + --hash=sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb \ + --hash=sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8 \ + --hash=sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482 \ + --hash=sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd \ + --hash=sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75 \ + --hash=sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760 \ + 
--hash=sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782 \ + --hash=sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53 \ + --hash=sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2 \ + --hash=sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1 \ + --hash=sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719 \ + --hash=sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62 + # via + # -r requirements/base.in + # aiohttp + # gql diff --git a/landoscript/requirements/local.in b/landoscript/requirements/local.in new file mode 100644 index 000000000..922956ede --- /dev/null +++ b/landoscript/requirements/local.in @@ -0,0 +1,3 @@ +-r test.in + +tox diff --git a/landoscript/requirements/local.txt b/landoscript/requirements/local.txt new file mode 100644 index 000000000..b70a5f790 --- /dev/null +++ b/landoscript/requirements/local.txt @@ -0,0 +1,44 @@ +# SHA1:f88f7cbe68a488d5a461e166640111dd30a94bcb +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r test.txt +cachetools==5.5.2 \ + --hash=sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4 \ + --hash=sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a + # via tox +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via tox +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 + # via virtualenv +filelock==3.18.0 \ + --hash=sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2 \ + --hash=sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de + # via + # tox + # virtualenv +platformdirs==4.3.6 \ + 
--hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb + # via + # tox + # virtualenv +pyproject-api==1.9.0 \ + --hash=sha256:326df9d68dea22d9d98b5243c46e3ca3161b07a1b9b18e213d1e24fd0e605766 \ + --hash=sha256:7e8a9854b2dfb49454fae421cb86af43efbb2b2454e5646ffb7623540321ae6e + # via tox +tox==4.24.2 \ + --hash=sha256:92e8290e76ad4e15748860a205865696409a2d014eedeb796a34a0f3b5e7336e \ + --hash=sha256:d5948b350f76fae436d6545a5e87c2b676ab7a0d7d88c1308651245eadbe8aea + # via -r requirements/local.in +virtualenv==20.29.3 \ + --hash=sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170 \ + --hash=sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac + # via tox diff --git a/landoscript/requirements/test.in b/landoscript/requirements/test.in new file mode 100644 index 000000000..c86802f59 --- /dev/null +++ b/landoscript/requirements/test.in @@ -0,0 +1,7 @@ +-r base.in + +coverage +pytest +pytest-aioresponses +pytest-asyncio +pytest-cov diff --git a/landoscript/requirements/test.txt b/landoscript/requirements/test.txt new file mode 100644 index 000000000..ef915d1fa --- /dev/null +++ b/landoscript/requirements/test.txt @@ -0,0 +1,113 @@ +# SHA1:d3a60c74bb67b74d68d13ca57aed356aceaeaa38 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r base.txt +aioresponses==0.7.8 \ + --hash=sha256:b73bd4400d978855e55004b23a3a84cb0f018183bcf066a85ad392800b5b9a94 \ + --hash=sha256:b861cdfe5dc58f3b8afac7b0a6973d5d7b2cb608dd0f6253d16b8ee8eaf6df11 + # via pytest-aioresponses +coverage[toml]==7.7.0 \ + --hash=sha256:056d3017ed67e7ddf266e6f57378ece543755a4c9231e997789ab3bd11392c94 \ + --hash=sha256:0ce8cf59e09d31a4915ff4c3b94c6514af4c84b22c4cc8ad7c3c546a86150a92 \ + --hash=sha256:104bf640f408f4e115b85110047c7f27377e1a8b7ba86f7db4fa47aa49dc9a8e \ + 
--hash=sha256:1393e5aa9441dafb0162c36c8506c648b89aea9565b31f6bfa351e66c11bcd82 \ + --hash=sha256:1586ad158523f4133499a4f322b230e2cfef9cc724820dbd58595a5a236186f4 \ + --hash=sha256:180e3fc68ee4dc5af8b33b6ca4e3bb8aa1abe25eedcb958ba5cff7123071af68 \ + --hash=sha256:1b336d06af14f8da5b1f391e8dec03634daf54dfcb4d1c4fb6d04c09d83cef90 \ + --hash=sha256:1c8fbce80b2b8bf135d105aa8f5b36eae0c57d702a1cc3ebdea2a6f03f6cdde5 \ + --hash=sha256:2d673e3add00048215c2cc507f1228a7523fd8bf34f279ac98334c9b07bd2656 \ + --hash=sha256:316f29cc3392fa3912493ee4c83afa4a0e2db04ff69600711f8c03997c39baaa \ + --hash=sha256:33c1394d8407e2771547583b66a85d07ed441ff8fae5a4adb4237ad39ece60db \ + --hash=sha256:37cbc7b0d93dfd133e33c7ec01123fbb90401dce174c3b6661d8d36fb1e30608 \ + --hash=sha256:39abcacd1ed54e2c33c54bdc488b310e8ef6705833f7148b6eb9a547199d375d \ + --hash=sha256:3ab7090f04b12dc6469882ce81244572779d3a4b67eea1c96fb9ecc8c607ef39 \ + --hash=sha256:3b0e6e54591ae0d7427def8a4d40fca99df6b899d10354bab73cd5609807261c \ + --hash=sha256:416e2a8845eaff288f97eaf76ab40367deafb9073ffc47bf2a583f26b05e5265 \ + --hash=sha256:4545485fef7a8a2d8f30e6f79ce719eb154aab7e44217eb444c1d38239af2072 \ + --hash=sha256:4c124025430249118d018dcedc8b7426f39373527c845093132196f2a483b6dd \ + --hash=sha256:4fbb7a0c3c21908520149d7751cf5b74eb9b38b54d62997b1e9b3ac19a8ee2fe \ + --hash=sha256:52fc89602cde411a4196c8c6894afb384f2125f34c031774f82a4f2608c59d7d \ + --hash=sha256:55143aa13c49491f5606f05b49ed88663446dce3a4d3c5d77baa4e36a16d3573 \ + --hash=sha256:57f3bd0d29bf2bd9325c0ff9cc532a175110c4bf8f412c05b2405fd35745266d \ + --hash=sha256:5b2f144444879363ea8834cd7b6869d79ac796cb8f864b0cfdde50296cd95816 \ + --hash=sha256:5efdeff5f353ed3352c04e6b318ab05c6ce9249c25ed3c2090c6e9cadda1e3b2 \ + --hash=sha256:60e6347d1ed882b1159ffea172cb8466ee46c665af4ca397edbf10ff53e9ffaf \ + --hash=sha256:693d921621a0c8043bfdc61f7d4df5ea6d22165fe8b807cac21eb80dd94e4bbd \ + --hash=sha256:708f0a1105ef2b11c79ed54ed31f17e6325ac936501fc373f24be3e6a578146a \ + 
--hash=sha256:70f0925c4e2bfc965369f417e7cc72538fd1ba91639cf1e4ef4b1a6b50439b3b \ + --hash=sha256:7789e700f33f2b133adae582c9f437523cd5db8de845774988a58c360fc88253 \ + --hash=sha256:7b6c96d69928a3a6767fab8dc1ce8a02cf0156836ccb1e820c7f45a423570d98 \ + --hash=sha256:7d2a65876274acf544703e943c010b60bd79404e3623a1e5d52b64a6e2728de5 \ + --hash=sha256:7f18d47641282664276977c604b5a261e51fefc2980f5271d547d706b06a837f \ + --hash=sha256:89078312f06237417adda7c021c33f80f7a6d2db8572a5f6c330d89b080061ce \ + --hash=sha256:8c938c6ae59be67ac19a7204e079efc94b38222cd7d0269f96e45e18cddeaa59 \ + --hash=sha256:8e336b56301774ace6be0017ff85c3566c556d938359b61b840796a0202f805c \ + --hash=sha256:a0a207c87a9f743c8072d059b4711f8d13c456eb42dac778a7d2e5d4f3c253a7 \ + --hash=sha256:a2454b12a3f12cc4698f3508912e6225ec63682e2ca5a96f80a2b93cef9e63f3 \ + --hash=sha256:a538a23119d1e2e2ce077e902d02ea3d8e0641786ef6e0faf11ce82324743944 \ + --hash=sha256:aa4dff57fc21a575672176d5ab0ef15a927199e775c5e8a3d75162ab2b0c7705 \ + --hash=sha256:ad0edaa97cb983d9f2ff48cadddc3e1fb09f24aa558abeb4dc9a0dbacd12cbb4 \ + --hash=sha256:ae8006772c6b0fa53c33747913473e064985dac4d65f77fd2fdc6474e7cd54e4 \ + --hash=sha256:b0fac2088ec4aaeb5468b814bd3ff5e5978364bfbce5e567c44c9e2854469f6c \ + --hash=sha256:b3e212a894d8ae07fde2ca8b43d666a6d49bbbddb10da0f6a74ca7bd31f20054 \ + --hash=sha256:b54a1ee4c6f1905a436cbaa04b26626d27925a41cbc3a337e2d3ff7038187f07 \ + --hash=sha256:b667b91f4f714b17af2a18e220015c941d1cf8b07c17f2160033dbe1e64149f0 \ + --hash=sha256:b8c36093aca722db73633cf2359026ed7782a239eb1c6db2abcff876012dc4cf \ + --hash=sha256:bb356e7ae7c2da13f404bf8f75be90f743c6df8d4607022e759f5d7d89fe83f8 \ + --hash=sha256:bce730d484038e97f27ea2dbe5d392ec5c2261f28c319a3bb266f6b213650135 \ + --hash=sha256:c075d167a6ec99b798c1fdf6e391a1d5a2d054caffe9593ba0f97e3df2c04f0e \ + --hash=sha256:c4e09534037933bf6eb31d804e72c52ec23219b32c1730f9152feabbd7499463 \ + --hash=sha256:c5f8a5364fc37b2f172c26a038bc7ec4885f429de4a05fc10fdcb53fb5834c5c \ + 
--hash=sha256:cb203c0afffaf1a8f5b9659a013f8f16a1b2cad3a80a8733ceedc968c0cf4c57 \ + --hash=sha256:cc41374d2f27d81d6558f8a24e5c114580ffefc197fd43eabd7058182f743322 \ + --hash=sha256:cd879d4646055a573775a1cec863d00c9ff8c55860f8b17f6d8eee9140c06166 \ + --hash=sha256:d013c07061751ae81861cae6ec3a4fe04e84781b11fd4b6b4201590234b25c7b \ + --hash=sha256:d8c7524779003d59948c51b4fcbf1ca4e27c26a7d75984f63488f3625c328b9b \ + --hash=sha256:d9710521f07f526de30ccdead67e6b236fe996d214e1a7fba8b36e2ba2cd8261 \ + --hash=sha256:e1ffde1d6bc2a92f9c9207d1ad808550873748ac2d4d923c815b866baa343b3f \ + --hash=sha256:e7f559c36d5cdc448ee13e7e56ed7b6b5d44a40a511d584d388a0f5d940977ba \ + --hash=sha256:f2a1e18a85bd066c7c556d85277a7adf4651f259b2579113844835ba1a74aafd \ + --hash=sha256:f32b165bf6dfea0846a9c9c38b7e1d68f313956d60a15cde5d1709fddcaf3bee \ + --hash=sha256:f5a2f71d6a91238e7628f23538c26aa464d390cbdedf12ee2a7a0fb92a24482a \ + --hash=sha256:f81fe93dc1b8e5673f33443c0786c14b77e36f1025973b85e07c70353e46882b + # via + # -r requirements/test.in + # pytest-cov +iniconfig==2.0.0 \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + # via pytest +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f + # via + # aioresponses + # pytest +pluggy==1.5.0 \ + --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ + --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 + # via pytest +pytest==8.3.5 \ + --hash=sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820 \ + --hash=sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845 + # via + # -r requirements/test.in + # pytest-aioresponses + # pytest-asyncio + # pytest-cov +pytest-aioresponses==0.3.0 \ + 
--hash=sha256:5677b32dfa1a36908b347524b5867aab35ac1c5ce1d4970244d6f66009bca7b6 \ + --hash=sha256:60f3124ff05a0210a5f369dd95e4cf66090774ba76b322f7178858ce4e6c1647 + # via -r requirements/test.in +pytest-asyncio==0.25.3 \ + --hash=sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3 \ + --hash=sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a + # via -r requirements/test.in +pytest-cov==6.0.0 \ + --hash=sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35 \ + --hash=sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0 + # via -r requirements/test.in diff --git a/landoscript/setup.py b/landoscript/setup.py new file mode 100644 index 000000000..79685a4ed --- /dev/null +++ b/landoscript/setup.py @@ -0,0 +1,17 @@ +# noqa: D100 +from setuptools import find_packages, setup + +setup( + name="landoscript", + # never changes + version="1.0", + description="Landoscript scriptworker", + author="Mozilla Release Engineering", + author_email="release+python@mozilla.com", + url="https://github.com/mozilla-releng/scriptworker-scripts", + packages=find_packages("src"), + package_dir={"": "src"}, + entry_points={"console_scripts": ["landoscript = landoscript.script:main"]}, + python_requires=">=3.11", + license="MPL2", +) diff --git a/landoscript/src/landoscript/__init__.py b/landoscript/src/landoscript/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/landoscript/src/landoscript/actions/__init__.py b/landoscript/src/landoscript/actions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/landoscript/src/landoscript/actions/version_bump.py b/landoscript/src/landoscript/actions/version_bump.py new file mode 100644 index 000000000..535e645e8 --- /dev/null +++ b/landoscript/src/landoscript/actions/version_bump.py @@ -0,0 +1,143 @@ +import datetime +import logging +import os.path +import typing +from typing import TypedDict + +from gql.transport.exceptions import 
import datetime
import logging
import os.path
import typing
from typing import TypedDict

from gql.transport.exceptions import TransportError
from mozilla_version.gecko import FirefoxVersion, GeckoVersion, ThunderbirdVersion
from mozilla_version.mobile import MobileVersion
from mozilla_version.version import BaseVersion
from scriptworker.exceptions import TaskVerificationError

from landoscript.errors import LandoscriptError
from landoscript.lando import LandoAction
from landoscript.util.diffs import diff_contents
from scriptworker_client.github_client import GithubClient

log = logging.getLogger(__name__)

# A list of files that this action is allowed to operate on.
ALLOWED_BUMP_FILES = (
    "browser/config/version.txt",
    "browser/config/version_display.txt",
    "config/milestone.txt",
    "mobile/android/version.txt",
    "mail/config/version.txt",
    "mail/config/version_display.txt",
)

# A mapping of bump file prefixes to parsers for their contents.
_VERSION_CLASS_PER_BEGINNING_OF_PATH = {
    "browser/": FirefoxVersion,
    "config/milestone.txt": GeckoVersion,
    "mobile/android/": MobileVersion,
    "mail/": ThunderbirdVersion,
}


def log_file_contents(contents: str):
    """Log `contents` one line at a time, so each line gets its own log record."""
    for line in contents.splitlines():
        log.info(line)


class VersionBumpInfo(TypedDict):
    """The `version_bump_info` portion of a landoscript task payload."""

    # The version to bump to, eg: "135.0".
    next_version: str
    # Repository-relative paths of files to bump; each must be in ALLOWED_BUMP_FILES.
    files: list[str]


async def run(
    github_client: GithubClient,
    public_artifact_dir: str,
    branch: str,
    version_bump_info: VersionBumpInfo,
    dontbuild: bool,
) -> LandoAction:
    """Perform version bumps on the files given in `version_bump_info`, if necessary.

    Fetches the current file contents from GitHub on `branch`, bumps any file
    whose version is older than `next_version`, and writes the combined diff
    to `public_artifact_dir/version-bump.diff`.

    Returns:
        A `create-commit` lando action containing the diff, or an empty dict
        if no file needed bumping.

    Raises:
        TaskVerificationError: if any requested file is not in the allowlist.
        LandoscriptError: if files can't be fetched, don't exist, or can't be bumped.
    """

    next_version = version_bump_info["next_version"]

    # Validate against the allowlist before doing any network fetches.
    for file in version_bump_info["files"]:
        if file not in ALLOWED_BUMP_FILES:
            raise TaskVerificationError("{} is not in version bump allowlist".format(file))

    try:
        log.info("fetching bump files from github")
        orig_files = await github_client.get_files(version_bump_info["files"], branch)
    except TransportError as e:
        raise LandoscriptError("couldn't retrieve bump files from github") from e

    log.info("got files")
    for file, contents in orig_files.items():
        log.info(f"{file} contents:")
        log_file_contents(contents)

    diff = ""
    for file, orig in orig_files.items():
        if not orig:
            raise LandoscriptError(f"{file} does not exist!")

        log.info(f"considering {file}")
        cur, next_ = get_cur_and_next_version(file, orig, next_version)
        if next_ < cur:
            log.warning(f"{file}: Version bumping skipped due to conflicting values: (next version {next_} is < current version {cur})")
            continue
        elif next_ == cur:
            log.info(f"{file}: Version bumping skipped due to unchanged values")
            continue

        # NOTE(review): this assumes str(cur) appears verbatim in the file,
        # which holds for the single-version allowlisted files — confirm if
        # new file types are ever allowlisted.
        modified = orig.replace(str(cur), str(next_))
        if orig == modified:
            raise LandoscriptError("file not modified, this should be impossible")

        log.info(f"{file}: successfully bumped! new contents are:")
        log_file_contents(modified)

        diff += diff_contents(orig, modified, file)

    if not diff:
        log.info("no files to bump")
        return {}

    # Publish the diff as a task artifact for auditing/debugging.
    with open(os.path.join(public_artifact_dir, "version-bump.diff"), "w+") as f:
        f.write(diff)

    log.info("adding version bump commit! diff contents are:")
    log_file_contents(diff)

    # NOTE(review): author has no email address (`Name <email>` form) —
    # confirm that Lando accepts authors without one.
    author = "Release Engineering Landoscript "
    timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()
    # version bumps always ignore a closed tree
    commitmsg = "Subject: Automatic version bump NO BUG a=release CLOSED TREE"
    if dontbuild:
        commitmsg += " DONTBUILD"

    return {"action": "create-commit", "commitmsg": commitmsg, "diff": diff, "date": timestamp, "author": author}


def find_what_version_parser_to_use(file: str) -> type[BaseVersion]:
    """Return the mozilla_version class that parses versions found in `file`.

    Raises:
        LandoscriptError: if `file` matches zero or more than one entry in
            `_VERSION_CLASS_PER_BEGINNING_OF_PATH`.
    """
    version_classes = [cls for path, cls in _VERSION_CLASS_PER_BEGINNING_OF_PATH.items() if file.startswith(path)]

    number_of_version_classes = len(version_classes)
    if number_of_version_classes > 1:
        raise LandoscriptError(f'File "{file}" matched too many classes: {version_classes}')
    if number_of_version_classes > 0:
        return version_classes[0]

    raise LandoscriptError(f"Could not determine version class based on file path for {file}")


def get_cur_and_next_version(filename: str, orig_contents: str, next_version: str):
    """Parse the current version out of `orig_contents` and return it together
    with the parsed `next_version`, as a `(cur, next_)` tuple.

    Fix: the local below was previously annotated `VersionClass: BaseVersion`,
    which is incorrect — `find_what_version_parser_to_use` returns the class
    itself, not an instance.
    """
    VersionClass: type[BaseVersion] = find_what_version_parser_to_use(filename)
    # The version lives on the last non-blank, non-comment line of the file.
    lines = [line for line in orig_contents.splitlines() if line and not line.startswith("#")]
    cur = VersionClass.parse(lines[-1])

    # Special case for ESRs; make sure the next version is consistent with the
    # current version with respect to whether or not it includes the `esr`
    # suffix.
    if next_version.endswith("esr") and not typing.cast(GeckoVersion, cur).is_esr:
        next_version = next_version.replace("esr", "")

    next_ = VersionClass.parse(next_version)

    return cur, next_
+ }, + "version_bump": { + "type": "string", + "enum": [ + "major", + "minor" + ], + "description": "Increment either the major or minor version number, or neither if empty" + } + } + } + }, + "replacements": { + "type": "array", + "minItems": 0, + "items": { + "type": "array", + "minItems": 3, + "maxItems": 3, + "items": { + "type": "string" + }, + "examples": [ + [ + "browser/config/mozconfigs/linux32/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding" + ] + ] + } + }, + "from_branch": { + "type": "string", + "examples": [ + "central" + ] + }, + "to_branch": { + "type": "string", + "examples": [ + "beta" + ] + }, + "from_repo": { + "type": "string", + "examples": [ + "https://hg.mozilla.org/releases/mozilla-beta" + ] + }, + "to_repo": { + "type": "string", + "examples": [ + "https://hg.mozilla.org/releases/mozilla-beta" + ] + }, + "merge_old_head": { + "type": "boolean", + "default": false + }, + "incr_major_version": { + "type": "boolean", + "default": false, + "description": "Control whether the major version number should be incremented" + }, + "base_tag": { + "type": "string", + "examples": [ + "FIREFOX_BETA_{major_version}_BASE" + ], + "pattern": "^(.*{major_version}.*)$" + }, + "end_tag": { + "type": "string", + "examples": [ + "FIREFOX_BETA_{major_version}_END" + ], + "pattern": "^(.*{major_version}.*)$" + } + } + } + }, + "properties": { + "dependencies": { + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "type": "string" + } + }, + "payload": { + "type": "object", + "properties": { + "tag_info": { + "type": "object", + "properties": { + "tags": { + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "type": "string" + } + }, + "revision": { + "type": "string" + } + }, + "required": [ + "tags", + "revision" + ] + }, + "version_bump_info": { + "type": "object", + "properties": { + "next_version": { + "type": "string" + }, + "files": { + 
"type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "type": "string" + } + } + }, + "required": [ + "next_version", + "files" + ] + }, + "android_l10n_import_info": { + "type": "object", + "properties": { + "from_repo_url": { + "type": "string" + }, + "toml_info": { + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "type": "object", + "properties": { + "toml_path": { + "type": "string" + }, + "dest_path": { + "type": "string" + } + }, + "required": [ + "toml_path", + "dest_path" + ] + } + } + }, + "required": [ + "from_repo_url", + "toml_info" + ] + }, + "android_l10n_sync_info": { + "type": "object", + "properties": { + "from_repo_url": { + "type": "string" + }, + "toml_info": { + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "type": "object", + "properties": { + "toml_path": { + "type": "string" + } + }, + "required": [ + "toml_path" + ] + } + } + }, + "required": [ + "from_repo_url", + "toml_info" + ] + }, + "l10n_bump_info": { + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version_path": { + "type": "string" + }, + "l10n_repo_url": { + "type": "string" + }, + "l10n_repo_target_branch": { + "type": "string" + }, + "ignore_config": { + "type": "object" + }, + "platform_configs": { + "type": "array", + "items": { + "type": "object" + } + } + }, + "required": [ + "path", + "name", + "platform_configs", + "version_path" + ] + } + }, + "merge_info": { + "$ref": "#/definitions/merge_day_payload" + }, + "lando_repo": { + "type": "string", + "description": "A short repo identifier used in Lando API URLs. Note that this is _not_ necessarily the same as the repository's name on GitHub (but it can be used to look that up)." 
import asyncio
import logging
from pprint import pformat
from typing import Any, Callable

from aiohttp import ClientResponseError, ClientSession
from async_timeout import timeout
from scriptworker.utils import calculate_sleep_time, retry_async

from landoscript.errors import LandoscriptError

log = logging.getLogger(__name__)


# A single action in a Lando automation request, eg: a `create-commit` action.
LandoAction = dict[str, str]


async def submit(
    session: ClientSession, lando_api: str, lando_repo: str, actions: list[LandoAction], sleeptime_callback: Callable[..., Any] = calculate_sleep_time
) -> str:
    """Submit the provided `actions` to the given `lando_repo` through the `lando_api`.

    Retries the POST up to 10 times on `ClientResponseError` (raised via
    `raise_for_status`), with backoff controlled by `sleeptime_callback`.

    Returns:
        The `status_url` from the response body, to be polled for job state.

    Raises:
        LandoscriptError: if the response contains no `status_url`.
        ClientResponseError: if all submission attempts fail.
    """
    url = f"{lando_api}/api/v1/{lando_repo}"
    json = {"actions": actions}

    log.info(f"submitting actions to lando: {actions}")
    # NOTE(review): 10 retry attempts inside a single 30s timeout may not
    # leave room for backoff — confirm the intended budget.
    async with timeout(30):
        log.info(f"submitting POST request to {url}")
        log.info("message body is:")
        # Bug fix: this was `log.info(pprint(json))`; `pprint` prints to
        # stdout and returns None, so the log line was always "None".
        # `pformat` returns the formatted string instead.
        log.info(pformat(json))

        submit_resp = await retry_async(
            session.post,
            args=(url,),
            kwargs={
                "json": json,
                "raise_for_status": True,
            },
            attempts=10,
            retry_exceptions=ClientResponseError,
            sleeptime_callback=sleeptime_callback,
        )

        log.info(f"success! got {submit_resp.status} response")

        status_url = (await submit_resp.json()).get("status_url")
        if not status_url:
            raise LandoscriptError("couldn't find status url!")

        return status_url


async def poll_until_complete(session: ClientSession, poll_time: int, status_url: str):
    """Poll `status_url` every `poll_time` seconds until the Lando job completes.

    Non-OK responses and OK-but-not-200 responses (eg: 202 "pending") cause
    another poll cycle.

    NOTE(review): there is no upper bound on attempts; if Lando never returns
    200 this loops forever — confirm that task-level timeouts cover this.

    Raises:
        LandoscriptError: if a 200 response is received whose `status` is not
            `completed`.
    """
    while True:
        log.info(f"sleeping {poll_time} seconds before polling for status")
        await asyncio.sleep(poll_time)

        log.info(f"polling lando for status: {status_url}")
        status_resp = await session.get(status_url)

        # just retry if something went wrong...
        if not status_resp.ok:
            log.info(f"lando response is not ok (code {status_resp.status}), trying again...")
            continue

        if status_resp.status == 200:
            body = await status_resp.json()
            if body.get("status") != "completed":
                raise LandoscriptError("code is 200, status is not completed...result is unclear...failing!")

            log.info("success! got 200 response with 'completed' status")

            log.info("Commits are:")
            for commit in body["commits"]:
                log.info(commit)

            break
import logging
import os.path

import aiohttp
import scriptworker.client

from landoscript import lando
from landoscript.actions import version_bump
from scriptworker_client.github_client import GithubClient

log = logging.getLogger(__name__)


def get_default_config(base_dir: str = "") -> dict:
    """Return the default scriptworker configuration.

    `base_dir` defaults to the parent of the current working directory.
    """
    base_dir = base_dir or os.path.dirname(os.getcwd())
    default_config = {
        "work_dir": os.path.join(base_dir, "work_dir"),
        "artifact_dir": os.path.join(base_dir, "artifact_dir"),
        "schema_file": os.path.join(os.path.dirname(__file__), "data", "landoscript_task_schema.json"),
    }
    return default_config


def validate_scopes(scopes: set, lando_repo: str, actions: list[str]):
    """Raise unless `scopes` contains the repo scope and one scope per action.

    Raises:
        TaskVerificationError: listing every missing scope.
    """
    expected_scopes = {
        f"project:releng:lando:repo:{lando_repo}",
        *[f"project:releng:lando:action:{action}" for action in actions],
    }
    missing = expected_scopes - scopes
    if missing:
        raise scriptworker.client.TaskVerificationError(f"required scope(s) not present: {', '.join(missing)}")


# `context` is kept explicitly untyped because all of its members are typed as
# Optional. This never happens in reality (only in tests), but as things stand
# at the time of writing, it means we need noisy and unnecessary None checking
# to avoid linter complaints.
async def async_main(context):
    """Entry point for a landoscript task: validate, run actions, submit to Lando.

    Runs each action named in the task payload (currently only
    `version_bump`), collects the resulting Lando actions, and submits them —
    unless `dry_run` is set, in which case they are only logged.
    """
    config = context.config
    payload = context.task["payload"]
    scopes = set(context.task["scopes"])
    artifact_dir = config["artifact_dir"]
    public_artifact_dir = os.path.join(artifact_dir, "public", "build")

    # Note: `lando_repo` is not necessarily the same as the repository's name
    # on Github.
    lando_repo = payload["lando_repo"]

    # pull owner, repo, and branch from config
    # TODO: replace this with a lookup through the lando API when that API exists
    log.info(f"looking up repository details for lando repo: {lando_repo}")
    repo_details = context.config["lando_name_to_github_repo"][lando_repo]
    owner = repo_details["owner"]
    repo = repo_details["repo"]
    branch = repo_details["branch"]
    log.info(f"Got owner: {owner}, repo: {repo}, branch: {branch}")

    # validate scopes - these raise if there's any scope issues
    validate_scopes(scopes, lando_repo, payload["actions"])

    # Fix: `exist_ok=True` avoids a FileExistsError if the artifact tree has
    # already been created (eg: by the worker or an earlier partial run).
    os.makedirs(public_artifact_dir, exist_ok=True)

    lando_actions: list[lando.LandoAction] = []
    async with GithubClient(context.config["github_config"], owner, repo) as gh_client:
        for action in payload["actions"]:
            log.info(f"processing action: {action}")

            if action == "version_bump":
                version_bump_action = await version_bump.run(
                    gh_client,
                    public_artifact_dir,
                    branch,
                    payload["version_bump_info"],
                    payload.get("dontbuild", False),
                )
                # sometimes version bumps are no-ops
                if version_bump_action:
                    lando_actions.append(version_bump_action)

            log.info("finished processing action")

    if lando_actions:
        if payload.get("dry_run", False):
            log.info("dry run...would've submitted lando actions:")
            for la in lando_actions:
                log.info(la)
        else:
            log.info("not a dry run...submitting lando actions:")
            for la in lando_actions:
                log.info(la)

            async with aiohttp.ClientSession() as session:
                status_url = await lando.submit(session, config["lando_api"], lando_repo, lando_actions, config["sleeptime_callback"])
                await lando.poll_until_complete(session, config["poll_time"], status_url)
    else:
        log.info("no lando actions to submit!")


def main(config_path: str = ""):
    """Console-script entry point; delegates to scriptworker's sync_main."""
    return scriptworker.client.sync_main(async_main, config_path=config_path, default_config=get_default_config())


if __name__ == "__main__":
    main()
from difflib import unified_diff


def diff_contents(orig: str, modified: str, file: str) -> str:
    """Create a git-style unified diff of `orig` and `modified` with the filename `file`."""
    fromfile = f"a/{file}"
    tofile = f"b/{file}"
    # `lineterm=""` keeps difflib from appending newlines to hunk headers;
    # we join the lines ourselves below.
    hunks = unified_diff(orig.splitlines(), modified.splitlines(), fromfile=fromfile, tofile=tofile, lineterm="")
    result = f"diff --git {fromfile} {tofile}\n" + "\n".join(hunks)
    # Only terminate the diff with a newline when the modified file itself
    # ends with one.
    if modified.endswith("\n"):
        result += "\n"
    return result
000000000..fc07fdcd6 --- /dev/null +++ b/landoscript/tests/data/test_private_key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAz15/Lyguck6lN4ss19PUR78bLnB7eYe5OUdsjT3Rd2jLCjpE +fLhbcRqcbl38WHfv+dPmD71Lp4kmN8J4//Xua+JoB4dylKzF1ThDLACB+wsMGJad +nUAcn3e6HeAQc0RuLyOob0tN2Z/EE8SOmNb0BmqAVCeezyAuASK16+jXCuqsT6Od +MTVQRi583KmMDX8O0sPJPzTsKtKi9534iSegpLJ271cR5gDdTv/7oLjfaTUjqUQo +BX4NGbgTdasFoJyVLBSrUSrt7VQ6iHgDkwEL4/g/AqEy+mnBH+5WH2ZtlNUgRYqV +y+CystoJFJ/eCgqjKEokDaJDh4lLQmMHDIfXKwIDAQABAoIBAFt8NB4o2vzhB3DO +vNTnBmM0w5gkVcUTBLtpcFGLsUq9972mYSqo5uaqN9NVMYwSNdQBESFmgOcD0dGN +wXYjGLGN7BlOP1WqN4lOzguumamYgRij8hs6+qW722dsv7UqvnWbhKa9ETZujsGt +2L9DLdtaxwzZge2dI86qcpoGWkDc5XlQdrrt6L4CPjFLQwTI4pnYQHK5ov4UJk8J +ixCZnnNt+4+syCo/vNW1Ffo/hzPjpSCBAppjxzvlv4KQ4F+w+DsxWKliy7Fb7h0M +vU5VBH9QeegqF6Opa3HULsfHUOx/w3duItApBzaq5751EK9MVN/mtnstLzDZsM1Z +JCeiE1ECgYEA+e6ExsQAux/bVfxwccz+eStME8E1ni3yGJGr8uWJGaPnzFBag1rV +7vcv/1u+54+wQJ/YlB+qGfjMKlgXpZHbtMDV4J9m8l3/+p+VKsxzeJqXiWYYKJij +MrWeaUwbd/7aaJ8sRUZNMxpdbjmm+X2ntaUTT+s+JbLbnVX/QXfAnXMCgYEA1Gds +155QTtAlw4KGQTyCibZaTT5e4PphBQ2XVkFAoO+8F7dOTLr/qPnF8pbB+rk8+3AA +0D59cG0w+mwwMQvB92TdfGiL6uCWQxIhSryZHSJJdNr7z0zloihnvMPwWT2uAV4o +Ud1v8hmmZtWCy3zow/wKolt3x0Etn/pPF8j68WkCgYBCUYYt7+h5vtRGlruHluuX +X2PKgiQmGu74kf7cbwfYI+PoFRJPdJT72d+ElOWtMDWGkGO5ukU6qwgR3Fo59uAj +sF+QQPviKAs6h4nfV2z0faDkNpEGhKoZVJP6QNWxG5D83e+zWho2CzLrgqTJ4wvT +aUAcnsFY44Le1ujqiEo1/QKBgQCZwi0GgXI9O2y69Tfe7cDJlBoWpLnmPxoQoGAX +brEsT59nw6iI3y3H0tg63muZ5pKAqfnMxg/kXQcBoWPZfVtyja/QKZZv9xiPgQZc +pd5pOrLWRtfWnyH0my9NdMH+uCnIj7K8YIJzTaIp+xHt4IRXq5K/KVuyADrY/khX +PYR7UQKBgQCyN98Y2y4AHZDi7awKb67U0njC1YYx9ZADTKBpgARZm1aO10fyFsj3 +X1CzoiL74Z/O6I8p7AdKod3tRcMzu3hrUWk/LHRX/vHBO+xVIRvfQHfyy7e2TyZE +JcTf0C1xM0aCATDKucsK0Dnx5bUUo0Ob1P67KxqMtP9uJqRQSCLAjw== +-----END RSA PRIVATE KEY----- diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py new file mode 100644 index 000000000..0724f3d11 --- /dev/null +++ 
from aiohttp import ClientResponseError
import pytest
from scriptworker.client import TaskVerificationError

from landoscript.errors import LandoscriptError
from landoscript.script import async_main
from landoscript.actions.version_bump import ALLOWED_BUMP_FILES, _VERSION_CLASS_PER_BEGINNING_OF_PATH
from simple_github.client import GITHUB_GRAPHQL_ENDPOINT
from yarl import URL


def assert_add_commit_response(requests, submit_uri, commit_msg_strings, initial_values, expected_bumps, attempts=1):
    """Assert that exactly one Lando submission was made (over `attempts`
    retries) and that its single `create-commit` action bumps every file in
    `expected_bumps` from its value in `initial_values`."""
    # make sure that exactly one request was made
    # (a single request can add more than one commit, so there should never
    # be a need for more than 1 request)
    assert ("POST", submit_uri) in requests
    reqs = requests[("POST", submit_uri)]
    assert len(reqs) == attempts

    # there might be more than one in cases where we retry; we assume that
    # the requests are the same for all attempts
    req = reqs[0]
    assert "json" in req.kwargs
    assert "actions" in req.kwargs["json"]
    assert len(req.kwargs["json"]["actions"]) == 1
    action = req.kwargs["json"]["actions"][0]
    assert action["action"] == "create-commit"

    # ensure metadata is correct
    assert action["author"] == "Release Engineering Landoscript "
    # we don't actually verify the value here; it's not worth the trouble of mocking
    assert "date" in action

    # ensure required substrings are in the diff header
    for msg in commit_msg_strings:
        assert msg in action["commitmsg"]

    # NOTE(review): "diff\n" never appears in the generated diffs (headers are
    # "diff --git ..."), so this split yields a single element containing the
    # whole diff; the substring checks below still work — confirm intent.
    diffs = action["diff"].split("diff\n")

    # ensure expected bumps are present to a reasonable degree of certainty
    for file, after in expected_bumps.items():
        for diff in diffs:
            # if the version is the last line in the file it may or may not
            # have a trailing newline. either way, there will be one (and
            # only one) in the `-` line of the diff. account for this.
            # the `after` version will only have a newline if the file is
            # intended to have one after the diff has been applied.
            before = initial_values[file].rstrip("\n") + "\n"
            if file in diff and f"\n-{before}+{after}" in diff:
                break
        else:
            assert False, f"no version bump found for {file}: {diffs}"


def assert_status_response(requests, status_uri, attempts=1):
    """Assert that the Lando status endpoint was polled exactly `attempts` times."""
    assert ("GET", status_uri) in requests
    reqs = requests[("GET", status_uri)]
    # there might be more than one in cases where we retry; we assume that
    # the requests are the same for all attempts
    assert len(reqs) == attempts


def setup_test(github_installation_responses, context, payload, repo="repo_name"):
    """Prime GitHub auth mocks and compute the URIs/scopes a test needs.

    Returns a `(submit_uri, status_uri, job_id, scopes)` tuple.
    """
    lando_repo = payload["lando_repo"]
    lando_api = context.config["lando_api"]
    owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"]
    submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}")
    job_id = 12345
    status_uri = URL(f"{lando_api}/push/{job_id}")

    github_installation_responses(owner)

    scopes = [
        f"project:releng:lando:repo:{repo}",
        # fix: was an f-string with no placeholders
        "project:releng:lando:action:version_bump",
    ]

    return submit_uri, status_uri, job_id, scopes


def setup_fetch_files_response(aioresponses, code, initial_values=None):
    """Mock the GitHub GraphQL file-fetch with `code` and file `initial_values`.

    Fix: `initial_values` was a mutable default argument (`{}`); use the None
    sentinel instead (it is only read, so behaviour is unchanged).
    """
    if initial_values:
        github_response = {}
        for file, contents in initial_values.items():
            github_response[file] = str(contents)

        payload = {
            "data": {
                "repository": {k: {"text": v} for k, v in github_response.items()},
            }
        }
    else:
        payload = {}

    aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=code, payload=payload)
+ ["Automatic version bump", "NO BUG", "a=release"], + id="dryrun", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + }, + { + "browser/config/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="one_file_new_version", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + }, + { + "browser/config/version.txt": "134.0\n", + }, + { + "browser/config/version.txt": "135.0\n", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="one_file_new_version_retains_newline", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "134.0.1", + }, + }, + { + "browser/config/version.txt": "134.0", + }, + { + "browser/config/version.txt": "134.0.1", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="one_file_minor_bump", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "134.0b3", + }, + }, + { + "browser/config/version.txt": "134.0b2", + }, + { + "browser/config/version.txt": "134.0b3", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="beta_bump_display", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "128.2.1esr", + }, + }, + { + "browser/config/version.txt": "128.2.0", + }, + { + "browser/config/version.txt": "128.2.1", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="esr_bump", + ), + pytest.param( + { + "actions": 
["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version_display.txt"], + "next_version": "128.2.1esr", + }, + }, + { + "browser/config/version_display.txt": "128.2.0esr", + }, + { + "browser/config/version_display.txt": "128.2.1esr", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="esr_bump_display", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": [ + "browser/config/version.txt", + "browser/config/version_display.txt", + "config/milestone.txt", + "mobile/android/version.txt", + ], + "next_version": "135.0", + }, + }, + { + "browser/config/version.txt": "134.0", + "browser/config/version_display.txt": "134.0", + "config/milestone.txt": "134.0", + "mobile/android/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + "browser/config/version_display.txt": "135.0", + "config/milestone.txt": "135.0", + "mobile/android/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="many_files_all_changed", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": [ + "browser/config/version.txt", + "browser/config/version_display.txt", + "config/milestone.txt", + "mobile/android/version.txt", + ], + "next_version": "135.0b3", + }, + }, + { + "browser/config/version.txt": "135.0", + "browser/config/version_display.txt": "135.0b2", + "config/milestone.txt": "135.0", + "mobile/android/version.txt": "135.0b2", + }, + { + "browser/config/version_display.txt": "135.0b3", + "mobile/android/version.txt": "135.0b3", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="many_files_some_changed", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + "dontbuild": True, + }, + { + 
"browser/config/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release", "DONTBUILD"], + id="dontbuild_includes_correct_commit_message", + ), + ), +) +async def test_success_with_bumps(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + setup_fetch_files_response(aioresponses, 200, initial_values) + dryrun = payload.get("dry_run", False) + + if not dryrun: + aioresponses.post( + submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} + ) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() + if not dryrun: + assert_add_commit_response(aioresponses.requests, submit_uri, commit_msg_strings, initial_values, expected_bumps) + assert_status_response(aioresponses.requests, status_uri) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "payload,initial_values,expected_bumps,commit_msg_strings", + ( + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + }, + { + "browser/config/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="one_file", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": [ + "browser/config/version.txt", + "browser/config/version_display.txt", + "config/milestone.txt", + 
"mobile/android/version.txt", + ], + "next_version": "135.0", + }, + }, + { + "browser/config/version.txt": "134.0", + "browser/config/version_display.txt": "134.0", + "config/milestone.txt": "134.0", + "mobile/android/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + "browser/config/version_display.txt": "135.0", + "config/milestone.txt": "135.0", + "mobile/android/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="many_files", + ), + ), +) +async def test_success_with_retries(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + setup_fetch_files_response(aioresponses, 200, initial_values) + + aioresponses.post(submit_uri, status=500) + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get(status_uri, status=202, payload={"status": "pending", "job_id": job_id, "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + assert_add_commit_response(aioresponses.requests, submit_uri, commit_msg_strings, initial_values, expected_bumps, attempts=2) + assert_status_response(aioresponses.requests, status_uri, attempts=2) + assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "payload,initial_values", + ( + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + }, + { + 
"browser/config/version.txt": "135.0", + }, + id="one_file_no_change", + ), + ), +) +async def test_success_without_bumps(aioresponses, github_installation_responses, context, payload, initial_values): + submit_uri, status_uri, _, scopes = setup_test(github_installation_responses, context, payload) + setup_fetch_files_response(aioresponses, 200, initial_values) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + assert ("POST", submit_uri) not in aioresponses.requests + assert ("GET", status_uri) not in aioresponses.requests + + +@pytest.mark.asyncio +async def test_failure_to_fetch_files(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + _, _, _, scopes = setup_test(github_installation_responses, context, payload) + + # 5 attempts is hardcoded deeper than we can reasonable override it; so + # just expect it + for _ in range(5): + setup_fetch_files_response(aioresponses, 500) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised LandoscriptError" + except LandoscriptError as e: + assert "couldn't retrieve bump files from github" in e.args[0] + + +@pytest.mark.asyncio +async def test_failure_to_submit_to_lando_500(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload) + setup_fetch_files_response(aioresponses, 200, initial_values) + + for _ in range(10): + aioresponses.post(submit_uri, status=500) + + context.task = {"payload": payload, "scopes": scopes} 
+ + try: + await async_main(context) + assert False, "should've raised ClientResponseError" + except ClientResponseError as e: + assert e.status == 500 + + +@pytest.mark.asyncio +async def test_to_submit_to_lando_no_status_url(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload) + setup_fetch_files_response(aioresponses, 200, initial_values) + aioresponses.post(submit_uri, status=202, payload={}) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised LandoscriptError" + except LandoscriptError as e: + assert "couldn't find status url" in e.args[0] + + +@pytest.mark.asyncio +async def test_lando_polling_result_not_completed(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + setup_fetch_files_response(aioresponses, 200, initial_values) + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + aioresponses.get(status_uri, status=200, payload={}) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised LandoscriptError" + except LandoscriptError as e: + assert "status is not completed" in e.args[0] + + +@pytest.mark.asyncio +async def 
test_lando_polling_retry_on_failure(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + setup_fetch_files_response(aioresponses, 200, initial_values) + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + aioresponses.get(status_uri, status=500, payload={}) + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + assert_status_response(aioresponses.requests, status_uri, attempts=2) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "files,first_bad_file", + ( + pytest.param( + ["browser/config/unknown.txt"], + "browser/config/unknown.txt", + id="one_file", + ), + pytest.param( + ["browser/config/version.txt", "browser/config/unknown.txt", "foo/bar/baz"], + "browser/config/unknown.txt", + id="many_files", + ), + ), +) +async def test_bad_bumpfile(github_installation_responses, context, files, first_bad_file): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": files, + "next_version": "135.0", + }, + } + _, _, _, scopes = setup_test(github_installation_responses, context, payload) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised TaskVerificationError" + except TaskVerificationError as e: + assert f"{first_bad_file} is not in version bump allowlist" in e.args[0] + + +@pytest.mark.asyncio 
+@pytest.mark.parametrize( + "scopes,missing", + ( + pytest.param( + [ + "project:releng:lando:action:version_bump", + ], + [ + "project:releng:lando:repo:repo_name", + ], + id="missing_repo_scope", + ), + pytest.param( + [ + "project:releng:lando:repo:repo_name", + ], + [ + "project:releng:lando:action:version_bump", + ], + id="missing_action_scope", + ), + pytest.param( + [], + [ + "project:releng:lando:repo:repo_name", + "project:releng:lando:action:version_bump", + ], + id="no_scopes", + ), + ), +) +async def test_missing_scopes(context, scopes, missing): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised TaskVerificationError" + except TaskVerificationError as e: + assert "required scope(s) not present" in e.args[0] + for m in missing: + assert m in e.args[0] + + +def test_no_overlaps_in_version_classes(): + for prefix1 in _VERSION_CLASS_PER_BEGINNING_OF_PATH: + for prefix2 in _VERSION_CLASS_PER_BEGINNING_OF_PATH: + if prefix1 == prefix2: + continue + assert not prefix2.startswith(prefix1) + + +def test_all_bump_files_have_version_class(): + for bump_file in ALLOWED_BUMP_FILES: + assert any([bump_file.startswith(path) for path in _VERSION_CLASS_PER_BEGINNING_OF_PATH]) diff --git a/maintenance/pin.sh b/maintenance/pin.sh index cd2c1e048..300cdd04f 100755 --- a/maintenance/pin.sh +++ b/maintenance/pin.sh @@ -16,6 +16,7 @@ else configloader githubscript iscript + landoscript notarization_poller pushapkscript pushflatpakscript @@ -43,6 +44,7 @@ PY_311_SCRIPTS=( bouncerscript configloader githubscript + landoscript pushapkscript pushflatpakscript pushmsixscript diff --git a/taskcluster/kinds/docker-image/kind.yml b/taskcluster/kinds/docker-image/kind.yml index 39c3513fa..b409753a4 100644 --- 
a/taskcluster/kinds/docker-image/kind.yml +++ b/taskcluster/kinds/docker-image/kind.yml @@ -59,6 +59,11 @@ tasks: parent: base args: SCRIPT_NAME: githubscript + landoscript: + definition: script + parent: base + args: + SCRIPT_NAME: landoscript shipitscript: definition: script parent: base diff --git a/taskcluster/kinds/push-image/kind.yml b/taskcluster/kinds/push-image/kind.yml index 36e9abb7b..b1ff255be 100644 --- a/taskcluster/kinds/push-image/kind.yml +++ b/taskcluster/kinds/push-image/kind.yml @@ -53,6 +53,7 @@ tasks: beetmoverscript: {} bouncerscript: {} githubscript: {} + landoscript: {} pushapkscript: {} pushflatpakscript: {} pushmsixscript: {} diff --git a/taskcluster/kinds/tox/kind.yml b/taskcluster/kinds/tox/kind.yml index ff62fb364..709152647 100644 --- a/taskcluster/kinds/tox/kind.yml +++ b/taskcluster/kinds/tox/kind.yml @@ -80,6 +80,7 @@ tasks: - bitrisescript/docker.d - bouncerscript/docker.d - githubscript/docker.d + - landoscript/docker.d - pushapkscript/docker.d - pushflatpakscript/docker.d - pushmsixscript/docker.d @@ -92,6 +93,11 @@ tasks: - iscript - scriptworker_client - vendored/mozbuild + landoscript: + python-versions: [311] + resources: + - landoscript + - scriptworker_client notarization_poller: python-versions: [38] resources: diff --git a/tox.ini b/tox.ini index 01c48ef32..ec71bc7f8 100644 --- a/tox.ini +++ b/tox.ini @@ -11,6 +11,7 @@ envlist = init-py311 # iscript and notarization_poller are python 3.8 only iscript-py38 + landoscript-py311 notarization_poller-py38 pushapkscript-py311 pushflatpakscript-py311 @@ -93,6 +94,12 @@ commands = tox -e py311 - tox -e coveralls +[testenv:landoscript-py311] +changedir = {toxinidir}/landoscript +commands = + tox -e py311 + - tox -e coveralls + [testenv:notarization_poller-py38] changedir = {toxinidir}/notarization_poller commands = From 4bcc5993a0135669f98b0171ca681eefb9390cb6 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Wed, 19 Mar 2025 20:10:21 -0400 Subject: [PATCH 02/13] feat(landoscript):
implement support for `tag` action --- landoscript/src/landoscript/actions/tag.py | 14 +++ landoscript/src/landoscript/script.py | 5 +- landoscript/tests/test_tag.py | 139 +++++++++++++++++++++ 3 files changed, 157 insertions(+), 1 deletion(-) create mode 100644 landoscript/src/landoscript/actions/tag.py create mode 100644 landoscript/tests/test_tag.py diff --git a/landoscript/src/landoscript/actions/tag.py b/landoscript/src/landoscript/actions/tag.py new file mode 100644 index 000000000..e38708382 --- /dev/null +++ b/landoscript/src/landoscript/actions/tag.py @@ -0,0 +1,14 @@ +from scriptworker.client import TaskVerificationError + +from landoscript.lando import LandoAction + + +def run(tags: list[str]) -> list[LandoAction]: + if len(tags) < 1: + raise TaskVerificationError("must provide at least one tag!") + + actions = [] + for tag in tags: + actions.append({"action": "tag", "name": tag}) + + return actions diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index 906c7fd91..d79883284 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -5,7 +5,7 @@ import scriptworker.client from landoscript import lando -from landoscript.actions import version_bump +from landoscript.actions import tag, version_bump from scriptworker_client.github_client import GithubClient log = logging.getLogger(__name__) @@ -76,6 +76,9 @@ async def async_main(context): # sometimes version bumps are no-ops if version_bump_action: lando_actions.append(version_bump_action) + elif action == "tag": + tag_actions = tag.run(payload["tags"]) + lando_actions.extend(tag_actions) log.info("finished processing action") diff --git a/landoscript/tests/test_tag.py b/landoscript/tests/test_tag.py new file mode 100644 index 000000000..3411dd5b7 --- /dev/null +++ b/landoscript/tests/test_tag.py @@ -0,0 +1,139 @@ +import pytest +from scriptworker.client import TaskVerificationError +from yarl import URL + +from 
landoscript.script import async_main + + +def assert_tag_response(requests, submit_uri, tags, attempts=1): + # make sure that exactly one request was made + # (a single request can add more than one commit, so there should never + # be a need for more than 1 request) + assert ("POST", submit_uri) in requests + reqs = requests[("POST", submit_uri)] + assert len(reqs) == attempts + + # there might be more than one in cases where we retry; we assume that + # the requests are the same for all attempts + req = reqs[0] + assert "json" in req.kwargs + assert "actions" in req.kwargs["json"] + assert len(req.kwargs["json"]["actions"]) == len(tags) + + requested_tags = set([action["name"] for action in req.kwargs["json"]["actions"]]) + assert requested_tags == set(tags) + + +def assert_status_response(requests, status_uri, attempts=1): + assert ("GET", status_uri) in requests + reqs = requests[("GET", status_uri)] + # there might be more than one in cases where we retry; we assume that + # the requests are the same for all attempts + assert len(reqs) == attempts + + +def setup_test(github_installation_responses, context, payload, repo="repo_name"): + lando_repo = payload["lando_repo"] + lando_api = context.config["lando_api"] + owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] + submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}") + job_id = 12345 + status_uri = URL(f"{lando_api}/push/{job_id}") + + github_installation_responses(owner) + + scopes = [ + f"project:releng:lando:repo:{repo}", + f"project:releng:lando:action:tag", + ] + + return submit_uri, status_uri, job_id, scopes + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "tags,dry_run", + ( + pytest.param( + ["BUILD1"], + True, + id="dry_run", + ), + pytest.param( + ["BUILD1"], + False, + id="one_tag", + ), + pytest.param( + ["BUILD1", "RELEASE"], + False, + id="multiple_tags", + ), + ), +) +async def test_success(aioresponses, github_installation_responses, context, tags, dry_run): + 
payload = { + "actions": ["tag"], + "lando_repo": "repo_name", + "tags": tags, + "dry_run": dry_run, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + + if not dry_run: + aioresponses.post( + submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} + ) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + if not dry_run: + assert_tag_response(aioresponses.requests, submit_uri, tags) + assert_status_response(aioresponses.requests, status_uri) + + +@pytest.mark.asyncio +async def test_no_tags(github_installation_responses, context): + payload = { + "actions": ["tag"], + "lando_repo": "repo_name", + "tags": [], + } + _, _, _, scopes = setup_test(github_installation_responses, context, payload) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised TaskVerificationError" + except TaskVerificationError as e: + assert "must provide at least one tag!" 
in e.args[0] + + +@pytest.mark.asyncio +async def test_missing_scopes(context): + payload = { + "actions": ["tag"], + "lando_repo": "repo_name", + "tags": ["BUILD1"], + } + + context.task = {"payload": payload, "scopes": ["project:releng:lando:repo:repo_name"]} + + try: + await async_main(context) + assert False, "should've raised TaskVerificationError" + except TaskVerificationError as e: + assert "required scope(s) not present" in e.args[0] + assert "project:releng:lando:action:tag" in e.args[0] From 37c105a7fdb8d2d6eddf0df5b97a4d7a232fd992 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Wed, 19 Mar 2025 21:12:39 -0400 Subject: [PATCH 03/13] refactor(landoscript): add tests that run multiple actions in one run Most notably, this moves common set-up into conftest, and tests that aren't testing action-specific logic (eg: lando submission) into test_script. --- landoscript/src/landoscript/script.py | 3 + landoscript/tests/__init__.py | 0 landoscript/tests/conftest.py | 35 +++ landoscript/tests/test_script.py | 357 +++++++++++++++++++++++++ landoscript/tests/test_tag.py | 69 +---- landoscript/tests/test_version_bump.py | 221 +-------------- 6 files changed, 418 insertions(+), 267 deletions(-) create mode 100644 landoscript/tests/__init__.py create mode 100644 landoscript/tests/test_script.py diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index d79883284..24ec1315f 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -3,6 +3,7 @@ import aiohttp import scriptworker.client +from scriptworker.exceptions import TaskVerificationError from landoscript import lando from landoscript.actions import tag, version_bump @@ -57,6 +58,8 @@ async def async_main(context): # validate scopes - these raise if there's any scope issues validate_scopes(scopes, lando_repo, payload["actions"]) + if len(payload["actions"]) < 1: + raise TaskVerificationError("must provide at least one action!") 
os.makedirs(public_artifact_dir) diff --git a/landoscript/tests/__init__.py b/landoscript/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index 33bd6486c..f791559b1 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -1,4 +1,5 @@ from pathlib import Path +from yarl import URL import pytest from scriptworker.context import Context @@ -39,3 +40,37 @@ def datadir(): @pytest.fixture(scope="session") def privkey_file(datadir): return datadir / "test_private_key.pem" + + +def setup_test(github_installation_responses, context, payload, actions, repo="repo_name"): + lando_repo = payload["lando_repo"] + lando_api = context.config["lando_api"] + owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] + submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}") + job_id = 12345 + status_uri = URL(f"{lando_api}/push/{job_id}") + + github_installation_responses(owner) + + scopes = [f"project:releng:lando:repo:{repo}"] + for action in actions: + scopes.append(f"project:releng:lando:action:{action}") + + return submit_uri, status_uri, job_id, scopes + + +def assert_lando_submission_response(requests, submit_uri, attempts=1): + assert ("POST", submit_uri) in requests + reqs = requests[("POST", submit_uri)] + assert len(reqs) == attempts + # there might be more than one in cases where we retry; we assume that + # the requests are the same for all attempts + return reqs[0] + + +def assert_status_response(requests, status_uri, attempts=1): + assert ("GET", status_uri) in requests + reqs = requests[("GET", status_uri)] + # there might be more than one in cases where we retry; we assume that + # the requests are the same for all attempts + assert len(reqs) == attempts diff --git a/landoscript/tests/test_script.py b/landoscript/tests/test_script.py new file mode 100644 index 000000000..76751e220 --- /dev/null +++ b/landoscript/tests/test_script.py @@ -0,0 
+1,357 @@ +from aiohttp import ClientResponseError +import pytest +from scriptworker.client import TaskVerificationError + +from landoscript.errors import LandoscriptError +from landoscript.script import async_main +from .conftest import assert_lando_submission_response, assert_status_response, setup_test +from .test_tag import assert_tag_response +from .test_version_bump import assert_add_commit_response, setup_fetch_files_response + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "payload,initial_values,expected_bumps,commit_msg_strings,tags,dry_run", + ( + pytest.param( + { + "actions": ["tag", "version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + "tags": ["RELEASE"], + "dry_run": True, + }, + { + "browser/config/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + ["RELEASE"], + True, + id="tag_and_bump", + ), + pytest.param( + { + "actions": ["tag", "version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + "tags": ["RELEASE"], + }, + { + "browser/config/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + ["RELEASE"], + False, + id="tag_and_bump", + ), + ), +) +async def test_tag_and_bump(aioresponses, github_installation_responses, context, payload, dry_run, initial_values, expected_bumps, commit_msg_strings, tags): + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, payload["actions"]) + setup_fetch_files_response(aioresponses, 200, initial_values) + + if not dry_run: + aioresponses.post( + submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} + ) + + aioresponses.get( + status_uri, 
+ status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() + if not dry_run: + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps) + assert_status_response(aioresponses.requests, status_uri) + assert_tag_response(req, tags) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "payload,initial_values,expected_bumps,commit_msg_strings", + ( + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + }, + { + "browser/config/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="one_file", + ), + pytest.param( + { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": [ + "browser/config/version.txt", + "browser/config/version_display.txt", + "config/milestone.txt", + "mobile/android/version.txt", + ], + "next_version": "135.0", + }, + }, + { + "browser/config/version.txt": "134.0", + "browser/config/version_display.txt": "134.0", + "config/milestone.txt": "134.0", + "mobile/android/version.txt": "134.0", + }, + { + "browser/config/version.txt": "135.0", + "browser/config/version_display.txt": "135.0", + "config/milestone.txt": "135.0", + "mobile/android/version.txt": "135.0", + }, + ["Automatic version bump", "NO BUG", "a=release"], + id="many_files", + ), + ), +) +async def test_success_with_retries(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, 
context, payload, ["version_bump"]) + setup_fetch_files_response(aioresponses, 200, initial_values) + + aioresponses.post(submit_uri, status=500) + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get(status_uri, status=202, payload={"status": "pending", "job_id": job_id, "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + req = assert_lando_submission_response(aioresponses.requests, submit_uri, attempts=2) + assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps) + assert_status_response(aioresponses.requests, status_uri, attempts=2) + assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() + + +@pytest.mark.asyncio +async def test_no_actions(github_installation_responses, context): + payload = { + "actions": [], + "lando_repo": "repo_name", + } + _, _, _, scopes = setup_test(github_installation_responses, context, payload, []) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised TaskVerificationError" + except TaskVerificationError as e: + assert "must provide at least one action!" 
in e.args[0] + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "scopes,missing", + ( + pytest.param( + [ + "project:releng:lando:action:tag", + "project:releng:lando:action:version_bump", + ], + [ + "project:releng:lando:repo:repo_name", + ], + id="missing_repo_scope", + ), + pytest.param( + [ + "project:releng:lando:repo:repo_name", + "project:releng:lando:action:tag", + ], + [ + "project:releng:lando:action:version_bump", + ], + id="missing_one_action_scope", + ), + pytest.param( + [ + "project:releng:lando:repo:repo_name", + ], + [ + "project:releng:lando:action:tag", + "project:releng:lando:action:version_bump", + ], + id="missing_two_action_scopes", + ), + pytest.param( + [], + [ + "project:releng:lando:repo:repo_name", + "project:releng:lando:action:tag", + "project:releng:lando:action:version_bump", + ], + id="no_scopes", + ), + ), +) +async def test_missing_scopes(context, scopes, missing): + payload = { + "actions": ["tag", "version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised TaskVerificationError" + except TaskVerificationError as e: + assert "required scope(s) not present" in e.args[0] + for m in missing: + assert m in e.args[0] + + +@pytest.mark.asyncio +async def test_failure_to_submit_to_lando_500(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + setup_fetch_files_response(aioresponses, 200, initial_values) + + for _ in range(10): + aioresponses.post(submit_uri, status=500) + + 
context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised ClientResponseError" + except ClientResponseError as e: + assert e.status == 500 + + +@pytest.mark.asyncio +async def test_to_submit_to_lando_no_status_url(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + setup_fetch_files_response(aioresponses, 200, initial_values) + aioresponses.post(submit_uri, status=202, payload={}) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised LandoscriptError" + except LandoscriptError as e: + assert "couldn't find status url" in e.args[0] + + +@pytest.mark.asyncio +async def test_lando_polling_result_not_completed(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + setup_fetch_files_response(aioresponses, 200, initial_values) + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + aioresponses.get(status_uri, status=200, payload={}) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised LandoscriptError" + except LandoscriptError as e: + assert "status is not 
completed" in e.args[0] + + +@pytest.mark.asyncio +async def test_lando_polling_retry_on_failure(aioresponses, github_installation_responses, context): + payload = { + "actions": ["version_bump"], + "lando_repo": "repo_name", + "version_bump_info": { + "files": ["browser/config/version.txt"], + "next_version": "135.0", + }, + } + initial_values = {"browser/config/version.txt": "134.0"} + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + setup_fetch_files_response(aioresponses, 200, initial_values) + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + aioresponses.get(status_uri, status=500, payload={}) + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + assert_status_response(aioresponses.requests, status_uri, attempts=2) diff --git a/landoscript/tests/test_tag.py b/landoscript/tests/test_tag.py index 3411dd5b7..b22d9203c 100644 --- a/landoscript/tests/test_tag.py +++ b/landoscript/tests/test_tag.py @@ -1,55 +1,21 @@ import pytest from scriptworker.client import TaskVerificationError -from yarl import URL from landoscript.script import async_main +from .conftest import assert_lando_submission_response, assert_status_response, setup_test -def assert_tag_response(requests, submit_uri, tags, attempts=1): - # make sure that exactly one request was made - # (a single request can add more than one commit, so there should never - # be a need for more than 1 request) - assert ("POST", submit_uri) in requests - reqs = requests[("POST", submit_uri)] - assert len(reqs) == attempts - # there might be more than one in cases where we retry; we assume that - # the requests are the same for all attempts - req = reqs[0] +def 
assert_tag_response(req, tags): assert "json" in req.kwargs assert "actions" in req.kwargs["json"] - assert len(req.kwargs["json"]["actions"]) == len(tags) + tag_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "tag"] + assert len(tag_actions) == len(tags) - requested_tags = set([action["name"] for action in req.kwargs["json"]["actions"]]) + requested_tags = set([action["name"] for action in tag_actions]) assert requested_tags == set(tags) -def assert_status_response(requests, status_uri, attempts=1): - assert ("GET", status_uri) in requests - reqs = requests[("GET", status_uri)] - # there might be more than one in cases where we retry; we assume that - # the requests are the same for all attempts - assert len(reqs) == attempts - - -def setup_test(github_installation_responses, context, payload, repo="repo_name"): - lando_repo = payload["lando_repo"] - lando_api = context.config["lando_api"] - owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] - submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}") - job_id = 12345 - status_uri = URL(f"{lando_api}/push/{job_id}") - - github_installation_responses(owner) - - scopes = [ - f"project:releng:lando:repo:{repo}", - f"project:releng:lando:action:tag", - ] - - return submit_uri, status_uri, job_id, scopes - - @pytest.mark.asyncio @pytest.mark.parametrize( "tags,dry_run", @@ -78,7 +44,7 @@ async def test_success(aioresponses, github_installation_responses, context, tag "tags": tags, "dry_run": dry_run, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["tag"]) if not dry_run: aioresponses.post( @@ -99,7 +65,8 @@ async def test_success(aioresponses, github_installation_responses, context, tag await async_main(context) if not dry_run: - assert_tag_response(aioresponses.requests, submit_uri, tags) + req = 
assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_tag_response(req, tags) assert_status_response(aioresponses.requests, status_uri) @@ -110,7 +77,7 @@ async def test_no_tags(github_installation_responses, context): "lando_repo": "repo_name", "tags": [], } - _, _, _, scopes = setup_test(github_installation_responses, context, payload) + _, _, _, scopes = setup_test(github_installation_responses, context, payload, ["tag"]) context.task = {"payload": payload, "scopes": scopes} @@ -119,21 +86,3 @@ async def test_no_tags(github_installation_responses, context): assert False, "should've raised TaskVerificationError" except TaskVerificationError as e: assert "must provide at least one tag!" in e.args[0] - - -@pytest.mark.asyncio -async def test_missing_scopes(context): - payload = { - "actions": ["tag"], - "lando_repo": "repo_name", - "tags": ["BUILD1"], - } - - context.task = {"payload": payload, "scopes": ["project:releng:lando:repo:repo_name"]} - - try: - await async_main(context) - assert False, "should've raised TaskVerificationError" - except TaskVerificationError as e: - assert "required scope(s) not present" in e.args[0] - assert "project:releng:lando:action:tag" in e.args[0] diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index 0724f3d11..cdc494a1d 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -1,4 +1,3 @@ -from aiohttp import ClientResponseError import pytest from scriptworker.client import TaskVerificationError @@ -6,25 +5,16 @@ from landoscript.script import async_main from landoscript.actions.version_bump import ALLOWED_BUMP_FILES, _VERSION_CLASS_PER_BEGINNING_OF_PATH from simple_github.client import GITHUB_GRAPHQL_ENDPOINT -from yarl import URL +from .conftest import assert_lando_submission_response, assert_status_response, setup_test -def assert_add_commit_response(requests, submit_uri, commit_msg_strings, initial_values, 
expected_bumps, attempts=1): - # make sure that exactly one request was made - # (a single request can add more than one commit, so there should never - # be a need for more than 1 request) - assert ("POST", submit_uri) in requests - reqs = requests[("POST", submit_uri)] - assert len(reqs) == attempts - # there might be more than one in cases where we retry; we assume that - # the requests are the same for all attempts - req = reqs[0] +def assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps): assert "json" in req.kwargs assert "actions" in req.kwargs["json"] - assert len(req.kwargs["json"]["actions"]) == 1 - action = req.kwargs["json"]["actions"][0] - assert action["action"] == "create-commit" + create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] + assert len(create_commit_actions) == 1 + action = create_commit_actions[0] # ensure metadata is correct assert action["author"] == "Release Engineering Landoscript " @@ -52,32 +42,6 @@ def assert_add_commit_response(requests, submit_uri, commit_msg_strings, initial assert False, f"no version bump found for {file}: {diffs}" -def assert_status_response(requests, status_uri, attempts=1): - assert ("GET", status_uri) in requests - reqs = requests[("GET", status_uri)] - # there might be more than one in cases where we retry; we assume that - # the requests are the same for all attempts - assert len(reqs) == attempts - - -def setup_test(github_installation_responses, context, payload, repo="repo_name"): - lando_repo = payload["lando_repo"] - lando_api = context.config["lando_api"] - owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] - submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}") - job_id = 12345 - status_uri = URL(f"{lando_api}/push/{job_id}") - - github_installation_responses(owner) - - scopes = [ - f"project:releng:lando:repo:{repo}", - f"project:releng:lando:action:version_bump", - ] - - return 
submit_uri, status_uri, job_id, scopes - - def setup_fetch_files_response(aioresponses, code, initial_values={}): if initial_values: github_response = {} @@ -304,7 +268,7 @@ def setup_fetch_files_response(aioresponses, code, initial_values={}): ), ) async def test_success_with_bumps(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) setup_fetch_files_response(aioresponses, 200, initial_values) dryrun = payload.get("dry_run", False) @@ -328,7 +292,8 @@ async def test_success_with_bumps(aioresponses, github_installation_responses, c assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() if not dryrun: - assert_add_commit_response(aioresponses.requests, submit_uri, commit_msg_strings, initial_values, expected_bumps) + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps) assert_status_response(aioresponses.requests, status_uri) @@ -386,7 +351,7 @@ async def test_success_with_bumps(aioresponses, github_installation_responses, c ), ) async def test_success_with_retries(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) setup_fetch_files_response(aioresponses, 200, initial_values) aioresponses.post(submit_uri, status=500) @@ -406,7 +371,8 @@ async def test_success_with_retries(aioresponses, github_installation_responses, context.task = {"payload": payload, "scopes": 
scopes} await async_main(context) - assert_add_commit_response(aioresponses.requests, submit_uri, commit_msg_strings, initial_values, expected_bumps, attempts=2) + req = assert_lando_submission_response(aioresponses.requests, submit_uri, attempts=2) + assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps) assert_status_response(aioresponses.requests, status_uri, attempts=2) assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() @@ -432,7 +398,7 @@ async def test_success_with_retries(aioresponses, github_installation_responses, ), ) async def test_success_without_bumps(aioresponses, github_installation_responses, context, payload, initial_values): - submit_uri, status_uri, _, scopes = setup_test(github_installation_responses, context, payload) + submit_uri, status_uri, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) setup_fetch_files_response(aioresponses, 200, initial_values) context.task = {"payload": payload, "scopes": scopes} @@ -452,7 +418,7 @@ async def test_failure_to_fetch_files(aioresponses, github_installation_response "next_version": "135.0", }, } - _, _, _, scopes = setup_test(github_installation_responses, context, payload) + _, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) # 5 attempts is hardcoded deeper than we can reasonable override it; so # just expect it @@ -468,112 +434,6 @@ async def test_failure_to_fetch_files(aioresponses, github_installation_response assert "couldn't retrieve bump files from github" in e.args[0] -@pytest.mark.asyncio -async def test_failure_to_submit_to_lando_500(aioresponses, github_installation_responses, context): - payload = { - "actions": ["version_bump"], - "lando_repo": "repo_name", - "version_bump_info": { - "files": ["browser/config/version.txt"], - "next_version": "135.0", - }, - } - initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, _, _, scopes = 
setup_test(github_installation_responses, context, payload) - setup_fetch_files_response(aioresponses, 200, initial_values) - - for _ in range(10): - aioresponses.post(submit_uri, status=500) - - context.task = {"payload": payload, "scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised ClientResponseError" - except ClientResponseError as e: - assert e.status == 500 - - -@pytest.mark.asyncio -async def test_to_submit_to_lando_no_status_url(aioresponses, github_installation_responses, context): - payload = { - "actions": ["version_bump"], - "lando_repo": "repo_name", - "version_bump_info": { - "files": ["browser/config/version.txt"], - "next_version": "135.0", - }, - } - initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload) - setup_fetch_files_response(aioresponses, 200, initial_values) - aioresponses.post(submit_uri, status=202, payload={}) - - context.task = {"payload": payload, "scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised LandoscriptError" - except LandoscriptError as e: - assert "couldn't find status url" in e.args[0] - - -@pytest.mark.asyncio -async def test_lando_polling_result_not_completed(aioresponses, github_installation_responses, context): - payload = { - "actions": ["version_bump"], - "lando_repo": "repo_name", - "version_bump_info": { - "files": ["browser/config/version.txt"], - "next_version": "135.0", - }, - } - initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) - setup_fetch_files_response(aioresponses, 200, initial_values) - aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) - aioresponses.get(status_uri, status=200, payload={}) - - context.task = {"payload": payload, 
"scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised LandoscriptError" - except LandoscriptError as e: - assert "status is not completed" in e.args[0] - - -@pytest.mark.asyncio -async def test_lando_polling_retry_on_failure(aioresponses, github_installation_responses, context): - payload = { - "actions": ["version_bump"], - "lando_repo": "repo_name", - "version_bump_info": { - "files": ["browser/config/version.txt"], - "next_version": "135.0", - }, - } - initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload) - setup_fetch_files_response(aioresponses, 200, initial_values) - aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) - aioresponses.get(status_uri, status=500, payload={}) - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) - - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - assert_status_response(aioresponses.requests, status_uri, attempts=2) - - @pytest.mark.asyncio @pytest.mark.parametrize( "files,first_bad_file", @@ -599,7 +459,7 @@ async def test_bad_bumpfile(github_installation_responses, context, files, first "next_version": "135.0", }, } - _, _, _, scopes = setup_test(github_installation_responses, context, payload) + _, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) context.task = {"payload": payload, "scopes": scopes} @@ -610,59 +470,6 @@ async def test_bad_bumpfile(github_installation_responses, context, files, first assert f"{first_bad_file} is not in version bump allowlist" in e.args[0] -@pytest.mark.asyncio -@pytest.mark.parametrize( - "scopes,missing", - ( - pytest.param( - [ - "project:releng:lando:action:version_bump", - ], - [ - 
"project:releng:lando:repo:repo_name", - ], - id="missing_repo_scope", - ), - pytest.param( - [ - "project:releng:lando:repo:repo_name", - ], - [ - "project:releng:lando:action:version_bump", - ], - id="missing_action_scope", - ), - pytest.param( - [], - [ - "project:releng:lando:repo:repo_name", - "project:releng:lando:action:version_bump", - ], - id="no_scopes", - ), - ), -) -async def test_missing_scopes(context, scopes, missing): - payload = { - "actions": ["version_bump"], - "lando_repo": "repo_name", - "version_bump_info": { - "files": ["browser/config/version.txt"], - "next_version": "135.0", - }, - } - - context.task = {"payload": payload, "scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised TaskVerificationError" - except TaskVerificationError as e: - assert "required scope(s) not present" in e.args[0] - for m in missing: - assert m in e.args[0] - - def test_no_overlaps_in_version_classes(): for prefix1 in _VERSION_CLASS_PER_BEGINNING_OF_PATH: for prefix2 in _VERSION_CLASS_PER_BEGINNING_OF_PATH: From 31c0e0b54e5135b59f3cb42f7b51a1ccc9277be0 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Mon, 24 Mar 2025 12:08:33 -0400 Subject: [PATCH 04/13] feat(landoscript): implement `l10n_bump` action The helper functions here are copied out of treescript (which will soon be EOL'ed). Also included here is some minor refactoring to avoid duplication of common of `create-commit` logic. 
--- .../src/landoscript/actions/l10n_bump.py | 231 +++++ .../src/landoscript/actions/version_bump.py | 15 +- .../data/landoscript_task_schema.json | 6 +- landoscript/src/landoscript/lando.py | 9 + landoscript/src/landoscript/script.py | 87 +- landoscript/src/landoscript/treestatus.py | 40 + landoscript/src/landoscript/util/log.py | 8 + landoscript/tests/conftest.py | 19 + landoscript/tests/test_l10n_bump.py | 860 ++++++++++++++++++ landoscript/tests/test_version_bump.py | 20 +- 10 files changed, 1226 insertions(+), 69 deletions(-) create mode 100644 landoscript/src/landoscript/actions/l10n_bump.py create mode 100644 landoscript/src/landoscript/treestatus.py create mode 100644 landoscript/src/landoscript/util/log.py create mode 100644 landoscript/tests/test_l10n_bump.py diff --git a/landoscript/src/landoscript/actions/l10n_bump.py b/landoscript/src/landoscript/actions/l10n_bump.py new file mode 100644 index 000000000..9a36572aa --- /dev/null +++ b/landoscript/src/landoscript/actions/l10n_bump.py @@ -0,0 +1,231 @@ +import json +import logging +import os.path +import pprint +from typing import TypedDict + +from gql.transport.exceptions import TransportError +from scriptworker.client import TaskVerificationError + +from landoscript.errors import LandoscriptError +from landoscript.lando import LandoAction, create_commit_action +from landoscript.util.diffs import diff_contents +from landoscript.util.log import log_file_contents +from scriptworker_client.github import extract_github_repo_owner_and_name +from scriptworker_client.github_client import GithubClient + +log = logging.getLogger(__name__) + + +class PlatformConfig(TypedDict): + platforms: list[str] + path: str + + +class L10nBumpInfo(TypedDict): + path: str + name: str + l10n_repo_url: str + l10n_repo_target_branch: str + ignore_config: dict[str, list[str]] + platform_configs: list[PlatformConfig] + + +async def run( + github_client: GithubClient, + github_config: dict[str, str], + public_artifact_dir: str, + 
branch: str, + l10n_bump_infos: list[L10nBumpInfo], + dontbuild: bool, + ignore_closed_tree: bool, +) -> list[LandoAction]: + log.info("preparing to bump l10n changesets.") + + lando_actions = [] + for bump_config in l10n_bump_infos: + log.info(f"considering {bump_config['name']}") + l10n_repo_url = bump_config.get("l10n_repo_url") + l10n_repo_target_branch = bump_config.get("l10n_repo_target_branch") + if not l10n_repo_url: + raise TaskVerificationError("Cannot bump l10n revisions from github repo without an l10n_repo_url") + if not l10n_repo_target_branch: + raise TaskVerificationError("l10n_repo_target_branch must be present in bump_config!") + + l10n_owner, l10n_repo = extract_github_repo_owner_and_name(l10n_repo_url) + + async with GithubClient(github_config, l10n_owner, l10n_repo) as l10n_github_client: + # fetch initial files from github + platform_config_files = [pc["path"] for pc in bump_config["platform_configs"]] + files = [bump_config["path"], *platform_config_files] + try: + log.info(f"fetching bump files from github: {files}") + orig_files = await github_client.get_files(files, branch) + except TransportError as e: + raise LandoscriptError("couldn't retrieve bump files from github") from e + + log.debug("fetched file contents are:") + for fn, contents in orig_files.items(): + log.debug(f"{fn}:") + log.debug(contents) + + if orig_files[bump_config["path"]] is None: + raise LandoscriptError(f"{bump_config['path']} does not exist, cannot perform bump!") + + old_contents = json.loads(str(orig_files[bump_config["path"]])) + orig_platform_files = {k: v for k, v in orig_files.items() if k in platform_config_files} + + # get new revision + log.info("fetching new l10n revision") + new_revision = await l10n_github_client.get_branch_head_oid(l10n_repo_target_branch) + log.info(f"new l10n revision is {new_revision}") + + # build new versions of files + new_contents = build_revision_dict(bump_config.get("ignore_config", {}), bump_config["platform_configs"], 
orig_platform_files, new_revision) + log.debug(f"new contents of of {bump_config['path']} are:") + log.debug(new_contents) + + if old_contents == new_contents: + log.warning(f"old and new contents of {bump_config['path']} are the same, skipping bump...") + continue + + # make diff + diff = diff_contents( + json.dumps(old_contents, sort_keys=True, indent=4, separators=(",", ": ")), + json.dumps(new_contents, sort_keys=True, indent=4, separators=(",", ": ")), + bump_config["path"], + ) + + with open(os.path.join(public_artifact_dir, f"l10n-bump-{bump_config['name']}.diff"), "w+") as f: + f.write(diff) + + log.info(f"adding l10n bump commit for {bump_config['name']}! diff contents are:") + log_file_contents(diff) + + # create commit message + locale_map = build_locale_map(old_contents, new_contents) + commitmsg = build_commit_message(bump_config["name"], locale_map, dontbuild, ignore_closed_tree) + + # create action + lando_actions.append(create_commit_action(commitmsg, diff)) + + return lando_actions + + +def build_platform_dict(ignore_config, platform_configs, orig_platform_files): + """Build a dictionary of locale to list of platforms. + + Args: + ignore_config (dict): key/value pairs (str/[str]) of locales and + platforms that they _shouldn't_ be present for. + platform_configs ([dict]): dictionaries consisting of a path to a + shipped-locales style file (str) containing a list of locales + applicable to the platforms ([str]) provided. + and platforms ([str]) + orig_platform_files (dict): key/value pairs (str/str) of filenames + and file contents. one entry must be provided for each path + provided in `platform_configs`. 
+
+    Returns:
+        dict: the platform dict
+
+    """
+    platform_dict = {}
+    for platform_config in platform_configs:
+        orig_contents = orig_platform_files[platform_config["path"]]
+        for locale in orig_contents.splitlines():
+            if locale in ("en-US",):
+                continue
+            existing_platforms = set(platform_dict.get(locale, {}).get("platforms", []))
+            platforms = set(platform_config["platforms"])
+            ignore_platforms = set(ignore_config.get(locale, []))
+            platforms = (platforms | existing_platforms) - ignore_platforms
+            platform_dict[locale] = {"platforms": sorted(list(platforms))}
+    log.info("Built platform_dict:\n%s" % pprint.pformat(platform_dict))
+    return platform_dict
+
+
+# build_revision_dict_github {{{1
+def build_revision_dict(ignore_config, platform_configs, orig_platform_files, revision) -> dict:
+    """Add l10n revision information to the ``platform_dict``. All locales will
+    be bumped to the head revision of the `l10n_repo_target_branch` branch
+    of the l10n repository (`l10n_repo_url`).
+
+    Args:
+        ignore_config (dict): key/value pairs (str/[str]) of locales and
+            platforms that they _shouldn't_ be present for.
+        platform_configs ([dict]): dictionaries consisting of a path to a
+            shipped-locales style file (str) containing a list of locales
+            applicable to the platforms ([str]) provided.
+        orig_platform_files (dict): key/value pairs (str/str) of filenames
+            and file contents; one entry must be provided for each path
+            provided in `platform_configs`.
+ revision (str): the revision to use for each locale entry + + Returns: + dict: locale to dictionary of platforms and revision + """ + log.info("Building revision dict...") + platform_dict = build_platform_dict(ignore_config, platform_configs, orig_platform_files) + + for locale in platform_dict: + # no longer supported; this item will be removed in the future + platform_dict[locale]["pin"] = False + platform_dict[locale]["revision"] = revision + + log.info("revision_dict:\n%s" % pprint.pformat(platform_dict)) + return platform_dict + + +# build_commit_message {{{1 +def build_commit_message(name, locale_map, dontbuild=False, ignore_closed_tree=False): + """Build a commit message for the bumper. + + Args: + name (str): the human readable name for the path (e.g. Firefox l10n + changesets) + locale_map (dict): l10n changeset changes, keyed by locale + dontbuild (bool, optional): whether to add ``DONTBUILD`` to the + comment. Defaults to ``False`` + ignore_closed_tree (bool, optional): whether to add ``CLOSED TREE`` + to the comment. Defaults to ``False``. + + Returns: + str: the commit message + + """ + comments = "" + approval_str = "r=release a=l10n-bump" + for locale, revision in sorted(locale_map.items()): + comments += "%s -> %s\n" % (locale, revision) + if dontbuild: + approval_str += " DONTBUILD" + if ignore_closed_tree: + approval_str += " CLOSED TREE" + message = "no bug - Bumping %s %s\n\n" % (name, approval_str) + message += comments + return message + + +# build_locale_map {{{1 +def build_locale_map(old_contents, new_contents): + """Build a map of changed locales for the commit message. 
+ + Args: + old_contents (dict): the old l10n changesets + new_contents (dict): the bumped l10n changesets + + Returns: + dict: the changes per locale + + """ + locale_map = {} + for key in old_contents: + if key not in new_contents: + locale_map[key] = "removed" + for k, v in new_contents.items(): + if old_contents.get(k, {}).get("revision") != v["revision"]: + locale_map[k] = v["revision"] + if old_contents.get(k, {}).get("platforms") != v["platforms"]: + locale_map[k] = v["platforms"] + return locale_map diff --git a/landoscript/src/landoscript/actions/version_bump.py b/landoscript/src/landoscript/actions/version_bump.py index 535e645e8..5e0f5cc9a 100644 --- a/landoscript/src/landoscript/actions/version_bump.py +++ b/landoscript/src/landoscript/actions/version_bump.py @@ -1,4 +1,3 @@ -import datetime import logging import os.path import typing @@ -11,8 +10,9 @@ from scriptworker.exceptions import TaskVerificationError from landoscript.errors import LandoscriptError -from landoscript.lando import LandoAction +from landoscript.lando import LandoAction, create_commit_action from landoscript.util.diffs import diff_contents +from landoscript.util.log import log_file_contents from scriptworker_client.github_client import GithubClient log = logging.getLogger(__name__) @@ -36,11 +36,6 @@ } -def log_file_contents(contents): - for line in contents.splitlines(): - log.info(line) - - class VersionBumpInfo(TypedDict): next_version: str files: list[str] @@ -70,7 +65,7 @@ async def run( log.info("got files") for file, contents in orig_files.items(): log.info(f"{file} contents:") - log_file_contents(contents) + log_file_contents(str(contents)) diff = "" for file, orig in orig_files.items(): @@ -105,14 +100,12 @@ async def run( log.info("adding version bump commit! 
diff contents are:") log_file_contents(diff) - author = "Release Engineering Landoscript " - timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat() # version bumps always ignore a closed tree commitmsg = "Subject: Automatic version bump NO BUG a=release CLOSED TREE" if dontbuild: commitmsg += " DONTBUILD" - return {"action": "create-commit", "commitmsg": commitmsg, "diff": diff, "date": timestamp, "author": author} + return create_commit_action(commitmsg, diff) def find_what_version_parser_to_use(file): diff --git a/landoscript/src/landoscript/data/landoscript_task_schema.json b/landoscript/src/landoscript/data/landoscript_task_schema.json index 88affe61d..416f9b061 100644 --- a/landoscript/src/landoscript/data/landoscript_task_schema.json +++ b/landoscript/src/landoscript/data/landoscript_task_schema.json @@ -251,9 +251,6 @@ "name": { "type": "string" }, - "version_path": { - "type": "string" - }, "l10n_repo_url": { "type": "string" }, @@ -273,8 +270,7 @@ "required": [ "path", "name", - "platform_configs", - "version_path" + "platform_configs" ] } }, diff --git a/landoscript/src/landoscript/lando.py b/landoscript/src/landoscript/lando.py index ebdea3d75..1a638cf67 100644 --- a/landoscript/src/landoscript/lando.py +++ b/landoscript/src/landoscript/lando.py @@ -1,4 +1,5 @@ import asyncio +import datetime import logging from pprint import pprint from typing import Any, Callable @@ -15,6 +16,14 @@ LandoAction = dict[str, str] +def create_commit_action(commitmsg: str, diff: str) -> LandoAction: + """Return a `create-commit` lando action. 
Primarily exists to centralize the author name.""" + author = "Release Engineering Landoscript " + timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat() + + return {"action": "create-commit", "commitmsg": commitmsg, "diff": diff, "date": timestamp, "author": author} + + async def submit( session: ClientSession, lando_api: str, lando_repo: str, actions: list[LandoAction], sleeptime_callback: Callable[..., Any] = calculate_sleep_time ) -> str: diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index 24ec1315f..7fc2d204d 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -6,7 +6,8 @@ from scriptworker.exceptions import TaskVerificationError from landoscript import lando -from landoscript.actions import tag, version_bump +from landoscript.actions import l10n_bump, tag, version_bump +from landoscript.treestatus import is_tree_open from scriptworker_client.github_client import GithubClient log = logging.getLogger(__name__) @@ -46,6 +47,8 @@ async def async_main(context): # Note: `lando_repo` is not necessarily the same as the repository's name # on Github. 
lando_repo = payload["lando_repo"] + dontbuild = payload.get("dontbuild", False) + ignore_closed_tree = payload.get("ignore_closed_tree", False) # pull owner, repo, and branch from config # TODO: replace this with a lookup through the lando API when that API exists @@ -64,42 +67,58 @@ async def async_main(context): os.makedirs(public_artifact_dir) lando_actions: list[lando.LandoAction] = [] - async with GithubClient(context.config["github_config"], owner, repo) as gh_client: - for action in payload["actions"]: - log.info(f"processing action: {action}") - - if action == "version_bump": - version_bump_action = await version_bump.run( - gh_client, - public_artifact_dir, - branch, - payload["version_bump_info"], - payload.get("dontbuild", False), - ) - # sometimes version bumps are no-ops - if version_bump_action: - lando_actions.append(version_bump_action) - elif action == "tag": - tag_actions = tag.run(payload["tags"]) - lando_actions.extend(tag_actions) - - log.info("finished processing action") - - if lando_actions: - if payload.get("dry_run", False): - log.info("dry run...would've submitted lando actions:") - for la in lando_actions: - log.info(la) - else: - log.info("not a dry run...submitting lando actions:") - for la in lando_actions: - log.info(la) + async with aiohttp.ClientSession() as session: + async with GithubClient(context.config["github_config"], owner, repo) as gh_client: + for action in payload["actions"]: + log.info(f"processing action: {action}") + + if action == "version_bump": + version_bump_action = await version_bump.run( + gh_client, + public_artifact_dir, + branch, + payload["version_bump_info"], + payload.get("dontbuild", False), + ) + # sometimes version bumps are no-ops + if version_bump_action: + lando_actions.append(version_bump_action) + elif action == "tag": + tag_actions = tag.run(payload["tags"]) + lando_actions.extend(tag_actions) + elif action == "l10n_bump": + if not ignore_closed_tree: + # despite `ignore_closed_tree` being at 
the top level of the + # payload, only l10n bumps pay attention to it. we should probably + # set it to true for all other actions so we can actually make + # this a global check + if not await is_tree_open(session, config["treestatus_url"], lando_repo, config["sleeptime_callback"]): + log.info("Treestatus is closed; skipping l10n bump.") + continue + + l10n_bump_actions = await l10n_bump.run( + gh_client, context.config["github_config"], public_artifact_dir, branch, payload["l10n_bump_info"], dontbuild, ignore_closed_tree + ) + # sometimes nothing has changed! + if l10n_bump_actions: + lando_actions.extend(l10n_bump_actions) + + log.info("finished processing action") + + if lando_actions: + if payload.get("dry_run", False): + log.info("Dry run...would've submitted lando actions:") + for la in lando_actions: + log.info(la) + else: + log.info("Not a dry run...submitting lando actions:") + for la in lando_actions: + log.info(la) - async with aiohttp.ClientSession() as session: status_url = await lando.submit(session, config["lando_api"], lando_repo, lando_actions, config["sleeptime_callback"]) await lando.poll_until_complete(session, config["poll_time"], status_url) - else: - log.info("no lando actions to submit!") + else: + log.info("No lando actions to submit!") def main(config_path: str = ""): diff --git a/landoscript/src/landoscript/treestatus.py b/landoscript/src/landoscript/treestatus.py new file mode 100644 index 000000000..2d1315d6d --- /dev/null +++ b/landoscript/src/landoscript/treestatus.py @@ -0,0 +1,40 @@ +import logging +from typing import Any, Callable + +from aiohttp import ClientResponseError, ClientSession +from async_timeout import timeout +from scriptworker.utils import calculate_sleep_time, retry_async + +log = logging.getLogger(__name__) + + +async def is_tree_open(session: ClientSession, treestatus_url: str, lando_repo: str, sleeptime_callback: Callable[..., Any] = calculate_sleep_time) -> bool: + """Return True if we can land based on 
treestatus. + + Args: + config (dict): the running config + task (dict): the running task + + Returns: + bool: ``True`` if the tree is open. + + """ + url = f"{treestatus_url}/trees/{lando_repo}" + async with timeout(30): + log.info(f"checking treestatus for {lando_repo}") + resp = await retry_async( + session.get, + args=(url,), + kwargs={"raise_for_status": True}, + attempts=10, + retry_exceptions=ClientResponseError, + sleeptime_callback=sleeptime_callback, + ) + + log.info(f"success! got {resp.status} response") + treestatus = await resp.json() + if treestatus["result"]["status"] != "closed": + log.info("treestatus is %s - assuming we can land", repr(treestatus["result"]["status"])) + return True + + return False diff --git a/landoscript/src/landoscript/util/log.py b/landoscript/src/landoscript/util/log.py new file mode 100644 index 000000000..37468959b --- /dev/null +++ b/landoscript/src/landoscript/util/log.py @@ -0,0 +1,8 @@ +import logging + +log = logging.getLogger(__name__) + + +def log_file_contents(contents: str): + for line in contents.splitlines(): + log.info(line) diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index f791559b1..ae4a6618e 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -3,6 +3,7 @@ import pytest from scriptworker.context import Context +from simple_github.client import GITHUB_GRAPHQL_ENDPOINT pytest_plugins = ("pytest-scriptworker-client",) @@ -28,6 +29,7 @@ def context(privkey_file, tmpdir): }, "poll_time": 0, "sleeptime_callback": lambda _: 0, + "treestatus_url": "https://treestatus.fake", } return context @@ -59,6 +61,23 @@ def setup_test(github_installation_responses, context, payload, actions, repo="r return submit_uri, status_uri, job_id, scopes +def setup_fetch_files_response(aioresponses, code, initial_values={}): + if initial_values: + github_response = {} + for file, contents in initial_values.items(): + github_response[file] = f"{contents}" + + payload = { + 
"data": { + "repository": {k: {"text": v} for k, v in github_response.items()}, + } + } + else: + payload = {} + + aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=code, payload=payload) + + def assert_lando_submission_response(requests, submit_uri, attempts=1): assert ("POST", submit_uri) in requests reqs = requests[("POST", submit_uri)] diff --git a/landoscript/tests/test_l10n_bump.py b/landoscript/tests/test_l10n_bump.py new file mode 100644 index 000000000..d3bb44793 --- /dev/null +++ b/landoscript/tests/test_l10n_bump.py @@ -0,0 +1,860 @@ +import json +import pytest +from scriptworker.client import TaskVerificationError +from simple_github.client import GITHUB_GRAPHQL_ENDPOINT + +from landoscript.script import async_main + +from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_fetch_files_response + + +def setup_treestatus_response(aioresponses, context, tree="repo_name", status="open", has_err=False): + url = f'{context.config["treestatus_url"]}/trees/{tree}' + if has_err: + aioresponses.get(url, status=500) + else: + resp = { + "result": { + "category": "development", + "log_id": 12345, + "message_of_the_day": "", + "reason": "", + "status": status, + "tags": [], + "tree": tree, + }, + } + aioresponses.get(url, status=200, payload=resp) + + +def get_locale_block(locale, platforms, rev): + # fmt: off + locale_block = [ + f' "{locale}": {{', + ' "pin": false,', + ' "platforms": [' + ] + platform_entries = [] + for platform in sorted(platforms): + platform_entries.append(f' "{platform}"') + locale_block.extend(",\n".join(platform_entries).split("\n")) + locale_block.extend([ + " ],", + f' "revision": "{rev}"', + # closing brace omitted because these blocks are used to generate + # diffs, and in diffs, these end up using context from the subsequent + # locale + # " }", + ]) + # fmt: on + + return locale_block + + +def assert_l10n_bump_response(req, l10n_bump_info, expected_changes, initial_values, expected_values, 
dontbuild, ignore_closed_tree): + assert "json" in req.kwargs + assert "actions" in req.kwargs["json"] + create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] + assert len(create_commit_actions) == expected_changes + + for lbi in l10n_bump_info: + name = lbi["name"] + + action = None + for cca in create_commit_actions: + if name in cca["commitmsg"]: + action = cca + + if not action: + assert False, f"couldn't find create-commit action for {name}!" + + if dontbuild: + assert "DONTBUILD" in action["commitmsg"] + + if ignore_closed_tree: + assert "CLOSED TREE" in action["commitmsg"] + + # ensure metadata is correct + assert action["author"] == "Release Engineering Landoscript " + # we don't actually verify the value here; it's not worth the trouble of mocking + assert "date" in action + + diffs = action["diff"].split("diff\n") + assert len(diffs) == 1 + diff = diffs[0] + + initial_locales = set(initial_values[name]["locales"]) + expected_locales = set(expected_values[name]["locales"]) + initial_platforms = set(initial_values[name]["platforms"]) + expected_platforms = set(expected_values[name]["platforms"]) + added_locales = expected_locales - initial_locales + removed_locales = initial_locales - expected_locales + + # ensure each expected locale has the new revision + before_rev = initial_values[name]["revision"] + after_rev = expected_values[name]["revision"] + + if before_rev != after_rev: + revision_replacements = diff.count(f'- "revision": "{before_rev}"\n+ "revision": "{after_rev}') + # even if new locales are added, we only expect revision replacements + # for initial ones that are not being removed. added locales are checked + # further down. + expected_revision_replacements = len(initial_locales - removed_locales) + assert revision_replacements == expected_revision_replacements, "wrong number of revisions replaced!" 
+ + # ensure any added locales are now present + if added_locales: + for locale in added_locales: + expected = "+" + "\n+".join(get_locale_block(locale, expected_platforms, after_rev)) + assert expected in diff + + # ensure any removed locales are no longer present + if removed_locales: + for locale in removed_locales: + expected = "-" + "\n-".join(get_locale_block(locale, expected_platforms, before_rev)) + assert expected in diff + + # ensure any added platforms are now present + added_platforms = expected_platforms - initial_platforms + for platform in added_platforms: + expected_additions = len(expected_locales) + for plats in lbi["ignore_config"].values(): + if platform in plats: + expected_additions -= 1 + expected = f'+ "{platform}"' + assert diff.count(expected) == expected_additions + + # ensure any removed platforms are no longer present + removed_platforms = initial_platforms - expected_platforms + for platform in removed_platforms: + expected_additions = len(expected_locales) + for plats in lbi["ignore_config"].values(): + if platform in plats: + expected_additions -= 1 + expected = f'- "{platform}"' + assert diff.count(expected) == expected_additions + + +def setup_file_responses(aioresponses, l10n_bump_info, initial_values, expected_locales): + file_responses = {} + name = l10n_bump_info["name"] + ignore_config = l10n_bump_info.get("ignore_config", {}) + revision = initial_values[name]["revision"] + locales = initial_values[name]["locales"] + platforms = initial_values[name]["platforms"] + for pc in l10n_bump_info["platform_configs"]: + file_responses[pc["path"]] = "\n".join(expected_locales) + + changesets_data = {} + for locale in locales: + locale_platforms = [] + for platform in platforms: + if platform not in ignore_config.get(locale, []): + locale_platforms.append(platform) + + changesets_data[locale] = { + "pin": False, + "platforms": [], + "revision": revision, + "platforms": sorted(locale_platforms), + } + + 
file_responses[l10n_bump_info["path"]] = json.dumps(changesets_data) + + setup_fetch_files_response(aioresponses, 200, file_responses) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "l10n_bump_info,initial_values,expected_values,dry_run,dontbuild,ignore_closed_tree", + ( + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + True, + False, + False, + id="dry_run", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + False, + False, + False, + id="new_revision", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + 
"ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + False, + True, + False, + id="dontbuild", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + False, + False, + True, + id="ignore_closed_tree", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", 
"win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + False, + True, + True, + id="dontbuild_ignore_closed_tree", + ), + pytest.param( + [ + { + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Mobile l10n changesets", + "path": "mobile/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "mobile/android/locales/all-locales", + "platforms": ["android", "android-arm"], + } + ], + } + ], + { + "Mobile l10n changesets": { + "revision": "abcdef", + "locales": ["de", "ja"], + "platforms": ["android", "android-arm"], + }, + }, + { + "Mobile l10n changesets": { + "revision": "ghijkl", + "locales": ["de", "ja"], + "platforms": ["android", "android-arm"], + }, + }, + False, + False, + False, + id="no_ignore_config", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + }, + { + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Mobile l10n changesets", + "path": "mobile/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "mobile/android/locales/all-locales", + "platforms": ["android", "android-arm"], + } + ], + }, + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + 
"platforms": ["linux64", "macosx64", "win64"], + }, + "Mobile l10n changesets": { + "revision": "abcdef", + "locales": ["de", "ja"], + "platforms": ["android", "android-arm"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + "Mobile l10n changesets": { + "revision": "ghijkl", + "locales": ["de", "ja"], + "platforms": ["android", "android-arm"], + }, + }, + False, + False, + False, + id="multiple_bumps", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + False, + False, + False, + id="no_new_revision", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n 
changesets": { + "revision": "ghijkl", + "locales": ["af", "en-CA", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + False, + False, + False, + id="new_locale", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + False, + False, + False, + id="removed_locale", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "linux64-aarch64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "linux64-aarch64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "linux64-aarch64", "macosx64", "win64"], + }, + }, + False, + False, + False, + id="new_platform", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": 
["linux64", "linux64-aarch64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "linux64-aarch64", "macosx64", "win64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "linux64-aarch64", "macosx64", "win64"], + }, + }, + False, + False, + False, + id="new_platform_without_new_revision", + ), + pytest.param( + [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64"], + } + ], + } + ], + { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + }, + { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64"], + }, + }, + False, + False, + False, + id="removed_platform", + ), + ), +) +async def test_success( + aioresponses, github_installation_responses, context, l10n_bump_info, initial_values, expected_values, dry_run, dontbuild, ignore_closed_tree +): + + payload = { + "actions": ["l10n_bump"], + "lando_repo": "repo_name", + "l10n_bump_info": l10n_bump_info, + "dry_run": dry_run, + "dontbuild": dontbuild, + "ignore_closed_tree": 
ignore_closed_tree, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["l10n_bump"]) + setup_treestatus_response(aioresponses, context) + + # because the github graphql endpoint is generic we need to make sure we create + # these responses in the correct order... + for lbi in l10n_bump_info: + # this is called once for the repository we're bumping files in in + # `setup_test`. we have to call it again for each bump info, because + # the repository information exists in that part of the payload + github_installation_responses("mozilla-l10n") + setup_file_responses(aioresponses, lbi, initial_values, expected_values[lbi["name"]]["locales"]) + revision = expected_values[lbi["name"]]["revision"] + aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=200, payload={"data": {"repository": {"object": {"oid": revision}}}}) + + if not dry_run: + aioresponses.post( + submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} + ) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + expected_changes = 0 + for initial_info, expected_info in zip(initial_values.values(), expected_values.values()): + for k in initial_info.keys(): + if initial_info[k] != expected_info[k]: + expected_changes += 1 + break + + for lbi in l10n_bump_info: + name = lbi["name"] + if initial_values[name] != expected_values[name]: + assert (context.config["artifact_dir"] / f"public/build/l10n-bump-{name}.diff").exists() + + if not dry_run and expected_changes > 0: + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_l10n_bump_response(req, l10n_bump_info, expected_changes, initial_values, expected_values, dontbuild, ignore_closed_tree) + 
assert_status_response(aioresponses.requests, status_uri) + else: + assert ("POST", submit_uri) not in aioresponses.requests + assert ("GET", status_uri) not in aioresponses.requests + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "l10n_bump_info,errmsg", + ( + pytest.param( + [ + { + "ignore_config": {}, + "l10n_repo_target_branch": "main", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + }, + ], + "without an l10n_repo_url", + id="no_l10n_repo_url", + ), + pytest.param( + [ + { + "ignore_config": {}, + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + }, + ], + "l10n_repo_target_branch must be present", + id="no_l10n_branch", + ), + pytest.param( + [ + { + "ignore_config": {}, + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + }, + ], + "without an l10n_repo_url", + id="no_l10n_repo_url_or_branch", + ), + ), +) +async def test_l10n_repo_errors(aioresponses, github_installation_responses, context, l10n_bump_info, errmsg): + + payload = { + "actions": ["l10n_bump"], + "lando_repo": "repo_name", + "l10n_bump_info": l10n_bump_info, + } + _, _, _, scopes = setup_test(github_installation_responses, context, payload, ["l10n_bump"]) + setup_treestatus_response(aioresponses, context) + + context.task = {"payload": payload, "scopes": scopes} + + try: + await async_main(context) + assert False, "should've raised TaskVerificationError" + except TaskVerificationError as e: + assert errmsg in e.args[0] + + 
+@pytest.mark.asyncio +async def test_tree_is_closed_noop(aioresponses, github_installation_responses, context): + payload = { + "actions": ["l10n_bump"], + "lando_repo": "repo_name", + "l10n_bump_info": [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", "macosx64", "win64"], + } + ], + } + ], + "ignore_closed_tree": False, + } + submit_uri, status_uri, _, scopes = setup_test(github_installation_responses, context, payload, ["l10n_bump"]) + setup_treestatus_response(aioresponses, context, status="closed") + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + assert ("POST", submit_uri) not in aioresponses.requests + assert ("GET", status_uri) not in aioresponses.requests diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index cdc494a1d..4b2a45e3a 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -4,9 +4,8 @@ from landoscript.errors import LandoscriptError from landoscript.script import async_main from landoscript.actions.version_bump import ALLOWED_BUMP_FILES, _VERSION_CLASS_PER_BEGINNING_OF_PATH -from simple_github.client import GITHUB_GRAPHQL_ENDPOINT -from .conftest import assert_lando_submission_response, assert_status_response, setup_test +from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_fetch_files_response def assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps): @@ -42,23 +41,6 @@ def assert_add_commit_response(req, commit_msg_strings, initial_values, expected assert False, f"no version bump found for {file}: {diffs}" -def 
setup_fetch_files_response(aioresponses, code, initial_values={}):
-    if initial_values:
-        github_response = {}
-        for file, contents in initial_values.items():
-            github_response[file] = f"{contents}"
-
-        payload = {
-            "data": {
-                "repository": {k: {"text": v} for k, v in github_response.items()},
-            }
-        }
-    else:
-        payload = {}
-
-    aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=code, payload=payload)
-
-
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
     "payload,initial_values,expected_bumps,commit_msg_strings",

From cc5fb69959b411d992985399a45d031afc9b54a2 Mon Sep 17 00:00:00 2001
From: Ben Hearsum
Date: Thu, 27 Mar 2025 14:02:05 -0400
Subject: [PATCH 05/13] feat(landoscript): implement `merge_day` action

Most of the helpers here are copied out of treescript, with some tweaks
and simplifications where it was possible (mostly to get rid of
now-unnecessary logic).

Some refactoring of other actions was done here as well, to make it
possible to call them from the `merge_day` action. Most notably: the
`version_bump` action has been updated to support multiple version bumps
in one run, which allows us to do all the merge day version bumps in a
single commit, as we do now with treescript.

Additional test refactoring/movement was also done to make many of the
helpers available to merge day tests.
--- .../src/landoscript/actions/merge_day.py | 285 ++++++++ landoscript/src/landoscript/actions/tag.py | 7 +- .../src/landoscript/actions/version_bump.py | 96 +-- landoscript/src/landoscript/script.py | 9 +- landoscript/src/landoscript/util/diffs.py | 2 + landoscript/src/landoscript/util/version.py | 24 + landoscript/tests/conftest.py | 232 +++++++ landoscript/tests/test_l10n_bump.py | 142 +--- landoscript/tests/test_merge_day.py | 655 ++++++++++++++++++ landoscript/tests/test_script.py | 239 ++++++- landoscript/tests/test_version_bump.py | 36 +- 11 files changed, 1491 insertions(+), 236 deletions(-) create mode 100644 landoscript/src/landoscript/actions/merge_day.py create mode 100644 landoscript/src/landoscript/util/version.py create mode 100644 landoscript/tests/test_merge_day.py diff --git a/landoscript/src/landoscript/actions/merge_day.py b/landoscript/src/landoscript/actions/merge_day.py new file mode 100644 index 000000000..ae5bae408 --- /dev/null +++ b/landoscript/src/landoscript/actions/merge_day.py @@ -0,0 +1,285 @@ +import logging +import os.path +import re +import string +from datetime import date +from typing import TypedDict + +import attr +from mozilla_version.gecko import GeckoVersion +from mozilla_version.version import BaseVersion +from scriptworker.client import TaskVerificationError + +from landoscript.actions import tag, version_bump +from landoscript.errors import LandoscriptError +from landoscript.lando import LandoAction, create_commit_action +from landoscript.util.diffs import diff_contents +from landoscript.util.log import log_file_contents +from landoscript.util.version import find_what_version_parser_to_use +from scriptworker_client.github_client import GithubClient, defaultdict + +log = logging.getLogger(__name__) + + +class VersionFile(TypedDict): + filename: str + new_suffix: str + version_bump: str + + +class MergeInfo(TypedDict): + to_branch: str + from_branch: str + base_tag: str + end_tag: str + merge_old_head: bool + 
fetch_version_from: str + touch_clobber_file: bool + version_files: list[VersionFile] + replacements: list[list[str]] + regex_replacements: list[list[str]] + + +async def run(github_client: GithubClient, public_artifact_dir: str, merge_info: MergeInfo) -> list[LandoAction]: + to_branch = merge_info["to_branch"] + from_branch = merge_info.get("from_branch") + end_tag = merge_info.get("end_tag") + base_tag = merge_info.get("base_tag") + merge_old_head = merge_info.get("merge_old_head") + version_file = merge_info["fetch_version_from"] + actions = [] + + log.info("Starting merge day operations!") + to_version = await get_version(github_client, version_file, to_branch) + log.info(f"to_version is: {to_version}") + if end_tag: + # End tag specifically uses the `to_version` _before_ we bump it + # (because we're declaring its current version as "done") + end_tag_fmted = end_tag.format(major_version=to_version.major_number) + log.info(f"Adding end_tag: {end_tag_fmted}") + actions.extend(tag.run([end_tag_fmted])) + + # We need to determine `bump_version`, which is what we will use when + # performing version bumps later on. This version must be whatever version + # is present on the `to_branch` immediately prior to the version bumps taking + # place. When `from_branch` is present, this code will end up on `to_branch` + # at that point. If there's no `from_branch`, whatever is currently on `to_branch` + # is correct. + if from_branch: + bump_version = await get_version(github_client, version_file, from_branch) + log.info(f"from_branch is present, got bump_version from it: {bump_version}") + + # base tagging _only_ happens when we have a `from_branch` -- these are + # scenarios where we're uplifting one branch to another, and beginning a new + # version number on the `to_branch`, which we declare with the `BASE` tag. 
+ if base_tag: + base_tag_fmted = base_tag.format(major_version=bump_version.major_number) + log.info(f"Adding base_tag: {base_tag_fmted}") + actions.extend(tag.run([base_tag_fmted])) + else: + bump_version = to_version + log.info(f"from_branch is not present, using to_version as bump_version: {bump_version}") + + if merge_old_head: + log.info(f"Merging old head. target is from_branch ({from_branch}), strategy is theirs") + # perform merge + # `theirs` strategy means that the repo being modified will have its tree updated to match that + # of the `target`. + merge_msg = f"Update {to_branch} to {from_branch}" + actions.append({"action": "merge-onto", "target": from_branch, "strategy": "theirs", "message": merge_msg}) + + if merge_info.get("version_files"): + log.info("Performing version bumps") + files_by_new_suffix = defaultdict(list) + bump_types = set() + for vf in merge_info["version_files"]: + if bump_type := vf.get("version_bump"): + bump_types.add(bump_type) + files_by_new_suffix[vf.get("new_suffix", "")].append(vf["filename"]) + + if len(bump_types) == 0: + bump_types.add("") + elif len(bump_types) != 1: + raise TaskVerificationError(f"must provide zero or one `version_bump` type, got: {len(bump_types)}") + + bump_type = bump_types.pop() + version_bump_infos = [] + for new_suffix, files in files_by_new_suffix.items(): + # Note that `bump_type` may be an empty string, which means a bump will + # _not_ happen. ie: we may end up with a new suffix but the same version + # number. 
+ next_version = get_new_version(bump_version, new_suffix, bump_type) + version_bump_infos.append( + { + "files": files, + "next_version": next_version, + } + ) + + log.info(f"version_bump_infos is: {version_bump_infos}") + actions.append( + await version_bump.run( + github_client, + public_artifact_dir, + to_branch, + version_bump_infos, + dontbuild=False, + ) + ) + + # process replacements, regex-replacements, and update clobber file + replacements = merge_info.get("replacements", []) + regex_replacements = merge_info.get("regex_replacements", []) + diff = "" + if replacements or regex_replacements: + log.info("Performing replacements and regex_replacements") + needed_files = [] + for r in replacements: + needed_files.append(r[0]) + for r in regex_replacements: + needed_files.append(r[0]) + + orig_contents = await github_client.get_files(needed_files, to_branch) + # At the moment, there are no known cases of needing to replace with + # a suffix...so we simply don't handle that here! + new_contents = process_replacements(bump_version, replacements, regex_replacements, orig_contents) + for fn in orig_contents: + if orig_contents[fn] is None: + raise LandoscriptError(f"Couldn't find file '{fn}' in repository!") + + diff += diff_contents(str(orig_contents[fn]), new_contents[fn], fn) + + if merge_info.get("touch_clobber_file", True): + log.info("Touching clobber file") + orig_clobber_file = (await github_client.get_files("CLOBBER", to_branch))["CLOBBER"] + if orig_clobber_file is None: + raise LandoscriptError("Couldn't find CLOBBER file in repository!") + + new_clobber_file = get_new_clobber_file(orig_clobber_file) + diff += diff_contents(orig_clobber_file, new_clobber_file, "CLOBBER") + + log.info("replacements and clobber diff is:") + log_file_contents(diff) + + with open(os.path.join(public_artifact_dir, "replacements.diff"), "w+") as f: + f.write(diff) + + commitmsg = "Subject: Update configs after merge day operations" + 
actions.append(create_commit_action(commitmsg, diff)) + + return actions + + +async def get_version(github_client: GithubClient, version_file: str, branch: str): + resp = await github_client.get_files(version_file, branch) + contents = resp[version_file] + if contents is None: + raise LandoscriptError(f"Couldn't find {version_file} in repository!") + + VersionClass = find_what_version_parser_to_use(version_file) + lines = [line for line in contents.splitlines() if line and not line.startswith("#")] + return VersionClass.parse(lines[-1]) + + +def _get_attr_evolve_kwargs(version): + kwargs = { + "beta_number": None, + "is_nightly": False, + } + if isinstance(version, GeckoVersion): + kwargs["is_esr"] = False + return kwargs + + +def get_new_version(version: BaseVersion, new_suffix="", bump_type=""): + """Create a new version string. If `bump_type` is `major` the first part of + the version will be increased by 1. If `bump_type` is `minor` the second part + of the version will be increased by 1. Suffixes will be stripped from the + result and `new_suffix` will be applied to it.""" + + if bump_type == "major": + new_version = version.bump("major_number") + elif bump_type == "minor": + new_version = version.bump("minor_number") + else: + # no bump; usually this means there's a new suffix + new_version = version + + new_version = attr.evolve(new_version, **_get_attr_evolve_kwargs(new_version)) + new_version = f"{new_version}{new_suffix}" + return new_version + + +class BashFormatter(string.Formatter): + """BashFormatter: Safer bash strings. + + Ignore things that are probably bash variables when formatting. 
+ + For example, this will be passed back unchanged: + "MOZ_REQUIRE_SIGNING=${MOZ_REQUIRE_SIGNING-0}" + while still allowing us to have: + "Tagging {current_major_version}" + """ + + def get_value(self, key, args, kwargs): + """If a value is not found, return the key.""" + if isinstance(key, str): + return kwargs.get(key, "{" + key + "}") + else: + return string.Formatter().get_value(key, args, kwargs) + + +def replace(file_name, text, from_, to_, use_regex=False): + """Replace text in a file.""" + log.info("Replacing %s -> %s in %s", from_, to_, file_name) + + if use_regex: + new_text = re.sub(from_, to_, text) + else: + new_text = text.replace(from_, to_) + + if text == new_text: + raise ValueError(f"{file_name} does not contain {from_}") + + return new_text + + +def process_replacements(version, replacements, regex_replacements, orig_contents): + """Apply changes to repo required for merge/rebranding.""" + log.info("Processing replacements and regex-replacements") + + # Used in file replacements, further down. + format_options = { + "current_major_version": version.major_number, + "next_major_version": version.major_number + 1, + "current_weave_version": version.major_number + 2, + "next_weave_version": version.major_number + 3, # current_weave_version + 1 + } + + # Cope with bash variables in strings that we don't want to + # be formatted in Python. We do this by ignoring {vars} we + # aren't given keys for. 
+ fmt = BashFormatter() + new_contents = {} + for f, from_, to in replacements: + from_ = fmt.format(from_, **format_options) + to = fmt.format(to, **format_options) + new_contents[f] = replace(f, orig_contents[f], from_, to) + + for f, from_, to in regex_replacements: + from_ = from_.format(**format_options) + to = fmt.format(to, **format_options) + new_contents[f] = replace(f, orig_contents[f], from_, to, use_regex=True) + + return new_contents + + +def get_new_clobber_file(contents): + """Update the clobber file in the root of the repo.""" + log.info("Updating clobber file") + new_contents = "" + for line in contents.splitlines(): + line = line.strip() + if line.startswith("#") or line == "": + new_contents += f"{line}\n" + return f"{new_contents}Merge day clobber {str(date.today())}" diff --git a/landoscript/src/landoscript/actions/tag.py b/landoscript/src/landoscript/actions/tag.py index e38708382..bef76e26d 100644 --- a/landoscript/src/landoscript/actions/tag.py +++ b/landoscript/src/landoscript/actions/tag.py @@ -3,12 +3,15 @@ from landoscript.lando import LandoAction -def run(tags: list[str]) -> list[LandoAction]: +def run(tags: list[str], target_revision: str | None = None) -> list[LandoAction]: if len(tags) < 1: raise TaskVerificationError("must provide at least one tag!") actions = [] for tag in tags: - actions.append({"action": "tag", "name": tag}) + action = {"action": "tag", "name": tag} + if target_revision: + action["target"] = target_revision + actions.append(action) return actions diff --git a/landoscript/src/landoscript/actions/version_bump.py b/landoscript/src/landoscript/actions/version_bump.py index 5e0f5cc9a..1386c275b 100644 --- a/landoscript/src/landoscript/actions/version_bump.py +++ b/landoscript/src/landoscript/actions/version_bump.py @@ -1,11 +1,8 @@ import logging import os.path -import typing from typing import TypedDict from gql.transport.exceptions import TransportError -from mozilla_version.gecko import FirefoxVersion, 
GeckoVersion, ThunderbirdVersion -from mozilla_version.mobile import MobileVersion from mozilla_version.version import BaseVersion from scriptworker.exceptions import TaskVerificationError @@ -13,6 +10,7 @@ from landoscript.lando import LandoAction, create_commit_action from landoscript.util.diffs import diff_contents from landoscript.util.log import log_file_contents +from landoscript.util.version import find_what_version_parser_to_use from scriptworker_client.github_client import GithubClient log = logging.getLogger(__name__) @@ -27,14 +25,6 @@ "mail/config/version_display.txt", ) -# A mapping of bump file prefixes to parsers for their contents. -_VERSION_CLASS_PER_BEGINNING_OF_PATH = { - "browser/": FirefoxVersion, - "config/milestone.txt": GeckoVersion, - "mobile/android/": MobileVersion, - "mail/": ThunderbirdVersion, -} - class VersionBumpInfo(TypedDict): next_version: str @@ -45,50 +35,52 @@ async def run( github_client: GithubClient, public_artifact_dir: str, branch: str, - version_bump_info: VersionBumpInfo, + version_bump_infos: list[VersionBumpInfo], dontbuild: bool, ) -> LandoAction: - """Perform version bumps on the files given in `version_bump_info`, if necessary.""" + """Perform version bumps on the files given in each `version_bump_info`, if necessary.""" + + diff = "" - next_version = version_bump_info["next_version"] + for version_bump_info in version_bump_infos: + next_version = version_bump_info["next_version"] - for file in version_bump_info["files"]: - if file not in ALLOWED_BUMP_FILES: - raise TaskVerificationError("{} is not in version bump allowlist".format(file)) + for file in version_bump_info["files"]: + if file not in ALLOWED_BUMP_FILES: + raise TaskVerificationError("{} is not in version bump allowlist".format(file)) - try: - log.info("fetching bump files from github") - orig_files = await github_client.get_files(version_bump_info["files"], branch) - except TransportError as e: - raise LandoscriptError("couldn't retrieve bump files 
from github") from e + try: + log.info("fetching bump files from github") + orig_files = await github_client.get_files(version_bump_info["files"], branch) + except TransportError as e: + raise LandoscriptError("couldn't retrieve bump files from github") from e - log.info("got files") - for file, contents in orig_files.items(): - log.info(f"{file} contents:") - log_file_contents(str(contents)) + log.info("got files") + for file, contents in orig_files.items(): + log.info(f"{file} contents:") + log_file_contents(str(contents)) - diff = "" - for file, orig in orig_files.items(): - if not orig: - raise LandoscriptError(f"{file} does not exist!") + for file, orig in orig_files.items(): + if not orig: + raise LandoscriptError(f"{file} does not exist!") - log.info(f"considering {file}") - cur, next_ = get_cur_and_next_version(file, orig, next_version) - if next_ < cur: - log.warning(f"{file}: Version bumping skipped due to conflicting values: (next version {next_} is < current version {cur})") - continue - elif next_ == cur: - log.info(f"{file}: Version bumping skipped due to unchanged values") - continue + log.info(f"considering {file}") + cur, next_ = get_cur_and_next_version(file, orig, next_version) + if next_ < cur: + log.warning(f"{file}: Version bumping skipped due to conflicting values: (next version {next_} is < current version {cur})") + continue + elif next_ == cur: + log.info(f"{file}: Version bumping skipped due to unchanged values") + continue - modified = orig.replace(str(cur), str(next_)) - if orig == modified: - raise LandoscriptError("file not modified, this should be impossible") + modified = orig.replace(str(cur), str(next_)) + if orig == modified: + raise LandoscriptError("file not modified, this should be impossible") - log.info(f"{file}: successfully bumped! new contents are:") - log_file_contents(modified) + log.info(f"{file}: successfully bumped! 
new contents are:") + log_file_contents(modified) - diff += diff_contents(orig, modified, file) + diff += diff_contents(orig, modified, file) if not diff: log.info("no files to bump") @@ -108,18 +100,6 @@ async def run( return create_commit_action(commitmsg, diff) -def find_what_version_parser_to_use(file): - version_classes = [cls for path, cls in _VERSION_CLASS_PER_BEGINNING_OF_PATH.items() if file.startswith(path)] - - number_of_version_classes = len(version_classes) - if number_of_version_classes > 1: - raise LandoscriptError(f'File "{file}" matched too many classes: {version_classes}') - if number_of_version_classes > 0: - return version_classes[0] - - raise LandoscriptError(f"Could not determine version class based on file path for {file}") - - def get_cur_and_next_version(filename, orig_contents, next_version): VersionClass: BaseVersion = find_what_version_parser_to_use(filename) lines = [line for line in orig_contents.splitlines() if line and not line.startswith("#")] @@ -128,8 +108,8 @@ def get_cur_and_next_version(filename, orig_contents, next_version): # Special case for ESRs; make sure the next version is consistent with the # current version with respect to whether or not it includes the `esr` # suffix. 
- if next_version.endswith("esr") and not typing.cast(GeckoVersion, cur).is_esr: - next_version = next_version.replace("esr", "") + # if next_version.endswith("esr") and not typing.cast(GeckoVersion, cur).is_esr: + # next_version = next_version.replace("esr", "") next_ = VersionClass.parse(next_version) diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index 7fc2d204d..532f90e58 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -6,7 +6,7 @@ from scriptworker.exceptions import TaskVerificationError from landoscript import lando -from landoscript.actions import l10n_bump, tag, version_bump +from landoscript.actions import l10n_bump, merge_day, tag, version_bump from landoscript.treestatus import is_tree_open from scriptworker_client.github_client import GithubClient @@ -77,8 +77,8 @@ async def async_main(context): gh_client, public_artifact_dir, branch, - payload["version_bump_info"], - payload.get("dontbuild", False), + [version_bump.VersionBumpInfo(payload["version_bump_info"])], + dontbuild, ) # sometimes version bumps are no-ops if version_bump_action: @@ -86,6 +86,9 @@ async def async_main(context): elif action == "tag": tag_actions = tag.run(payload["tags"]) lando_actions.extend(tag_actions) + elif action == "merge_day": + merge_day_actions = await merge_day.run(gh_client, public_artifact_dir, payload["merge_info"]) + lando_actions.extend(merge_day_actions) elif action == "l10n_bump": if not ignore_closed_tree: # despite `ignore_closed_tree` being at the top level of the diff --git a/landoscript/src/landoscript/util/diffs.py b/landoscript/src/landoscript/util/diffs.py index 2d087c3d2..a68491981 100644 --- a/landoscript/src/landoscript/util/diffs.py +++ b/landoscript/src/landoscript/util/diffs.py @@ -10,5 +10,7 @@ def diff_contents(orig: str, modified: str, file: str) -> str: diff += "\n".join(unified_diff(orig.splitlines(), modified.splitlines(), fromfile=fromfile, 
tofile=tofile, lineterm="")) if modified.endswith("\n"): diff += "\n" + else: + diff += "\n\\ No newline at end of file\n" return diff diff --git a/landoscript/src/landoscript/util/version.py b/landoscript/src/landoscript/util/version.py new file mode 100644 index 000000000..14d33be2d --- /dev/null +++ b/landoscript/src/landoscript/util/version.py @@ -0,0 +1,24 @@ +from mozilla_version.gecko import FirefoxVersion, GeckoVersion, ThunderbirdVersion +from mozilla_version.mobile import MobileVersion + +from landoscript.errors import LandoscriptError + +# A mapping of bump file prefixes to parsers for their contents. +_VERSION_CLASS_PER_BEGINNING_OF_PATH = { + "browser/": FirefoxVersion, + "config/milestone.txt": GeckoVersion, + "mobile/android/": MobileVersion, + "mail/": ThunderbirdVersion, +} + + +def find_what_version_parser_to_use(file): + version_classes = [cls for path, cls in _VERSION_CLASS_PER_BEGINNING_OF_PATH.items() if file.startswith(path)] + + number_of_version_classes = len(version_classes) + if number_of_version_classes > 1: + raise LandoscriptError(f'File "{file}" matched too many classes: {version_classes}') + if number_of_version_classes > 0: + return version_classes[0] + + raise LandoscriptError(f"Could not determine version class based on file path for {file}") diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index ae4a6618e..9846da31d 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -1,3 +1,4 @@ +import json from pathlib import Path from yarl import URL @@ -78,6 +79,40 @@ def setup_fetch_files_response(aioresponses, code, initial_values={}): aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=code, payload=payload) +def setup_fetch_files_responses(aioresponses, file_contents): + for fc in file_contents: + setup_fetch_files_response(aioresponses, 200, fc) + + +def setup_l10n_file_responses(aioresponses, l10n_bump_info, initial_values, expected_locales): + file_responses = {} + name = 
l10n_bump_info["name"] + ignore_config = l10n_bump_info.get("ignore_config", {}) + revision = initial_values[name]["revision"] + locales = initial_values[name]["locales"] + platforms = initial_values[name]["platforms"] + for pc in l10n_bump_info["platform_configs"]: + file_responses[pc["path"]] = "\n".join(expected_locales) + + changesets_data = {} + for locale in locales: + locale_platforms = [] + for platform in platforms: + if platform not in ignore_config.get(locale, []): + locale_platforms.append(platform) + + changesets_data[locale] = { + "pin": False, + "platforms": [], + "revision": revision, + "platforms": sorted(locale_platforms), + } + + file_responses[l10n_bump_info["path"]] = json.dumps(changesets_data) + + setup_fetch_files_response(aioresponses, 200, file_responses) + + def assert_lando_submission_response(requests, submit_uri, attempts=1): assert ("POST", submit_uri) in requests reqs = requests[("POST", submit_uri)] @@ -93,3 +128,200 @@ def assert_status_response(requests, status_uri, attempts=1): # there might be more than one in cases where we retry; we assume that # the requests are the same for all attempts assert len(reqs) == attempts + + +def assert_add_commit_response(action, commit_msg_strings, initial_values, expected_bumps): + # ensure metadata is correct + assert action["author"] == "Release Engineering Landoscript " + # we don't actually verify the value here; it's not worth the trouble of mocking + assert "date" in action + + # ensure required substrings are in the diff header + for msg in commit_msg_strings: + assert msg in action["commitmsg"] + + diffs = action["diff"].split("diff\n") + + # ensure expected bumps are present to a reasonable degree of certainty + for file, after in expected_bumps.items(): + for diff in diffs: + # if the version is the last line in the file it may or may not + # have a trailing newline. either way, there will be one (and + # only one) in the `-` line of the diff. account for this. 
+ # the `after` version will only have a newline if the file is + # intended to have one after the diff has been applied. + before = initial_values[file].rstrip("\n") + "\n" + if file in diff and f"\n-{before}+{after}" in diff: + break + else: + assert False, f"no version bump found for {file}: {diffs}" + + +def get_locale_block(locale, platforms, rev): + # fmt: off + locale_block = [ + f' "{locale}": {{', + ' "pin": false,', + ' "platforms": [' + ] + platform_entries = [] + for platform in sorted(platforms): + platform_entries.append(f' "{platform}"') + locale_block.extend(",\n".join(platform_entries).split("\n")) + locale_block.extend([ + " ],", + f' "revision": "{rev}"', + # closing brace omitted because these blocks are used to generate + # diffs, and in diffs, these end up using context from the subsequent + # locale + # " }", + ]) + # fmt: on + + return locale_block + + +def assert_l10n_bump_response(req, l10n_bump_info, expected_changes, initial_values, expected_values, dontbuild=False, ignore_closed_tree=True): + assert "json" in req.kwargs + assert "actions" in req.kwargs["json"] + create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] + + # when l10n bump is being down as part of something else, eg: merge day + # there may be create-commit actions that are unrelated to l10n + l10n_create_commit_actions = {} + for lbi in l10n_bump_info: + name = lbi["name"] + + for cca in create_commit_actions: + if name in cca["commitmsg"]: + l10n_create_commit_actions[name] = cca + + for lbi in l10n_bump_info: + name = lbi["name"] + action = l10n_create_commit_actions.get(name) + + if not action: + assert False, f"couldn't find create-commit action for {name}!" 
+ + if dontbuild: + assert "DONTBUILD" in action["commitmsg"] + + if ignore_closed_tree: + assert "CLOSED TREE" in action["commitmsg"] + + # ensure metadata is correct + assert action["author"] == "Release Engineering Landoscript " + # we don't actually verify the value here; it's not worth the trouble of mocking + assert "date" in action + + diffs = action["diff"].split("diff\n") + assert len(diffs) == 1 + diff = diffs[0] + + initial_locales = set(initial_values[name]["locales"]) + expected_locales = set(expected_values[name]["locales"]) + initial_platforms = set(initial_values[name]["platforms"]) + expected_platforms = set(expected_values[name]["platforms"]) + added_locales = expected_locales - initial_locales + removed_locales = initial_locales - expected_locales + + # ensure each expected locale has the new revision + before_rev = initial_values[name]["revision"] + after_rev = expected_values[name]["revision"] + + if before_rev != after_rev: + revision_replacements = diff.count(f'- "revision": "{before_rev}"\n+ "revision": "{after_rev}') + # even if new locales are added, we only expect revision replacements + # for initial ones that are not being removed. added locales are checked + # further down. + expected_revision_replacements = len(initial_locales - removed_locales) + assert revision_replacements == expected_revision_replacements, "wrong number of revisions replaced!" 
+ + # ensure any added locales are now present + if added_locales: + for locale in added_locales: + expected = "+" + "\n+".join(get_locale_block(locale, expected_platforms, after_rev)) + assert expected in diff + + # ensure any removed locales are no longer present + if removed_locales: + for locale in removed_locales: + expected = "-" + "\n-".join(get_locale_block(locale, expected_platforms, before_rev)) + assert expected in diff + + # ensure any added platforms are now present + added_platforms = expected_platforms - initial_platforms + for platform in added_platforms: + expected_additions = len(expected_locales) + for plats in lbi["ignore_config"].values(): + if platform in plats: + expected_additions -= 1 + expected = f'+ "{platform}"' + assert diff.count(expected) == expected_additions + + # ensure any removed platforms are no longer present + removed_platforms = initial_platforms - expected_platforms + for platform in removed_platforms: + expected_additions = len(expected_locales) + for plats in lbi["ignore_config"].values(): + if platform in plats: + expected_additions -= 1 + expected = f'- "{platform}"' + assert diff.count(expected) == expected_additions + + +def assert_merge_response( + artifact_dir, + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values={}, + expected_replacement_bumps={}, + end_tag="", + base_tag="", + target_ref="", +): + actions = req.kwargs["json"]["actions"] + action_names = [action["action"] for action in actions] + assert action_names == expected_actions + + tag_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "tag"] + if base_tag: + assert len(tag_actions) == 2 + # if it exists, base tag happens second + assert tag_actions[0]["name"] == end_tag + assert tag_actions[1]["name"] == base_tag + elif end_tag: + assert len(tag_actions) == 1 + assert tag_actions[0]["name"] == end_tag + + if "merge-onto" in expected_actions: + # `merge-onto` action w/ target revision, 
commit message, and `theirs` strategy + merge_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "merge-onto"] + assert len(merge_actions) == 1 + action = merge_actions[0] + assert action["target"] == target_ref + assert action["strategy"] == "theirs" + + # `create-commit` action. check diff for: + # - firefox version bumps + create_commit_actions = iter([action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"]) + if expected_bumps: + assert (artifact_dir / "public/build/version-bump.diff").exists() + + action = next(create_commit_actions) + + commit_msg_strings = ["Automatic version bump"] + assert_add_commit_response(action, commit_msg_strings, initial_values, expected_bumps) + + # - `replacements` bumps + # - `regex-replacements` bumps + # - CLOBBER + if expected_replacement_bumps: + assert (artifact_dir / "public/build/replacements.diff").exists() + + action = next(create_commit_actions) + + commit_msg_strings = ["Update configs"] + assert_add_commit_response(action, commit_msg_strings, initial_replacement_values, expected_replacement_bumps) diff --git a/landoscript/tests/test_l10n_bump.py b/landoscript/tests/test_l10n_bump.py index d3bb44793..ed1034c36 100644 --- a/landoscript/tests/test_l10n_bump.py +++ b/landoscript/tests/test_l10n_bump.py @@ -1,11 +1,10 @@ -import json import pytest from scriptworker.client import TaskVerificationError from simple_github.client import GITHUB_GRAPHQL_ENDPOINT from landoscript.script import async_main -from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_fetch_files_response +from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_l10n_file_responses, assert_l10n_bump_response def setup_treestatus_response(aioresponses, context, tree="repo_name", status="open", has_err=False):
aioresponses.get(url, status=200, payload=resp) -def get_locale_block(locale, platforms, rev): - # fmt: off - locale_block = [ - f' "{locale}": {{', - ' "pin": false,', - ' "platforms": [' - ] - platform_entries = [] - for platform in sorted(platforms): - platform_entries.append(f' "{platform}"') - locale_block.extend(",\n".join(platform_entries).split("\n")) - locale_block.extend([ - " ],", - f' "revision": "{rev}"', - # closing brace omitted because these blocks are used to generate - # diffs, and in diffs, these end up using context from the subsequent - # locale - # " }", - ]) - # fmt: on - - return locale_block - - -def assert_l10n_bump_response(req, l10n_bump_info, expected_changes, initial_values, expected_values, dontbuild, ignore_closed_tree): - assert "json" in req.kwargs - assert "actions" in req.kwargs["json"] - create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] - assert len(create_commit_actions) == expected_changes - - for lbi in l10n_bump_info: - name = lbi["name"] - - action = None - for cca in create_commit_actions: - if name in cca["commitmsg"]: - action = cca - - if not action: - assert False, f"couldn't find create-commit action for {name}!" 
- - if dontbuild: - assert "DONTBUILD" in action["commitmsg"] - - if ignore_closed_tree: - assert "CLOSED TREE" in action["commitmsg"] - - # ensure metadata is correct - assert action["author"] == "Release Engineering Landoscript " - # we don't actually verify the value here; it's not worth the trouble of mocking - assert "date" in action - - diffs = action["diff"].split("diff\n") - assert len(diffs) == 1 - diff = diffs[0] - - initial_locales = set(initial_values[name]["locales"]) - expected_locales = set(expected_values[name]["locales"]) - initial_platforms = set(initial_values[name]["platforms"]) - expected_platforms = set(expected_values[name]["platforms"]) - added_locales = expected_locales - initial_locales - removed_locales = initial_locales - expected_locales - - # ensure each expected locale has the new revision - before_rev = initial_values[name]["revision"] - after_rev = expected_values[name]["revision"] - - if before_rev != after_rev: - revision_replacements = diff.count(f'- "revision": "{before_rev}"\n+ "revision": "{after_rev}') - # even if new locales are added, we only expect revision replacements - # for initial ones that are not being removed. added locales are checked - # further down. - expected_revision_replacements = len(initial_locales - removed_locales) - assert revision_replacements == expected_revision_replacements, "wrong number of revisions replaced!" 
- - # ensure any added locales are now present - if added_locales: - for locale in added_locales: - expected = "+" + "\n+".join(get_locale_block(locale, expected_platforms, after_rev)) - assert expected in diff - - # ensure any removed locales are no longer present - if removed_locales: - for locale in removed_locales: - expected = "-" + "\n-".join(get_locale_block(locale, expected_platforms, before_rev)) - assert expected in diff - - # ensure any added platforms are now present - added_platforms = expected_platforms - initial_platforms - for platform in added_platforms: - expected_additions = len(expected_locales) - for plats in lbi["ignore_config"].values(): - if platform in plats: - expected_additions -= 1 - expected = f'+ "{platform}"' - assert diff.count(expected) == expected_additions - - # ensure any removed platforms are no longer present - removed_platforms = initial_platforms - expected_platforms - for platform in removed_platforms: - expected_additions = len(expected_locales) - for plats in lbi["ignore_config"].values(): - if platform in plats: - expected_additions -= 1 - expected = f'- "{platform}"' - assert diff.count(expected) == expected_additions - - -def setup_file_responses(aioresponses, l10n_bump_info, initial_values, expected_locales): - file_responses = {} - name = l10n_bump_info["name"] - ignore_config = l10n_bump_info.get("ignore_config", {}) - revision = initial_values[name]["revision"] - locales = initial_values[name]["locales"] - platforms = initial_values[name]["platforms"] - for pc in l10n_bump_info["platform_configs"]: - file_responses[pc["path"]] = "\n".join(expected_locales) - - changesets_data = {} - for locale in locales: - locale_platforms = [] - for platform in platforms: - if platform not in ignore_config.get(locale, []): - locale_platforms.append(platform) - - changesets_data[locale] = { - "pin": False, - "platforms": [], - "revision": revision, - "platforms": sorted(locale_platforms), - } - - 
file_responses[l10n_bump_info["path"]] = json.dumps(changesets_data) - - setup_fetch_files_response(aioresponses, 200, file_responses) - - @pytest.mark.asyncio @pytest.mark.parametrize( "l10n_bump_info,initial_values,expected_values,dry_run,dontbuild,ignore_closed_tree", @@ -704,7 +566,7 @@ async def test_success( # `setup_test`. we have to call it again for each bump info, because # the repository information exists in that part of the payload github_installation_responses("mozilla-l10n") - setup_file_responses(aioresponses, lbi, initial_values, expected_values[lbi["name"]]["locales"]) + setup_l10n_file_responses(aioresponses, lbi, initial_values, expected_values[lbi["name"]]["locales"]) revision = expected_values[lbi["name"]]["revision"] aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=200, payload={"data": {"repository": {"object": {"oid": revision}}}}) diff --git a/landoscript/tests/test_merge_day.py b/landoscript/tests/test_merge_day.py new file mode 100644 index 000000000..fecf72571 --- /dev/null +++ b/landoscript/tests/test_merge_day.py @@ -0,0 +1,655 @@ +from collections import defaultdict +import json +from os import major +import pytest +from scriptworker.client import TaskVerificationError +from simple_github.client import GITHUB_GRAPHQL_ENDPOINT + +from landoscript.script import async_main + +from .conftest import ( + assert_lando_submission_response, + assert_status_response, + setup_fetch_files_responses, + setup_test, + assert_merge_response, +) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "merge_info,dry_run,initial_values,expected_bumps,initial_replacement_values,expected_replacement_bumps,expected_actions,end_tag", + ( + pytest.param( + { + "end_tag": "FIREFOX_NIGHTLY_{major_version}_END", + "to_branch": "central", + "replacements": [ + [ + "services/sync/modules/constants.sys.mjs", + 'WEAVE_VERSION = "1.{current_weave_version}.0"', + 'WEAVE_VERSION = "1.{next_weave_version}.0"', + ] + ], + "version_files": [ + {"filename": 
"config/milestone.txt", "new_suffix": "a1", "version_bump": "major"}, + {"filename": "browser/config/version.txt", "new_suffix": "a1", "version_bump": "major"}, + {"filename": "browser/config/version_display.txt", "new_suffix": "a1", "version_bump": "major"}, + {"filename": "mobile/android/version.txt", "new_suffix": "a1", "version_bump": "major"}, + ], + "merge_old_head": False, + "fetch_version_from": "browser/config/version.txt", + }, + False, + { + "browser/config/version.txt": "137.0a1", + "browser/config/version_display.txt": "137.0a1", + "config/milestone.txt": "137.0a1", + "mobile/android/version.txt": "137.0a1", + }, + { + "browser/config/version.txt": "138.0a1", + "browser/config/version_display.txt": "138.0a1", + "config/milestone.txt": "138.0a1", + "mobile/android/version.txt": "138.0a1", + }, + { + "services/sync/modules/constants.sys.mjs": 'export const WEAVE_VERSION = "1.139.0";', + }, + { + "services/sync/modules/constants.sys.mjs": 'export const WEAVE_VERSION = "1.140.0";', + }, + # end tag, bump configs, bump replacements + ["tag", "create-commit", "create-commit"], + "FIREFOX_NIGHTLY_137_END", + id="bump_central", + ), + pytest.param( + { + "end_tag": "FIREFOX_NIGHTLY_{major_version}_END", + "to_branch": "central", + "replacements": [ + [ + "services/sync/modules/constants.sys.mjs", + 'WEAVE_VERSION = "1.{current_weave_version}.0"', + 'WEAVE_VERSION = "1.{next_weave_version}.0"', + ] + ], + "version_files": [ + {"filename": "config/milestone.txt", "new_suffix": "a1", "version_bump": "major"}, + {"filename": "browser/config/version.txt", "new_suffix": "a1", "version_bump": "major"}, + {"filename": "browser/config/version_display.txt", "new_suffix": "a1", "version_bump": "major"}, + {"filename": "mobile/android/version.txt", "new_suffix": "a1", "version_bump": "major"}, + ], + "merge_old_head": False, + "fetch_version_from": "browser/config/version.txt", + }, + True, + { + "browser/config/version.txt": "137.0a1", + 
"browser/config/version_display.txt": "137.0a1", + "config/milestone.txt": "137.0a1", + "mobile/android/version.txt": "137.0a1", + }, + { + "browser/config/version.txt": "138.0a1", + "browser/config/version_display.txt": "138.0a1", + "config/milestone.txt": "138.0a1", + "mobile/android/version.txt": "138.0a1", + }, + { + "services/sync/modules/constants.sys.mjs": 'export const WEAVE_VERSION = "1.139.0";', + }, + { + "services/sync/modules/constants.sys.mjs": 'export const WEAVE_VERSION = "1.140.0";', + }, + # end tag, bump configs, bump replacements + ["tag", "create-commit", "create-commit"], + "FIREFOX_NIGHTLY_137_END", + id="bump_central_dry_run", + ), + pytest.param( + { + "end_tag": "FIREFOX_NIGHTLY_{major_version}_END", + "to_branch": "central", + "regex_replacements": [ + [ + "browser/extensions/webcompat/manifest.json", + '"version": "[0-9]+.[0-9]+.0"', + '"version": "{next_major_version}.0.0"', + ] + ], + "merge_old_head": False, + "fetch_version_from": "browser/config/version.txt", + }, + False, + { + "browser/config/version.txt": "137.0a1", + }, + {}, + { + "browser/extensions/webcompat/manifest.json": '{"version": "137.5.0"}\n', + }, + { + "browser/extensions/webcompat/manifest.json": '{"version": "138.0.0"}\n', + }, + # end tag, bump replacements + ["tag", "create-commit"], + "FIREFOX_NIGHTLY_137_END", + id="regex_replacements", + ), + ), +) +async def test_success_bump_central( + aioresponses, + github_installation_responses, + context, + merge_info, + dry_run, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_bumps, + expected_actions, + end_tag, +): + payload = { + "actions": ["merge_day"], + "lando_repo": "repo_name", + "merge_info": merge_info, + "dry_run": dry_run, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) + + setup_fetch_files_responses( + aioresponses, + [ + # existing version in `to_branch` + {merge_info["fetch_version_from"]: 
"137.0a1"}, + # fetch of original contents of files to bump, if we expect any replacements + initial_values if expected_bumps else {}, + # fetch of original contents of `replacements` and `regex_replacements` files + initial_replacement_values if expected_replacement_bumps else {}, + # clobber file + {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, + ], + ) + + if not dry_run: + aioresponses.post( + submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} + ) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + if not dry_run: + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_bumps, + end_tag, + ) + assert_status_response(aioresponses.requests, status_uri) + else: + assert ("POST", submit_uri) not in aioresponses.requests + assert ("GET", status_uri) not in aioresponses.requests + + +@pytest.mark.asyncio +async def test_success_bump_esr(aioresponses, github_installation_responses, context): + merge_info = { + "to_branch": "esr128", + "version_files": [ + {"filename": "config/milestone.txt", "version_bump": "minor"}, + {"filename": "browser/config/version.txt", "version_bump": "minor"}, + {"filename": "browser/config/version_display.txt", "new_suffix": "esr", "version_bump": "minor"}, + ], + "merge_old_head": False, + "fetch_version_from": "browser/config/version.txt", + } + initial_values = { + "browser/config/version.txt": "128.9.0", + "browser/config/version_display.txt": "128.9.0esr", + "config/milestone.txt": "128.9.0", + } + expected_bumps = { + 
"browser/config/version.txt": "128.10.0", + "browser/config/version_display.txt": "128.10.0esr", + "config/milestone.txt": "128.10.0", + } + # end tag, bump configs, bump replacements + expected_actions = ["create-commit", "create-commit"] + payload = { + "actions": ["merge_day"], + "lando_repo": "repo_name", + "merge_info": merge_info, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) + + # version bump files are fetched in groups, by initial version + initial_values_by_expected_version = defaultdict(dict) + for file, version in expected_bumps.items(): + initial_values_by_expected_version[version][file] = initial_values[file] + + setup_fetch_files_responses( + aioresponses, + [ + # existing version in `to_branch` + {merge_info["fetch_version_from"]: "128.9.0"}, + # fetch of original contents of files to bump + *initial_values_by_expected_version.values(), + # clobber file + {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, + ], + ) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + ) + assert_status_response(aioresponses.requests, status_uri) + + +@pytest.mark.asyncio +async def test_success_early_to_late_beta(aioresponses, github_installation_responses, context): + merge_info = { + "to_branch": "beta", + "version_files": [], + "replacements": [ + [ + "build/defines.sh", + "EARLY_BETA_OR_EARLIER=1", + 
"EARLY_BETA_OR_EARLIER=", + ], + ], + "merge_old_head": False, + "fetch_version_from": "browser/config/version.txt", + } + initial_replacement_values = {"build/defines.sh": "EARLY_BETA_OR_EARLIER=1\n"} + expected_replacement_bumps = {"build/defines.sh": "EARLY_BETA_OR_EARLIER=\n"} + # bump configs + expected_actions = ["create-commit"] + payload = { + "actions": ["merge_day"], + "lando_repo": "repo_name", + "merge_info": merge_info, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) + + setup_fetch_files_responses( + aioresponses, + [ + # initial version fetch; technically not needed for this use case + # but it keeps the merge day code cleaner to keep it + {merge_info["fetch_version_from"]: "139.0"}, + # fetch of original contents of `replacements` file + initial_replacement_values, + # clobber file + {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, + ], + ) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + {}, + {}, + initial_replacement_values, + expected_replacement_bumps, + ) + assert_status_response(aioresponses.requests, status_uri) + + +@pytest.mark.asyncio +async def test_success_central_to_beta(aioresponses, github_installation_responses, context): + merge_info = { + "end_tag": "FIREFOX_BETA_{major_version}_END", + "base_tag": "FIREFOX_BETA_{major_version}_BASE", + "to_branch": "beta", + "from_branch": "central", + "replacements": [ + [ + 
"browser/config/mozconfigs/linux64/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [ + "browser/config/mozconfigs/win32/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [ + "browser/config/mozconfigs/win64/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [ + "browser/config/mozconfigs/macosx64/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [".arcconfig", "MOZILLACENTRAL", "BETA"], + ], + "version_files": [ + {"filename": "config/milestone.txt", "new_suffix": ""}, + {"filename": "browser/config/version.txt", "new_suffix": ""}, + {"filename": "browser/config/version_display.txt", "new_suffix": "b1"}, + {"filename": "mobile/android/version.txt", "new_suffix": "b1"}, + ], + "merge_old_head": True, + "fetch_version_from": "browser/config/version.txt", + } + # despite it looking weird, these beta looking versions _are_ the correct + # "before" versions after we've "merged" central into beta + initial_values = { + "browser/config/version.txt": "140.0a1", + "browser/config/version_display.txt": "140.0a1", + "config/milestone.txt": "140.0a1", + "mobile/android/version.txt": "140.0a1", + } + expected_bumps = { + "browser/config/version.txt": "140.0", + "browser/config/version_display.txt": "140.0b1", + "config/milestone.txt": "140.0", + "mobile/android/version.txt": "140.0b1", + } + initial_replacement_values = { + ".arcconfig": ' "repository.callsign": "MOZILLACENTRAL",', + "browser/config/mozconfigs/linux64/l10n-mozconfig": "ac_add_options --with-branding=browser/branding/nightly", + "browser/config/mozconfigs/win32/l10n-mozconfig": "ac_add_options --with-branding=browser/branding/nightly", + "browser/config/mozconfigs/win64/l10n-mozconfig": 
"ac_add_options --with-branding=browser/branding/nightly", + "browser/config/mozconfigs/macosx64/l10n-mozconfig": "ac_add_options --with-branding=browser/branding/nightly", + } + expected_replacement_values = { + ".arcconfig": ' "repository.callsign": "BETA",', + "browser/config/mozconfigs/linux64/l10n-mozconfig": "ac_add_options --enable-official-branding", + "browser/config/mozconfigs/win32/l10n-mozconfig": "ac_add_options --enable-official-branding", + "browser/config/mozconfigs/win64/l10n-mozconfig": "ac_add_options --enable-official-branding", + "browser/config/mozconfigs/macosx64/l10n-mozconfig": "ac_add_options --enable-official-branding", + } + # end tag, base tag, merge, version bump , replacements + expected_actions = ["tag", "tag", "merge-onto", "create-commit", "create-commit"] + base_tag = "FIREFOX_BETA_140_BASE" + end_tag = "FIREFOX_BETA_139_END" + target_ref = "central" + payload = { + "actions": ["merge_day"], + "lando_repo": "repo_name", + "merge_info": merge_info, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) + + # version bump files are fetched in groups, by initial version + initial_values_by_expected_version = defaultdict(dict) + for file, version in expected_bumps.items(): + initial_values_by_expected_version[version][file] = initial_values[file] + + setup_fetch_files_responses( + aioresponses, + [ + # existing version in `to_branch` + {merge_info["fetch_version_from"]: "139.0b11"}, + # existing version in `from_branch` + {merge_info["fetch_version_from"]: "140.0a1"}, + # fetch of original contents of files to bump + *initial_values_by_expected_version.values(), + # fetch of original contents of `replacements` and `regex_replacements` files + initial_replacement_values, + # clobber file + {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, + ], + ) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, 
"status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_values, + end_tag, + base_tag, + target_ref, + ) + assert_status_response(aioresponses.requests, status_uri) + + +@pytest.mark.asyncio +async def test_success_beta_to_release(aioresponses, github_installation_responses, context): + merge_info = { + "end_tag": "FIREFOX_RELEASE_{major_version}_END", + "base_tag": "FIREFOX_RELEASE_{major_version}_BASE", + "to_branch": "release", + "from_branch": "beta", + "replacements": [[".arcconfig", "BETA", "RELEASE"]], + "version_files": [ + {"filename": "browser/config/version_display.txt", "new_suffix": ""}, + {"filename": "mobile/android/version.txt", "new_suffix": ""}, + ], + "merge_old_head": True, + "fetch_version_from": "browser/config/version.txt", + } + # despite it looking weird, these beta looking versions _are_ the correct + # "before" versions after we've "merged" the beta branch into release + initial_values = { + "browser/config/version.txt": "136.0", + "browser/config/version_display.txt": "136.0b11", + "mobile/android/version.txt": "136.0b11", + } + expected_bumps = { + "browser/config/version_display.txt": "136.0", + "mobile/android/version.txt": "136.0", + } + initial_replacement_values = { + ".arcconfig": ' "repository.callsign": "BETA",', + } + expected_replacement_values = { + ".arcconfig": ' "repository.callsign": "RELEASE",', + } + # end tag, base tag, merge, version bump, replacements + expected_actions = ["tag", "tag", "merge-onto", 
"create-commit", "create-commit"] + base_tag = "FIREFOX_RELEASE_136_BASE" + end_tag = "FIREFOX_RELEASE_135_END" + target_ref = "beta" + payload = { + "actions": ["merge_day"], + "lando_repo": "repo_name", + "merge_info": merge_info, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) + + setup_fetch_files_responses( + aioresponses, + [ + # existing version in `to_branch` + {merge_info["fetch_version_from"]: "135.0"}, + # existing version in `from_branch` + {merge_info["fetch_version_from"]: "136.0"}, + # fetch of original contents of files to bump, if we expect any replacements + initial_values, + # fetch of original contents of `replacements` and `regex_replacements` files + initial_replacement_values, + # clobber file + {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, + ], + ) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_values, + end_tag, + base_tag, + target_ref, + ) + assert_status_response(aioresponses.requests, status_uri) + + +@pytest.mark.asyncio +async def test_success_release_to_esr(aioresponses, github_installation_responses, context): + merge_info = { + # yep...we use `BASE` on the `end_tag` for release-to-esr merges + "end_tag": "FIREFOX_ESR_{major_version}_BASE", + "to_branch": "esr128", + "replacements": [[".arcconfig", "RELEASE", 
"ESRONETWOEIGHT"]], + "version_files": [ + {"filename": "browser/config/version_display.txt", "new_suffix": "esr"}, + ], + "merge_old_head": False, + "fetch_version_from": "browser/config/version.txt", + } + initial_values = { + "browser/config/version_display.txt": "128.0", + } + expected_bumps = { + "browser/config/version_display.txt": "128.0esr", + } + initial_replacement_values = { + ".arcconfig": ' "repository.callsign": "RELEASE",', + } + expected_replacement_bumps = { + ".arcconfig": ' "repository.callsign": "ESRONETWOEIGHT",', + } + # end tag, version bump, replacements + expected_actions = ["tag", "create-commit", "create-commit"] + end_tag = "FIREFOX_ESR_128_BASE" + target_ref = "release" + payload = { + "actions": ["merge_day"], + "lando_repo": "repo_name", + "merge_info": merge_info, + "ignore_closed_tree": True, + } + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) + + setup_fetch_files_responses( + aioresponses, + [ + # existing version in `to_branch` + {merge_info["fetch_version_from"]: "128.0"}, + # fetch of original contents of files to bump, if we expect any replacements + initial_values if expected_bumps else {}, + # fetch of original contents of `replacements` and `regex_replacements` files + initial_replacement_values, + # clobber file + {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, + ], + ) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_merge_response( + context.config["artifact_dir"], + req, + 
expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_bumps, + end_tag, + target_ref=target_ref, + ) + assert_status_response(aioresponses.requests, status_uri) diff --git a/landoscript/tests/test_script.py b/landoscript/tests/test_script.py index 76751e220..d8df14360 100644 --- a/landoscript/tests/test_script.py +++ b/landoscript/tests/test_script.py @@ -1,12 +1,33 @@ from aiohttp import ClientResponseError +from collections import defaultdict import pytest from scriptworker.client import TaskVerificationError +from simple_github.client import GITHUB_GRAPHQL_ENDPOINT from landoscript.errors import LandoscriptError from landoscript.script import async_main -from .conftest import assert_lando_submission_response, assert_status_response, setup_test +from .conftest import ( + assert_l10n_bump_response, + assert_lando_submission_response, + assert_status_response, + setup_test, + assert_add_commit_response, + setup_l10n_file_responses, + assert_merge_response, + setup_fetch_files_response, + setup_fetch_files_responses, +) from .test_tag import assert_tag_response -from .test_version_bump import assert_add_commit_response, setup_fetch_files_response + + +def assert_success(req, commit_msg_strings, initial_values, expected_bumps): + assert "json" in req.kwargs + assert "actions" in req.kwargs["json"] + create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] + assert len(create_commit_actions) == 1 + action = create_commit_actions[0] + + assert_add_commit_response(action, commit_msg_strings, initial_values, expected_bumps) @pytest.mark.asyncio @@ -83,7 +104,7 @@ async def test_tag_and_bump(aioresponses, github_installation_responses, context assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() if not dry_run: req = assert_lando_submission_response(aioresponses.requests, submit_uri) - assert_add_commit_response(req, 
commit_msg_strings, initial_values, expected_bumps) + assert_success(req, commit_msg_strings, initial_values, expected_bumps) assert_status_response(aioresponses.requests, status_uri) assert_tag_response(req, tags) @@ -163,7 +184,7 @@ async def test_success_with_retries(aioresponses, github_installation_responses, await async_main(context) req = assert_lando_submission_response(aioresponses.requests, submit_uri, attempts=2) - assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps) + assert_success(req, commit_msg_strings, initial_values, expected_bumps) assert_status_response(aioresponses.requests, status_uri, attempts=2) assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() @@ -355,3 +376,213 @@ async def test_lando_polling_retry_on_failure(aioresponses, github_installation_ await async_main(context) assert_status_response(aioresponses.requests, status_uri, attempts=2) + + +@pytest.mark.asyncio +async def test_success_central_to_beta_merge_day(aioresponses, github_installation_responses, context): + # despite it looking weird, these beta looking versions _are_ the correct + # "before" versions after we've "merged" the central into beta + initial_values = { + "browser/config/version.txt": "139.0a1", + "browser/config/version_display.txt": "139.0a1", + "config/milestone.txt": "139.0a1", + "mobile/android/version.txt": "139.0a1", + } + expected_bumps = { + "browser/config/version.txt": "140.0", + "browser/config/version_display.txt": "140.0b1", + "config/milestone.txt": "140.0", + "mobile/android/version.txt": "140.0b1", + } + initial_replacement_values = { + ".arcconfig": ' "repository.callsign": "MOZILLACENTRAL",', + "browser/config/mozconfigs/linux64/l10n-mozconfig": "ac_add_options --with-branding=browser/branding/nightly", + "browser/config/mozconfigs/win32/l10n-mozconfig": "ac_add_options --with-branding=browser/branding/nightly", + "browser/config/mozconfigs/win64/l10n-mozconfig": "ac_add_options 
--with-branding=browser/branding/nightly", + "browser/config/mozconfigs/macosx64/l10n-mozconfig": "ac_add_options --with-branding=browser/branding/nightly", + } + expected_replacement_values = { + ".arcconfig": ' "repository.callsign": "BETA",', + "browser/config/mozconfigs/linux64/l10n-mozconfig": "ac_add_options --enable-official-branding", + "browser/config/mozconfigs/win32/l10n-mozconfig": "ac_add_options --enable-official-branding", + "browser/config/mozconfigs/win64/l10n-mozconfig": "ac_add_options --enable-official-branding", + "browser/config/mozconfigs/macosx64/l10n-mozconfig": "ac_add_options --enable-official-branding", + } + # end tag, base tag, merge, version bump , replacements, mobile l10n bump, firefox l10n bump + expected_actions = ["tag", "tag", "merge-onto", "create-commit", "create-commit", "create-commit", "create-commit"] + base_tag = "FIREFOX_BETA_140_BASE" + end_tag = "FIREFOX_BETA_139_END" + target_ref = "central" + initial_l10n_changesets = { + "Firefox l10n changesets": { + "revision": "abcdef", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + "Mobile l10n changesets": { + "revision": "abcdef", + "locales": ["de", "ja"], + "platforms": ["android", "android-arm"], + }, + } + expected_l10n_changesets = { + "Firefox l10n changesets": { + "revision": "ghijkl", + "locales": ["af", "ja", "ja-JP-mac", "zh-TW"], + "platforms": ["linux64", "macosx64", "win64"], + }, + "Mobile l10n changesets": { + "revision": "ghijkl", + "locales": ["de", "ja"], + "platforms": ["android", "android-arm"], + }, + } + l10n_bump_info = [ + { + "ignore_config": { + "ja": ["macosx64"], + "ja-JP-mac": ["linux64", "win64"], + }, + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Firefox l10n changesets", + "path": "browser/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "browser/locales/shipped-locales", + "platforms": ["linux64", 
"macosx64", "win64"], + } + ], + }, + { + "l10n_repo_target_branch": "main", + "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", + "name": "Mobile l10n changesets", + "path": "mobile/locales/l10n-changesets.json", + "platform_configs": [ + { + "path": "mobile/android/locales/all-locales", + "platforms": ["android", "android-arm"], + } + ], + }, + ] + merge_info = { + "end_tag": "FIREFOX_BETA_{major_version}_END", + "to_repo": "https://hg.mozilla.org/releases/mozilla-beta", + "base_tag": "FIREFOX_BETA_{major_version}_BASE", + "from_repo": "https://hg.mozilla.org/mozilla-central", + "to_branch": "beta", + "from_branch": "central", + "replacements": [ + [ + "browser/config/mozconfigs/linux64/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [ + "browser/config/mozconfigs/win32/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [ + "browser/config/mozconfigs/win64/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [ + "browser/config/mozconfigs/macosx64/l10n-mozconfig", + "ac_add_options --with-branding=browser/branding/nightly", + "ac_add_options --enable-official-branding", + ], + [".arcconfig", "MOZILLACENTRAL", "BETA"], + ], + "version_files": [ + {"filename": "config/milestone.txt", "new_suffix": ""}, + {"filename": "browser/config/version.txt", "new_suffix": ""}, + {"filename": "browser/config/version_display.txt", "new_suffix": "b1"}, + {"filename": "mobile/android/version.txt", "new_suffix": "b1"}, + ], + "merge_old_head": True, + "fetch_version_from": "browser/config/version.txt", + } + payload = { + "actions": ["merge_day", "l10n_bump"], + "lando_repo": "repo_name", + "l10n_bump_info": l10n_bump_info, + "merge_info": merge_info, + "ignore_closed_tree": True, + } + submit_uri, status_uri, job_id, scopes = 
setup_test(github_installation_responses, context, payload, ["merge_day", "l10n_bump"]) + + # version bump files are fetched in groups, by initial version + initial_values_by_expected_version = defaultdict(dict) + for file, version in expected_bumps.items(): + initial_values_by_expected_version[version][file] = initial_values[file] + + setup_fetch_files_responses( + aioresponses, + [ + # existing version in `to_branch` + {merge_info["fetch_version_from"]: "139.0b11"}, + # existing version in `from_branch` + {merge_info["fetch_version_from"]: "140.0a1"}, + # fetch of original contents of files to bump + *initial_values_by_expected_version.values(), + # fetch of original contents of `replacements` and `regex_replacements` files + initial_replacement_values, + # clobber file + {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, + ], + ) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + # because the github graphql endpoint is generic we need to make sure we create + # these responses in the correct order... + for lbi in l10n_bump_info: + # this is called once for the repository we're bumping files in in + # `setup_test`. 
we have to call it again for each bump info, because + # the repository information exists in that part of the payload + github_installation_responses("mozilla-l10n") + setup_l10n_file_responses(aioresponses, lbi, initial_l10n_changesets, expected_l10n_changesets[lbi["name"]]["locales"]) + revision = expected_l10n_changesets[lbi["name"]]["revision"] + aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=200, payload={"data": {"repository": {"object": {"oid": revision}}}}) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_values, + end_tag, + base_tag, + target_ref, + ) + expected_changes = 0 + for initial_info, expected_info in zip(initial_l10n_changesets.values(), expected_l10n_changesets.values()): + for k in initial_info.keys(): + if initial_info[k] != expected_info[k]: + expected_changes += 1 + break + + assert_l10n_bump_response(req, l10n_bump_info, expected_changes, initial_l10n_changesets, expected_l10n_changesets) + assert_status_response(aioresponses.requests, status_uri) diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index 4b2a45e3a..a26d91a2c 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -3,42 +3,20 @@ from landoscript.errors import LandoscriptError from landoscript.script import async_main -from landoscript.actions.version_bump import ALLOWED_BUMP_FILES, 
_VERSION_CLASS_PER_BEGINNING_OF_PATH +from landoscript.actions.version_bump import ALLOWED_BUMP_FILES +from landoscript.util.version import _VERSION_CLASS_PER_BEGINNING_OF_PATH -from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_fetch_files_response +from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_fetch_files_response, assert_add_commit_response -def assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps): +def assert_success(req, commit_msg_strings, initial_values, expected_bumps): assert "json" in req.kwargs assert "actions" in req.kwargs["json"] create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] assert len(create_commit_actions) == 1 action = create_commit_actions[0] - # ensure metadata is correct - assert action["author"] == "Release Engineering Landoscript " - # we don't actually verify the value here; it's not worth the trouble of mocking - assert "date" in action - - # ensure required substrings are in the diff header - for msg in commit_msg_strings: - assert msg in action["commitmsg"] - - diffs = action["diff"].split("diff\n") - - # ensure expected bumps are present to a reasonable degree of certainty - for file, after in expected_bumps.items(): - for diff in diffs: - # if the version is the last line in the file it may or may not - # have a trailing newline. either way, there will be one (and - # only one) in the `-` line of the diff. account for this. - # the `after` version will only have a newline if the file is - # intended to have one after the diff has been applied. 
- before = initial_values[file].rstrip("\n") + "\n" - if file in diff and f"\n-{before}+{after}" in diff: - break - else: - assert False, f"no version bump found for {file}: {diffs}" + assert_add_commit_response(action, commit_msg_strings, initial_values, expected_bumps) @@ -275,7 +253,7 @@ async def test_success_with_bumps(aioresponses, github_installation_responses, c assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() if not dryrun: req = assert_lando_submission_response(aioresponses.requests, submit_uri) - assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps) + assert_success(req, commit_msg_strings, initial_values, expected_bumps) assert_status_response(aioresponses.requests, status_uri) @@ -354,7 +332,7 @@ async def test_success_with_retries(aioresponses, github_installation_responses, await async_main(context) req = assert_lando_submission_response(aioresponses.requests, submit_uri, attempts=2) - assert_add_commit_response(req, commit_msg_strings, initial_values, expected_bumps) + assert_success(req, commit_msg_strings, initial_values, expected_bumps) assert_status_response(aioresponses.requests, status_uri, attempts=2) assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() From 5ef149f02ee509c12b1cfc9748a35aaead161bc5 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Wed, 9 Apr 2025 09:37:29 -0400 Subject: [PATCH 06/13] refactor(landoscript): create `run_test` helper that simplifies most of the happy path tests This new helper allows a huge simplification for most of the action-specific tests.
--- landoscript/tests/conftest.py | 64 +++++++ landoscript/tests/test_l10n_bump.py | 39 ++-- landoscript/tests/test_merge_day.py | 237 +++++++------------------ landoscript/tests/test_script.py | 42 +---- landoscript/tests/test_tag.py | 41 +---- landoscript/tests/test_version_bump.py | 48 ++--- 6 files changed, 168 insertions(+), 303 deletions(-) diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index 9846da31d..babcb0df2 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -6,6 +6,8 @@ from scriptworker.context import Context from simple_github.client import GITHUB_GRAPHQL_ENDPOINT +from landoscript.script import async_main + pytest_plugins = ("pytest-scriptworker-client",) here = Path(__file__).parent @@ -45,6 +47,25 @@ def privkey_file(datadir): return datadir / "test_private_key.pem" +def setup_treestatus_response(aioresponses, context, tree="repo_name", status="open", has_err=False): + url = f'{context.config["treestatus_url"]}/trees/{tree}' + if has_err: + aioresponses.get(url, status=500) + else: + resp = { + "result": { + "category": "development", + "log_id": 12345, + "message_of_the_day": "", + "reason": "", + "status": status, + "tags": [], + "tree": tree, + }, + } + aioresponses.get(url, status=200, payload=resp) + + def setup_test(github_installation_responses, context, payload, actions, repo="repo_name"): lando_repo = payload["lando_repo"] lando_api = context.config["lando_api"] @@ -62,6 +83,49 @@ def setup_test(github_installation_responses, context, payload, actions, repo="r return submit_uri, status_uri, job_id, scopes +async def run_test( + aioresponses, github_installation_responses, context, payload, actions, dry_run=False, assert_func=None, repo="repo_name", err=None, errmsg="" +): + submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, actions, repo) + + if not dry_run: + aioresponses.post( + submit_uri, status=202, payload={"job_id": job_id, 
"status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} + ) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + + # error cases and success cases are different enough that it's clearer to call + # `async_main` in different blocks than try to account for them both in one block. + if err: + try: + await async_main(context) + assert False, f"should've raised {err}" + except Exception as e: + assert isinstance(e, err) + assert errmsg in e.args[0] + else: + await async_main(context) + if not dry_run: + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_status_response(aioresponses.requests, status_uri) + if assert_func: + assert_func(req) + else: + assert ("POST", submit_uri) not in aioresponses.requests + assert ("GET", status_uri) not in aioresponses.requests + + def setup_fetch_files_response(aioresponses, code, initial_values={}): if initial_values: github_response = {} diff --git a/landoscript/tests/test_l10n_bump.py b/landoscript/tests/test_l10n_bump.py index ed1034c36..7e3735948 100644 --- a/landoscript/tests/test_l10n_bump.py +++ b/landoscript/tests/test_l10n_bump.py @@ -4,26 +4,15 @@ from landoscript.script import async_main -from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_l10n_file_responses, assert_l10n_bump_response - - -def setup_treestatus_response(aioresponses, context, tree="repo_name", status="open", has_err=False): - url = f'{context.config["treestatus_url"]}/trees/{tree}' - if has_err: - aioresponses.get(url, status=500) - else: - resp = { - "result": { - "category": "development", - "log_id": 12345, - "message_of_the_day": "", - "reason": "", - "status": status, - "tags": [], - "tree": tree, - }, - } - aioresponses.get(url, status=200, payload=resp) +from .conftest import ( + 
assert_lando_submission_response, + assert_status_response, + run_test, + setup_test, + setup_l10n_file_responses, + assert_l10n_bump_response, + setup_treestatus_response, +) @pytest.mark.asyncio @@ -675,16 +664,8 @@ async def test_l10n_repo_errors(aioresponses, github_installation_responses, con "lando_repo": "repo_name", "l10n_bump_info": l10n_bump_info, } - _, _, _, scopes = setup_test(github_installation_responses, context, payload, ["l10n_bump"]) setup_treestatus_response(aioresponses, context) - - context.task = {"payload": payload, "scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised TaskVerificationError" - except TaskVerificationError as e: - assert errmsg in e.args[0] + await run_test(aioresponses, github_installation_responses, context, payload, ["l10n_bump"], err=TaskVerificationError, errmsg=errmsg) @pytest.mark.asyncio diff --git a/landoscript/tests/test_merge_day.py b/landoscript/tests/test_merge_day.py index fecf72571..d270f5adc 100644 --- a/landoscript/tests/test_merge_day.py +++ b/landoscript/tests/test_merge_day.py @@ -1,19 +1,9 @@ from collections import defaultdict -import json -from os import major import pytest -from scriptworker.client import TaskVerificationError -from simple_github.client import GITHUB_GRAPHQL_ENDPOINT from landoscript.script import async_main -from .conftest import ( - assert_lando_submission_response, - assert_status_response, - setup_fetch_files_responses, - setup_test, - assert_merge_response, -) +from .conftest import run_test, setup_fetch_files_responses, assert_merge_response @pytest.mark.asyncio @@ -159,7 +149,6 @@ async def test_success_bump_central( "merge_info": merge_info, "dry_run": dry_run, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) setup_fetch_files_responses( aioresponses, @@ -175,26 +164,7 @@ async def test_success_bump_central( ], ) - if not dry_run: - aioresponses.post( - submit_uri, 
status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} - ) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) - - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - if not dry_run: - req = assert_lando_submission_response(aioresponses.requests, submit_uri) + def assert_func(req): assert_merge_response( context.config["artifact_dir"], req, @@ -205,10 +175,8 @@ async def test_success_bump_central( expected_replacement_bumps, end_tag, ) - assert_status_response(aioresponses.requests, status_uri) - else: - assert ("POST", submit_uri) not in aioresponses.requests - assert ("GET", status_uri) not in aioresponses.requests + + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], dry_run, assert_func) @pytest.mark.asyncio @@ -240,7 +208,6 @@ async def test_success_bump_esr(aioresponses, github_installation_responses, con "lando_repo": "repo_name", "merge_info": merge_info, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) # version bump files are fetched in groups, by initial version initial_values_by_expected_version = defaultdict(dict) @@ -259,30 +226,16 @@ async def test_success_bump_esr(aioresponses, github_installation_responses, con ], ) - aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) - - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) + def assert_func(req): + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, 
+ ) - req = assert_lando_submission_response(aioresponses.requests, submit_uri) - assert_merge_response( - context.config["artifact_dir"], - req, - expected_actions, - initial_values, - expected_bumps, - ) - assert_status_response(aioresponses.requests, status_uri) + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], assert_func=assert_func) @pytest.mark.asyncio @@ -309,7 +262,6 @@ async def test_success_early_to_late_beta(aioresponses, github_installation_resp "lando_repo": "repo_name", "merge_info": merge_info, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) setup_fetch_files_responses( aioresponses, @@ -324,32 +276,18 @@ async def test_success_early_to_late_beta(aioresponses, github_installation_resp ], ) - aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) + def assert_func(req): + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + {}, + {}, + initial_replacement_values, + expected_replacement_bumps, + ) - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - req = assert_lando_submission_response(aioresponses.requests, submit_uri) - assert_merge_response( - context.config["artifact_dir"], - req, - expected_actions, - {}, - {}, - initial_replacement_values, - expected_replacement_bumps, - ) - assert_status_response(aioresponses.requests, status_uri) + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], assert_func=assert_func) @pytest.mark.asyncio @@ -429,7 +367,6 @@ async def test_success_central_to_beta(aioresponses, github_installation_respons "lando_repo": "repo_name", "merge_info": 
merge_info, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) # version bump files are fetched in groups, by initial version initial_values_by_expected_version = defaultdict(dict) @@ -452,35 +389,21 @@ async def test_success_central_to_beta(aioresponses, github_installation_respons ], ) - aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) + def assert_func(req): + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_values, + end_tag, + base_tag, + target_ref, + ) - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - req = assert_lando_submission_response(aioresponses.requests, submit_uri) - assert_merge_response( - context.config["artifact_dir"], - req, - expected_actions, - initial_values, - expected_bumps, - initial_replacement_values, - expected_replacement_values, - end_tag, - base_tag, - target_ref, - ) - assert_status_response(aioresponses.requests, status_uri) + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], assert_func=assert_func) @pytest.mark.asyncio @@ -525,7 +448,6 @@ async def test_success_beta_to_release(aioresponses, github_installation_respons "lando_repo": "repo_name", "merge_info": merge_info, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) setup_fetch_files_responses( aioresponses, @@ -543,35 +465,21 @@ async def test_success_beta_to_release(aioresponses, github_installation_respons ], ) - aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, 
"status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) + def assert_func(req): + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + expected_replacement_values, + end_tag, + base_tag, + target_ref, + ) - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - req = assert_lando_submission_response(aioresponses.requests, submit_uri) - assert_merge_response( - context.config["artifact_dir"], - req, - expected_actions, - initial_values, - expected_bumps, - initial_replacement_values, - expected_replacement_values, - end_tag, - base_tag, - target_ref, - ) - assert_status_response(aioresponses.requests, status_uri) + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], assert_func=assert_func) @pytest.mark.asyncio @@ -609,7 +517,6 @@ async def test_success_release_to_esr(aioresponses, github_installation_response "merge_info": merge_info, "ignore_closed_tree": True, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day"]) setup_fetch_files_responses( aioresponses, @@ -625,31 +532,17 @@ async def test_success_release_to_esr(aioresponses, github_installation_response ], ) - aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) + def assert_func(req): + assert_merge_response( + context.config["artifact_dir"], + req, + expected_actions, + initial_values, + expected_bumps, + initial_replacement_values, + 
expected_replacement_bumps, + end_tag, + target_ref=target_ref, + ) - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - req = assert_lando_submission_response(aioresponses.requests, submit_uri) - assert_merge_response( - context.config["artifact_dir"], - req, - expected_actions, - initial_values, - expected_bumps, - initial_replacement_values, - expected_replacement_bumps, - end_tag, - target_ref=target_ref, - ) - assert_status_response(aioresponses.requests, status_uri) + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], assert_func=assert_func) diff --git a/landoscript/tests/test_script.py b/landoscript/tests/test_script.py index d8df14360..9302a37e3 100644 --- a/landoscript/tests/test_script.py +++ b/landoscript/tests/test_script.py @@ -10,6 +10,7 @@ assert_l10n_bump_response, assert_lando_submission_response, assert_status_response, + run_test, setup_test, assert_add_commit_response, setup_l10n_file_responses, @@ -80,33 +81,14 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): ), ) async def test_tag_and_bump(aioresponses, github_installation_responses, context, payload, dry_run, initial_values, expected_bumps, commit_msg_strings, tags): - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, payload["actions"]) setup_fetch_files_response(aioresponses, 200, initial_values) - if not dry_run: - aioresponses.post( - submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} - ) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) - - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() - if not dry_run: - req = 
assert_lando_submission_response(aioresponses.requests, submit_uri) + def assert_func(req): assert_success(req, commit_msg_strings, initial_values, expected_bumps) - assert_status_response(aioresponses.requests, status_uri) assert_tag_response(req, tags) + assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() + + await run_test(aioresponses, github_installation_responses, context, payload, payload["actions"], dry_run, assert_func) @pytest.mark.asyncio @@ -190,20 +172,14 @@ async def test_success_with_retries(aioresponses, github_installation_responses, @pytest.mark.asyncio -async def test_no_actions(github_installation_responses, context): +async def test_no_actions(aioresponses, github_installation_responses, context): payload = { "actions": [], "lando_repo": "repo_name", } - _, _, _, scopes = setup_test(github_installation_responses, context, payload, []) - - context.task = {"payload": payload, "scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised TaskVerificationError" - except TaskVerificationError as e: - assert "must provide at least one action!" in e.args[0] + await run_test( + aioresponses, github_installation_responses, context, payload, ["tag"], err=TaskVerificationError, errmsg="must provide at least one action!" 
+ ) @pytest.mark.asyncio diff --git a/landoscript/tests/test_tag.py b/landoscript/tests/test_tag.py index b22d9203c..4ff5aac26 100644 --- a/landoscript/tests/test_tag.py +++ b/landoscript/tests/test_tag.py @@ -1,9 +1,7 @@ import pytest from scriptworker.client import TaskVerificationError -from landoscript.script import async_main - -from .conftest import assert_lando_submission_response, assert_status_response, setup_test +from .conftest import run_test def assert_tag_response(req, tags): @@ -44,45 +42,18 @@ async def test_success(aioresponses, github_installation_responses, context, tag "tags": tags, "dry_run": dry_run, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["tag"]) - - if not dry_run: - aioresponses.post( - submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} - ) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) - - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - if not dry_run: - req = assert_lando_submission_response(aioresponses.requests, submit_uri) + def assert_func(req): assert_tag_response(req, tags) - assert_status_response(aioresponses.requests, status_uri) + + await run_test(aioresponses, github_installation_responses, context, payload, ["tag"], dry_run, assert_func) @pytest.mark.asyncio -async def test_no_tags(github_installation_responses, context): +async def test_no_tags(aioresponses, github_installation_responses, context): payload = { "actions": ["tag"], "lando_repo": "repo_name", "tags": [], } - _, _, _, scopes = setup_test(github_installation_responses, context, payload, ["tag"]) - - context.task = {"payload": payload, "scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised TaskVerificationError" - except TaskVerificationError as e: - 
assert "must provide at least one tag!" in e.args[0] + await run_test(aioresponses, github_installation_responses, context, payload, ["tag"], err=TaskVerificationError, errmsg="must provide at least one tag!") diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index a26d91a2c..7e3c3cc34 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -6,7 +6,7 @@ from landoscript.actions.version_bump import ALLOWED_BUMP_FILES from landoscript.util.version import _VERSION_CLASS_PER_BEGINNING_OF_PATH -from .conftest import assert_lando_submission_response, assert_status_response, setup_test, setup_fetch_files_response, assert_add_commit_response +from .conftest import assert_lando_submission_response, assert_status_response, run_test, setup_test, setup_fetch_files_response, assert_add_commit_response def assert_success(req, commit_msg_strings, initial_values, expected_bumps): @@ -228,33 +228,13 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): ), ) async def test_success_with_bumps(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) setup_fetch_files_response(aioresponses, 200, initial_values) dryrun = payload.get("dry_run", False) - if not dryrun: - aioresponses.post( - submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} - ) - - aioresponses.get( - status_uri, - status=200, - payload={ - "commits": ["abcdef123"], - "push_id": job_id, - "status": "completed", - }, - ) - - context.task = {"payload": payload, "scopes": scopes} - await async_main(context) - - assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() - if not dryrun: - req = 
assert_lando_submission_response(aioresponses.requests, submit_uri) + def assert_func(req): assert_success(req, commit_msg_strings, initial_values, expected_bumps) - assert_status_response(aioresponses.requests, status_uri) + + await run_test(aioresponses, github_installation_responses, context, payload, ["version_bump"], dryrun, assert_func) @pytest.mark.asyncio @@ -410,7 +390,7 @@ async def test_failure_to_fetch_files(aioresponses, github_installation_response ), ), ) -async def test_bad_bumpfile(github_installation_responses, context, files, first_bad_file): +async def test_bad_bumpfile(aioresponses, github_installation_responses, context, files, first_bad_file): payload = { "actions": ["version_bump"], "lando_repo": "repo_name", @@ -419,15 +399,15 @@ async def test_bad_bumpfile(github_installation_responses, context, files, first "next_version": "135.0", }, } - _, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) - - context.task = {"payload": payload, "scopes": scopes} - - try: - await async_main(context) - assert False, "should've raised TaskVerificationError" - except TaskVerificationError as e: - assert f"{first_bad_file} is not in version bump allowlist" in e.args[0] + await run_test( + aioresponses, + github_installation_responses, + context, + payload, + ["version_bump"], + err=TaskVerificationError, + errmsg=f"{first_bad_file} is not in version bump allowlist", + ) def test_no_overlaps_in_version_classes(): From 8e30bbbe0b41d701f034fe58cc75ed9bc6b4d71f Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Wed, 9 Apr 2025 09:37:36 -0400 Subject: [PATCH 07/13] feat(landoscript): implement android l10n actions The implementation of this is a fairly significant departure from the treescript one in two ways: * It supports these actions without having a Gecko or `android-l10n` tree available. This necessitated fetching the remote file listings of these repositories, and using `moz.l10n` instead of `compare-locales`. 
* I've fully separated out implementation of the actions. Although at a very high level they look similar, the details are different enough that IMO it's much easier and better to duplicate some of the code rather than add the indirection to avoid it. (I found it very difficult to read the treescript implementation because of this, and I didn't want to do the same here). A necessary part of doing this was some enhancements to `diff_contents` to support added and removed files properly. --- landoscript/requirements/base.in | 2 + landoscript/requirements/base.txt | 658 ++++++++++-------- landoscript/requirements/local.txt | 18 +- landoscript/requirements/test.txt | 146 ++-- .../actions/android_l10n_import.py | 106 +++ .../landoscript/actions/android_l10n_sync.py | 95 +++ .../data/landoscript_task_schema.json | 4 + landoscript/src/landoscript/script.py | 14 +- landoscript/src/landoscript/util/diffs.py | 40 +- landoscript/src/landoscript/util/l10n.py | 31 + landoscript/tests/conftest.py | 80 ++- landoscript/tests/test_android_l10n_import.py | 301 ++++++++ landoscript/tests/test_android_l10n_sync.py | 249 +++++++ landoscript/tests/test_merge_day.py | 128 ++-- landoscript/tests/test_script.py | 42 +- landoscript/tests/test_tag.py | 2 +- landoscript/tests/test_version_bump.py | 21 +- 17 files changed, 1431 insertions(+), 506 deletions(-) create mode 100644 landoscript/src/landoscript/actions/android_l10n_import.py create mode 100644 landoscript/src/landoscript/actions/android_l10n_sync.py create mode 100644 landoscript/src/landoscript/util/l10n.py create mode 100644 landoscript/tests/test_android_l10n_import.py create mode 100644 landoscript/tests/test_android_l10n_sync.py diff --git a/landoscript/requirements/base.in b/landoscript/requirements/base.in index e46d914ea..053d09811 100644 --- a/landoscript/requirements/base.in +++ b/landoscript/requirements/base.in @@ -2,5 +2,7 @@ aiohttp async-timeout gql mozilla-version +moz.l10n scriptworker +tomli yarl diff --git 
a/landoscript/requirements/base.txt b/landoscript/requirements/base.txt index 055bd3fe2..0a196ce54 100644 --- a/landoscript/requirements/base.txt +++ b/landoscript/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:351fe9debb87f5ae11df3160c740236641413bbc +# SHA1:3690783e2c8909f3c65e062a20a2edae3b54e1ef # # This file is autogenerated by pip-compile-multi # To update, run: @@ -9,88 +9,88 @@ aiohappyeyeballs==2.6.1 \ --hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \ --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 # via aiohttp -aiohttp==3.11.14 \ - --hash=sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73 \ - --hash=sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e \ - --hash=sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3 \ - --hash=sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0 \ - --hash=sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f \ - --hash=sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881 \ - --hash=sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de \ - --hash=sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5 \ - --hash=sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a \ - --hash=sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0 \ - --hash=sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45 \ - --hash=sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6 \ - --hash=sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647 \ - --hash=sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b \ - --hash=sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06 \ - --hash=sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948 \ - 
--hash=sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3 \ - --hash=sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c \ - --hash=sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307 \ - --hash=sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77 \ - --hash=sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be \ - --hash=sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3 \ - --hash=sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b \ - --hash=sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff \ - --hash=sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228 \ - --hash=sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce \ - --hash=sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186 \ - --hash=sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f \ - --hash=sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d \ - --hash=sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1 \ - --hash=sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a \ - --hash=sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c \ - --hash=sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49 \ - --hash=sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f \ - --hash=sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91 \ - --hash=sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc \ - --hash=sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1 \ - --hash=sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a \ - --hash=sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d \ - --hash=sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa \ - 
--hash=sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058 \ - --hash=sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28 \ - --hash=sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831 \ - --hash=sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f \ - --hash=sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50 \ - --hash=sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b \ - --hash=sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda \ - --hash=sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e \ - --hash=sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f \ - --hash=sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db \ - --hash=sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1 \ - --hash=sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3 \ - --hash=sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09 \ - --hash=sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990 \ - --hash=sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff \ - --hash=sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db \ - --hash=sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9 \ - --hash=sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734 \ - --hash=sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0 \ - --hash=sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0 \ - --hash=sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628 \ - --hash=sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6 \ - --hash=sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654 \ - --hash=sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f \ - 
--hash=sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7 \ - --hash=sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc \ - --hash=sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534 \ - --hash=sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3 \ - --hash=sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a \ - --hash=sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1 \ - --hash=sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c \ - --hash=sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3 \ - --hash=sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33 \ - --hash=sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d \ - --hash=sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa \ - --hash=sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618 \ - --hash=sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914 \ - --hash=sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b \ - --hash=sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325 \ - --hash=sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965 \ - --hash=sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3 +aiohttp==3.11.16 \ + --hash=sha256:004511d3413737700835e949433536a2fe95a7d0297edd911a1e9705c5b5ea43 \ + --hash=sha256:0902e887b0e1d50424112f200eb9ae3dfed6c0d0a19fc60f633ae5a57c809656 \ + --hash=sha256:09b00dd520d88eac9d1768439a59ab3d145065c91a8fab97f900d1b5f802895e \ + --hash=sha256:0a2f451849e6b39e5c226803dcacfa9c7133e9825dcefd2f4e837a2ec5a3bb98 \ + --hash=sha256:0a950c2eb8ff17361abd8c85987fd6076d9f47d040ebffce67dce4993285e973 \ + --hash=sha256:0ad1fb47da60ae1ddfb316f0ff16d1f3b8e844d1a1e154641928ea0583d486ed \ + 
--hash=sha256:13ceac2c5cdcc3f64b9015710221ddf81c900c5febc505dbd8f810e770011540 \ + --hash=sha256:14461157d8426bcb40bd94deb0450a6fa16f05129f7da546090cebf8f3123b0f \ + --hash=sha256:16f8a2c9538c14a557b4d309ed4d0a7c60f0253e8ed7b6c9a2859a7582f8b1b8 \ + --hash=sha256:17ae4664031aadfbcb34fd40ffd90976671fa0c0286e6c4113989f78bebab37a \ + --hash=sha256:1ce63ae04719513dd2651202352a2beb9f67f55cb8490c40f056cea3c5c355ce \ + --hash=sha256:23a15727fbfccab973343b6d1b7181bfb0b4aa7ae280f36fd2f90f5476805682 \ + --hash=sha256:2540ddc83cc724b13d1838026f6a5ad178510953302a49e6d647f6e1de82bc34 \ + --hash=sha256:37dcee4906454ae377be5937ab2a66a9a88377b11dd7c072df7a7c142b63c37c \ + --hash=sha256:38bea84ee4fe24ebcc8edeb7b54bf20f06fd53ce4d2cc8b74344c5b9620597fd \ + --hash=sha256:3ab3367bb7f61ad18793fea2ef71f2d181c528c87948638366bf1de26e239183 \ + --hash=sha256:3ad1d59fd7114e6a08c4814983bb498f391c699f3c78712770077518cae63ff7 \ + --hash=sha256:3b4e6db8dc4879015b9955778cfb9881897339c8fab7b3676f8433f849425913 \ + --hash=sha256:3e061b09f6fa42997cf627307f220315e313ece74907d35776ec4373ed718b86 \ + --hash=sha256:42864e70a248f5f6a49fdaf417d9bc62d6e4d8ee9695b24c5916cb4bb666c802 \ + --hash=sha256:493910ceb2764f792db4dc6e8e4b375dae1b08f72e18e8f10f18b34ca17d0979 \ + --hash=sha256:4d0c970c0d602b1017e2067ff3b7dac41c98fef4f7472ec2ea26fd8a4e8c2149 \ + --hash=sha256:54eb3aead72a5c19fad07219acd882c1643a1027fbcdefac9b502c267242f955 \ + --hash=sha256:56a3443aca82abda0e07be2e1ecb76a050714faf2be84256dae291182ba59049 \ + --hash=sha256:576f5ca28d1b3276026f7df3ec841ae460e0fc3aac2a47cbf72eabcfc0f102e1 \ + --hash=sha256:58ede86453a6cf2d6ce40ef0ca15481677a66950e73b0a788917916f7e35a0bb \ + --hash=sha256:61c721764e41af907c9d16b6daa05a458f066015abd35923051be8705108ed17 \ + --hash=sha256:634d96869be6c4dc232fc503e03e40c42d32cfaa51712aee181e922e61d74814 \ + --hash=sha256:696ef00e8a1f0cec5e30640e64eca75d8e777933d1438f4facc9c0cdf288a810 \ + --hash=sha256:69a2cbd61788d26f8f1e626e188044834f37f6ae3f937bd9f08b65fc9d7e514e \ + 
--hash=sha256:6a792ce34b999fbe04a7a71a90c74f10c57ae4c51f65461a411faa70e154154e \ + --hash=sha256:6ac13b71761e49d5f9e4d05d33683bbafef753e876e8e5a7ef26e937dd766713 \ + --hash=sha256:6fdec0213244c39973674ca2a7f5435bf74369e7d4e104d6c7473c81c9bcc8c4 \ + --hash=sha256:72b1b03fb4655c1960403c131740755ec19c5898c82abd3961c364c2afd59fe7 \ + --hash=sha256:745f1ed5e2c687baefc3c5e7b4304e91bf3e2f32834d07baaee243e349624b24 \ + --hash=sha256:776c8e959a01e5e8321f1dec77964cb6101020a69d5a94cd3d34db6d555e01f7 \ + --hash=sha256:780df0d837276276226a1ff803f8d0fa5f8996c479aeef52eb040179f3156cbd \ + --hash=sha256:78e6e23b954644737e385befa0deb20233e2dfddf95dd11e9db752bdd2a294d3 \ + --hash=sha256:7951decace76a9271a1ef181b04aa77d3cc309a02a51d73826039003210bdc86 \ + --hash=sha256:7ba92a2d9ace559a0a14b03d87f47e021e4fa7681dc6970ebbc7b447c7d4b7cd \ + --hash=sha256:7f6428fee52d2bcf96a8aa7b62095b190ee341ab0e6b1bcf50c615d7966fd45b \ + --hash=sha256:87944bd16b7fe6160607f6a17808abd25f17f61ae1e26c47a491b970fb66d8cb \ + --hash=sha256:87a6e922b2b2401e0b0cf6b976b97f11ec7f136bfed445e16384fbf6fd5e8602 \ + --hash=sha256:8cb0688a8d81c63d716e867d59a9ccc389e97ac7037ebef904c2b89334407180 \ + --hash=sha256:8df6612df74409080575dca38a5237282865408016e65636a76a2eb9348c2567 \ + --hash=sha256:911a6e91d08bb2c72938bc17f0a2d97864c531536b7832abee6429d5296e5b27 \ + --hash=sha256:92b7ee222e2b903e0a4b329a9943d432b3767f2d5029dbe4ca59fb75223bbe2e \ + --hash=sha256:938f756c2b9374bbcc262a37eea521d8a0e6458162f2a9c26329cc87fdf06534 \ + --hash=sha256:9756d9b9d4547e091f99d554fbba0d2a920aab98caa82a8fb3d3d9bee3c9ae85 \ + --hash=sha256:98b88a2bf26965f2015a771381624dd4b0839034b70d406dc74fd8be4cc053e3 \ + --hash=sha256:9b751a6306f330801665ae69270a8a3993654a85569b3469662efaad6cf5cc50 \ + --hash=sha256:a2a450bcce4931b295fc0848f384834c3f9b00edfc2150baafb4488c27953de6 \ + --hash=sha256:a3814760a1a700f3cfd2f977249f1032301d0a12c92aba74605cfa6ce9f78489 \ + --hash=sha256:a5abcbba9f4b463a45c8ca8b7720891200658f6f46894f79517e6cd11f3405ca \ + 
--hash=sha256:a6db7458ab89c7d80bc1f4e930cc9df6edee2200127cfa6f6e080cf619eddfbd \ + --hash=sha256:ad497f38a0d6c329cb621774788583ee12321863cd4bd9feee1effd60f2ad133 \ + --hash=sha256:ad9509ffb2396483ceacb1eee9134724443ee45b92141105a4645857244aecc8 \ + --hash=sha256:bbcba75fe879ad6fd2e0d6a8d937f34a571f116a0e4db37df8079e738ea95c71 \ + --hash=sha256:c10d85e81d0b9ef87970ecbdbfaeec14a361a7fa947118817fcea8e45335fa46 \ + --hash=sha256:c15b2271c44da77ee9d822552201180779e5e942f3a71fb74e026bf6172ff287 \ + --hash=sha256:ca37057625693d097543bd88076ceebeb248291df9d6ca8481349efc0b05dcd0 \ + --hash=sha256:cc3a145479a76ad0ed646434d09216d33d08eef0d8c9a11f5ae5cdc37caa3540 \ + --hash=sha256:ccf10f16ab498d20e28bc2b5c1306e9c1512f2840f7b6a67000a517a4b37d5ee \ + --hash=sha256:cd464ba806e27ee24a91362ba3621bfc39dbbb8b79f2e1340201615197370f7c \ + --hash=sha256:d007aa39a52d62373bd23428ba4a2546eed0e7643d7bf2e41ddcefd54519842c \ + --hash=sha256:d0666afbe984f6933fe72cd1f1c3560d8c55880a0bdd728ad774006eb4241ecd \ + --hash=sha256:d07502cc14ecd64f52b2a74ebbc106893d9a9717120057ea9ea1fd6568a747e7 \ + --hash=sha256:d489d9778522fbd0f8d6a5c6e48e3514f11be81cb0a5954bdda06f7e1594b321 \ + --hash=sha256:df7db76400bf46ec6a0a73192b14c8295bdb9812053f4fe53f4e789f3ea66bbb \ + --hash=sha256:e3538bc9fe1b902bef51372462e3d7c96fce2b566642512138a480b7adc9d508 \ + --hash=sha256:e87fd812899aa78252866ae03a048e77bd11b80fb4878ce27c23cade239b42b2 \ + --hash=sha256:ecdb8173e6c7aa09eee342ac62e193e6904923bd232e76b4157ac0bfa670609f \ + --hash=sha256:f244b8e541f414664889e2c87cac11a07b918cb4b540c36f7ada7bfa76571ea2 \ + --hash=sha256:f4065145bf69de124accdd17ea5f4dc770da0a6a6e440c53f6e0a8c27b3e635c \ + --hash=sha256:f420bfe862fb357a6d76f2065447ef6f484bc489292ac91e29bc65d2d7a2c84d \ + --hash=sha256:f6ddd90d9fb4b501c97a4458f1c1720e42432c26cb76d28177c5b5ad4e332601 \ + --hash=sha256:fa73e8c2656a3653ae6c307b3f4e878a21f87859a9afab228280ddccd7369d71 \ + --hash=sha256:fadbb8f1d4140825069db3fedbbb843290fd5f5bc0a5dbd7eaf81d91bf1b003b \ + 
--hash=sha256:fb3d0cc5cdb926090748ea60172fa8a213cec728bd6c54eae18b96040fcd6227 \ + --hash=sha256:fb46bb0f24813e6cede6cc07b1961d4b04f331f7112a23b5e21f567da4ee50aa \ + --hash=sha256:fd36c119c5d6551bce374fcb5c19269638f8d09862445f85a5a48596fd59f4bb # via # -r requirements/base.in # scriptworker @@ -362,6 +362,10 @@ dictdiffer==0.9.0 \ --hash=sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578 \ --hash=sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595 # via scriptworker +fluent-syntax==0.19.0 \ + --hash=sha256:920326d7f46864b9758f0044e9968e3112198bc826acee16ddd8f11d359004fd \ + --hash=sha256:b352b3475fac6c6ed5f06527921f432aac073d764445508ee5218aeccc7cc5c4 + # via moz-l10n fqdn==1.5.1 \ --hash=sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f \ --hash=sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014 @@ -466,6 +470,10 @@ github3-py==4.0.1 \ --hash=sha256:30d571076753efc389edc7f9aaef338a4fcb24b54d8968d5f39b1342f45ddd36 \ --hash=sha256:a89af7de25650612d1da2f0609622bcdeb07ee8a45a1c06b2d16a05e4234e753 # via scriptworker +gitignorant==0.3.1 \ + --hash=sha256:d2b96ffe90eaf4fa2e1d68ad4e3f807213c8ec1e54a943a9e7105a9f7aaa4232 \ + --hash=sha256:eb51b3421761e41f594758ef1f93aa438f94856da8beea44ccd043f9c2373442 + # via moz-l10n giturlparse==0.12.0 \ --hash=sha256:412b74f2855f1da2fefa89fd8dde62df48476077a72fc19b62039554d27360eb \ --hash=sha256:c0fff7c21acc435491b1779566e038757a205c1ffdcb47e4f81ea52ad8c3859a @@ -490,6 +498,11 @@ immutabledict==4.2.1 \ --hash=sha256:c56a26ced38c236f79e74af3ccce53772827cef5c3bce7cab33ff2060f756373 \ --hash=sha256:d91017248981c72eb66c8ff9834e99c2f53562346f23e7f51e7a5ebcf66a3bcc # via scriptworker +iniparse==0.5 \ + --hash=sha256:88ca60473b1637055a937933d48840be1b1b6835f381a6158ef118a532583675 \ + --hash=sha256:932e5239d526e7acb504017bb707be67019ac428a6932368e6851691093aa842 \ + --hash=sha256:db6ef1d8a02395448e0e7b17ac0aa28b8d338b632bbd1ffca08c02ddae32cf97 + # via 
moz-l10n isoduration==20.11.0 \ --hash=sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9 \ --hash=sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042 @@ -591,6 +604,10 @@ mohawk==1.1.0 \ --hash=sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09 \ --hash=sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723 # via taskcluster +moz-l10n==0.6.2 \ + --hash=sha256:1b73a9fbf0b9f8e05bf24f026bd844ead4d6600b49f633ecf0ff735c3f5608fe \ + --hash=sha256:4bd141b4149dccd7ab294021655c0449b61e588d9e349c8ed928ca6d7d50c8b4 + # via -r requirements/base.in mozilla-repo-urls==0.1.1 \ --hash=sha256:30510d3519479aa70211145d0ac9cf6e2fadcb8d30fa3b196bb957bd773502ba \ --hash=sha256:7364da790751db2a060eb45adbf1d7db89a145ed279ba235f3425db9dd255915 @@ -695,105 +712,109 @@ multidict==6.2.0 \ # via # aiohttp # yarl -propcache==0.3.0 \ - --hash=sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e \ - --hash=sha256:03acd9ff19021bd0567582ac88f821b66883e158274183b9e5586f678984f8fe \ - --hash=sha256:03c091bb752349402f23ee43bb2bff6bd80ccab7c9df6b88ad4322258d6960fc \ - --hash=sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829 \ - --hash=sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863 \ - --hash=sha256:119e244ab40f70a98c91906d4c1f4c5f2e68bd0b14e7ab0a06922038fae8a20f \ - --hash=sha256:11ae6a8a01b8a4dc79093b5d3ca2c8a4436f5ee251a9840d7790dccbd96cb649 \ - --hash=sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6 \ - --hash=sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c \ - --hash=sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a \ - --hash=sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c \ - --hash=sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545 \ - --hash=sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e \ - 
--hash=sha256:26a67e5c04e3119594d8cfae517f4b9330c395df07ea65eab16f3d559b7068fe \ - --hash=sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075 \ - --hash=sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57 \ - --hash=sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf \ - --hash=sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d \ - --hash=sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc \ - --hash=sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0 \ - --hash=sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1 \ - --hash=sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64 \ - --hash=sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340 \ - --hash=sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db \ - --hash=sha256:46ed02532cb66612d42ae5c3929b5e98ae330ea0f3900bc66ec5f4862069519b \ - --hash=sha256:49ea05212a529c2caffe411e25a59308b07d6e10bf2505d77da72891f9a05641 \ - --hash=sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626 \ - --hash=sha256:507c5357a8d8b4593b97fb669c50598f4e6cccbbf77e22fa9598aba78292b4d7 \ - --hash=sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92 \ - --hash=sha256:58e6d2a5a7cb3e5f166fd58e71e9a4ff504be9dc61b88167e75f835da5764d07 \ - --hash=sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e \ - --hash=sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787 \ - --hash=sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a \ - --hash=sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810 \ - --hash=sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d \ - --hash=sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0 \ - --hash=sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b \ - 
--hash=sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043 \ - --hash=sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3 \ - --hash=sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7 \ - --hash=sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d \ - --hash=sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf \ - --hash=sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138 \ - --hash=sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c \ - --hash=sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d \ - --hash=sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46 \ - --hash=sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6 \ - --hash=sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa \ - --hash=sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e \ - --hash=sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05 \ - --hash=sha256:7cedd25e5f678f7738da38037435b340694ab34d424938041aa630d8bac42663 \ - --hash=sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f \ - --hash=sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c \ - --hash=sha256:8526b0941ec5a40220fc4dfde76aed58808e2b309c03e9fa8e2260083ef7157f \ - --hash=sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7 \ - --hash=sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f \ - --hash=sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7 \ - --hash=sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9 \ - --hash=sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667 \ - --hash=sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86 \ - --hash=sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51 \ - 
--hash=sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0 \ - --hash=sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a \ - --hash=sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c \ - --hash=sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568 \ - --hash=sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af \ - --hash=sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25 \ - --hash=sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5 \ - --hash=sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe \ - --hash=sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf \ - --hash=sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9 \ - --hash=sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf \ - --hash=sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767 \ - --hash=sha256:be90c94570840939fecedf99fa72839aed70b0ced449b415c85e01ae67422c90 \ - --hash=sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c \ - --hash=sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d \ - --hash=sha256:bf4298f366ca7e1ad1d21bbb58300a6985015909964077afd37559084590c929 \ - --hash=sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e \ - --hash=sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32 \ - --hash=sha256:cd54895e4ae7d32f1e3dd91261df46ee7483a735017dc6f987904f194aa5fd14 \ - --hash=sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8 \ - --hash=sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b \ - --hash=sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc \ - --hash=sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa \ - --hash=sha256:df03cd88f95b1b99052b52b1bb92173229d7a674df0ab06d2b25765ee8404bce \ - 
--hash=sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b \ - --hash=sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e \ - --hash=sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf \ - --hash=sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9 \ - --hash=sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac \ - --hash=sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f \ - --hash=sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374 \ - --hash=sha256:ee25f1ac091def37c4b59d192bbe3a206298feeb89132a470325bf76ad122a1e \ - --hash=sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d \ - --hash=sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e \ - --hash=sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121 \ - --hash=sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5 \ - --hash=sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54 +polib==1.2.0 \ + --hash=sha256:1c77ee1b81feb31df9bca258cbc58db1bbb32d10214b173882452c73af06d62d \ + --hash=sha256:f3ef94aefed6e183e342a8a269ae1fc4742ba193186ad76f175938621dbfc26b + # via moz-l10n +propcache==0.3.1 \ + --hash=sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e \ + --hash=sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b \ + --hash=sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf \ + --hash=sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b \ + --hash=sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5 \ + --hash=sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c \ + --hash=sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c \ + --hash=sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a \ + 
--hash=sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf \ + --hash=sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8 \ + --hash=sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5 \ + --hash=sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42 \ + --hash=sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035 \ + --hash=sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0 \ + --hash=sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e \ + --hash=sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46 \ + --hash=sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d \ + --hash=sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24 \ + --hash=sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d \ + --hash=sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de \ + --hash=sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf \ + --hash=sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7 \ + --hash=sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371 \ + --hash=sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833 \ + --hash=sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259 \ + --hash=sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136 \ + --hash=sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25 \ + --hash=sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005 \ + --hash=sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef \ + --hash=sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7 \ + --hash=sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f \ + --hash=sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53 \ + 
--hash=sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0 \ + --hash=sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb \ + --hash=sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566 \ + --hash=sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a \ + --hash=sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908 \ + --hash=sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf \ + --hash=sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458 \ + --hash=sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64 \ + --hash=sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9 \ + --hash=sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71 \ + --hash=sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b \ + --hash=sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5 \ + --hash=sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037 \ + --hash=sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5 \ + --hash=sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894 \ + --hash=sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe \ + --hash=sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757 \ + --hash=sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3 \ + --hash=sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976 \ + --hash=sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6 \ + --hash=sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641 \ + --hash=sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7 \ + --hash=sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649 \ + --hash=sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120 \ + 
--hash=sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd \ + --hash=sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40 \ + --hash=sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e \ + --hash=sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229 \ + --hash=sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c \ + --hash=sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7 \ + --hash=sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111 \ + --hash=sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654 \ + --hash=sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f \ + --hash=sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294 \ + --hash=sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da \ + --hash=sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f \ + --hash=sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7 \ + --hash=sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0 \ + --hash=sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073 \ + --hash=sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7 \ + --hash=sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11 \ + --hash=sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f \ + --hash=sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27 \ + --hash=sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70 \ + --hash=sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7 \ + --hash=sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519 \ + --hash=sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5 \ + --hash=sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180 \ + 
--hash=sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f \ + --hash=sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee \ + --hash=sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18 \ + --hash=sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815 \ + --hash=sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e \ + --hash=sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a \ + --hash=sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7 \ + --hash=sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6 \ + --hash=sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c \ + --hash=sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc \ + --hash=sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8 \ + --hash=sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98 \ + --hash=sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256 \ + --hash=sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5 \ + --hash=sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744 \ + --hash=sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723 \ + --hash=sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277 \ + --hash=sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5 # via # aiohttp # yarl @@ -904,114 +925,125 @@ rfc3986-validator==0.1.1 \ --hash=sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9 \ --hash=sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055 # via jsonschema -rich==13.9.4 \ - --hash=sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098 \ - --hash=sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90 +rich==14.0.0 \ + 
--hash=sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0 \ + --hash=sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725 # via cookiecutter -rpds-py==0.23.1 \ - --hash=sha256:09cd7dbcb673eb60518231e02874df66ec1296c01a4fcd733875755c02014b19 \ - --hash=sha256:0f3288930b947cbebe767f84cf618d2cbe0b13be476e749da0e6a009f986248c \ - --hash=sha256:0fced9fd4a07a1ded1bac7e961ddd9753dd5d8b755ba8e05acba54a21f5f1522 \ - --hash=sha256:112b8774b0b4ee22368fec42749b94366bd9b536f8f74c3d4175d4395f5cbd31 \ - --hash=sha256:11dd60b2ffddba85715d8a66bb39b95ddbe389ad2cfcf42c833f1bcde0878eaf \ - --hash=sha256:178f8a60fc24511c0eb756af741c476b87b610dba83270fce1e5a430204566a4 \ - --hash=sha256:1b08027489ba8fedde72ddd233a5ea411b85a6ed78175f40285bd401bde7466d \ - --hash=sha256:1bf5be5ba34e19be579ae873da515a2836a2166d8d7ee43be6ff909eda42b72b \ - --hash=sha256:1ed7de3c86721b4e83ac440751329ec6a1102229aa18163f84c75b06b525ad7e \ - --hash=sha256:1eedaaccc9bb66581d4ae7c50e15856e335e57ef2734dbc5fd8ba3e2a4ab3cb6 \ - --hash=sha256:243241c95174b5fb7204c04595852fe3943cc41f47aa14c3828bc18cd9d3b2d6 \ - --hash=sha256:26bb3e8de93443d55e2e748e9fd87deb5f8075ca7bc0502cfc8be8687d69a2ec \ - --hash=sha256:271fa2184cf28bdded86bb6217c8e08d3a169fe0bbe9be5e8d96e8476b707122 \ - --hash=sha256:28358c54fffadf0ae893f6c1050e8f8853e45df22483b7fff2f6ab6152f5d8bf \ - --hash=sha256:285019078537949cecd0190f3690a0b0125ff743d6a53dfeb7a4e6787af154f5 \ - --hash=sha256:2893d778d4671ee627bac4037a075168b2673c57186fb1a57e993465dbd79a93 \ - --hash=sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed \ - --hash=sha256:2c6ae11e6e93728d86aafc51ced98b1658a0080a7dd9417d24bfb955bb09c3c2 \ - --hash=sha256:2cfa07c346a7ad07019c33fb9a63cf3acb1f5363c33bc73014e20d9fe8b01cdd \ - --hash=sha256:35d5631ce0af26318dba0ae0ac941c534453e42f569011585cb323b7774502a5 \ - --hash=sha256:3614d280bf7aab0d3721b5ce0e73434acb90a2c993121b6e81a1c15c665298ac \ - 
--hash=sha256:3902df19540e9af4cc0c3ae75974c65d2c156b9257e91f5101a51f99136d834c \ - --hash=sha256:3aaf141d39f45322e44fc2c742e4b8b4098ead5317e5f884770c8df0c332da70 \ - --hash=sha256:3d8abf7896a91fb97e7977d1aadfcc2c80415d6dc2f1d0fca5b8d0df247248f3 \ - --hash=sha256:3e77febf227a1dc3220159355dba68faa13f8dca9335d97504abf428469fb18b \ - --hash=sha256:3e9212f52074fc9d72cf242a84063787ab8e21e0950d4d6709886fb62bcb91d5 \ - --hash=sha256:3ee9d6f0b38efb22ad94c3b68ffebe4c47865cdf4b17f6806d6c674e1feb4246 \ - --hash=sha256:4233df01a250b3984465faed12ad472f035b7cd5240ea3f7c76b7a7016084495 \ - --hash=sha256:4263320ed887ed843f85beba67f8b2d1483b5947f2dc73a8b068924558bfeace \ - --hash=sha256:4ab923167cfd945abb9b51a407407cf19f5bee35001221f2911dc85ffd35ff4f \ - --hash=sha256:4caafd1a22e5eaa3732acb7672a497123354bef79a9d7ceed43387d25025e935 \ - --hash=sha256:50fb62f8d8364978478b12d5f03bf028c6bc2af04082479299139dc26edf4c64 \ - --hash=sha256:55ff4151cfd4bc635e51cfb1c59ac9f7196b256b12e3a57deb9e5742e65941ad \ - --hash=sha256:5b98b6c953e5c2bda51ab4d5b4f172617d462eebc7f4bfdc7c7e6b423f6da957 \ - --hash=sha256:5c9ff044eb07c8468594d12602291c635da292308c8c619244e30698e7fc455a \ - --hash=sha256:5e9c206a1abc27e0588cf8b7c8246e51f1a16a103734f7750830a1ccb63f557a \ - --hash=sha256:5fb89edee2fa237584e532fbf78f0ddd1e49a47c7c8cfa153ab4849dc72a35e6 \ - --hash=sha256:633462ef7e61d839171bf206551d5ab42b30b71cac8f10a64a662536e057fdef \ - --hash=sha256:66f8d2a17e5838dd6fb9be6baaba8e75ae2f5fa6b6b755d597184bfcd3cb0eba \ - --hash=sha256:6959bb9928c5c999aba4a3f5a6799d571ddc2c59ff49917ecf55be2bbb4e3722 \ - --hash=sha256:698a79d295626ee292d1730bc2ef6e70a3ab135b1d79ada8fde3ed0047b65a10 \ - --hash=sha256:721f9c4011b443b6e84505fc00cc7aadc9d1743f1c988e4c89353e19c4a968ee \ - --hash=sha256:72e680c1518733b73c994361e4b06441b92e973ef7d9449feec72e8ee4f713da \ - --hash=sha256:75307599f0d25bf6937248e5ac4e3bde5ea72ae6618623b86146ccc7845ed00b \ - --hash=sha256:754fba3084b70162a6b91efceee8a3f06b19e43dac3f71841662053c0584209a \ - 
--hash=sha256:759462b2d0aa5a04be5b3e37fb8183615f47014ae6b116e17036b131985cb731 \ - --hash=sha256:7938c7b0599a05246d704b3f5e01be91a93b411d0d6cc62275f025293b8a11ce \ - --hash=sha256:7b77e07233925bd33fc0022b8537774423e4c6680b6436316c5075e79b6384f4 \ - --hash=sha256:7e5413d2e2d86025e73f05510ad23dad5950ab8417b7fc6beaad99be8077138b \ - --hash=sha256:7f3240dcfa14d198dba24b8b9cb3b108c06b68d45b7babd9eefc1038fdf7e707 \ - --hash=sha256:7f9682a8f71acdf59fd554b82b1c12f517118ee72c0f3944eda461606dfe7eb9 \ - --hash=sha256:8d67beb6002441faef8251c45e24994de32c4c8686f7356a1f601ad7c466f7c3 \ - --hash=sha256:9441af1d25aed96901f97ad83d5c3e35e6cd21a25ca5e4916c82d7dd0490a4fa \ - --hash=sha256:98b257ae1e83f81fb947a363a274c4eb66640212516becaff7bef09a5dceacaa \ - --hash=sha256:9e9f3a3ac919406bc0414bbbd76c6af99253c507150191ea79fab42fdb35982a \ - --hash=sha256:a1c66e71ecfd2a4acf0e4bd75e7a3605afa8f9b28a3b497e4ba962719df2be57 \ - --hash=sha256:a1e17d8dc8e57d8e0fd21f8f0f0a5211b3fa258b2e444c2053471ef93fe25a00 \ - --hash=sha256:a20cb698c4a59c534c6701b1c24a968ff2768b18ea2991f886bd8985ce17a89f \ - --hash=sha256:a970bfaf130c29a679b1d0a6e0f867483cea455ab1535fb427566a475078f27f \ - --hash=sha256:a98f510d86f689fcb486dc59e6e363af04151e5260ad1bdddb5625c10f1e95f8 \ - --hash=sha256:a9d3b728f5a5873d84cba997b9d617c6090ca5721caaa691f3b1a78c60adc057 \ - --hash=sha256:ad76f44f70aac3a54ceb1813ca630c53415da3a24fd93c570b2dfb4856591017 \ - --hash=sha256:ae28144c1daa61366205d32abd8c90372790ff79fc60c1a8ad7fd3c8553a600e \ - --hash=sha256:b03a8d50b137ee758e4c73638b10747b7c39988eb8e6cd11abb7084266455165 \ - --hash=sha256:b5a96fcac2f18e5a0a23a75cd27ce2656c66c11c127b0318e508aab436b77428 \ - --hash=sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c \ - --hash=sha256:b79f5ced71efd70414a9a80bbbfaa7160da307723166f09b69773153bf17c590 \ - --hash=sha256:b91cceb5add79ee563bd1f70b30896bd63bc5f78a11c1f00a1e931729ca4f1f4 \ - --hash=sha256:b92f5654157de1379c509b15acec9d12ecf6e3bc1996571b6cb82a4302060447 \ - 
--hash=sha256:c04ca91dda8a61584165825907f5c967ca09e9c65fe8966ee753a3f2b019fe1e \ - --hash=sha256:c1f8afa346ccd59e4e5630d5abb67aba6a9812fddf764fd7eb11f382a345f8cc \ - --hash=sha256:c5334a71f7dc1160382d45997e29f2637c02f8a26af41073189d79b95d3321f1 \ - --hash=sha256:c617d7453a80e29d9973b926983b1e700a9377dbe021faa36041c78537d7b08c \ - --hash=sha256:c632419c3870507ca20a37c8f8f5352317aca097639e524ad129f58c125c61c6 \ - --hash=sha256:c6760211eee3a76316cf328f5a8bd695b47b1626d21c8a27fb3b2473a884d597 \ - --hash=sha256:c698d123ce5d8f2d0cd17f73336615f6a2e3bdcedac07a1291bb4d8e7d82a05a \ - --hash=sha256:c76b32eb2ab650a29e423525e84eb197c45504b1c1e6e17b6cc91fcfeb1a4b1d \ - --hash=sha256:c8f7e90b948dc9dcfff8003f1ea3af08b29c062f681c05fd798e36daa3f7e3e8 \ - --hash=sha256:c9e799dac1ffbe7b10c1fd42fe4cd51371a549c6e108249bde9cd1200e8f59b4 \ - --hash=sha256:cafa48f2133d4daa028473ede7d81cd1b9f9e6925e9e4003ebdf77010ee02f35 \ - --hash=sha256:ce473a2351c018b06dd8d30d5da8ab5a0831056cc53b2006e2a8028172c37ce5 \ - --hash=sha256:d31ed4987d72aabdf521eddfb6a72988703c091cfc0064330b9e5f8d6a042ff5 \ - --hash=sha256:d550d7e9e7d8676b183b37d65b5cd8de13676a738973d330b59dc8312df9c5dc \ - --hash=sha256:d6adb81564af0cd428910f83fa7da46ce9ad47c56c0b22b50872bc4515d91966 \ - --hash=sha256:d6f6512a90bd5cd9030a6237f5346f046c6f0e40af98657568fa45695d4de59d \ - --hash=sha256:d7031d493c4465dbc8d40bd6cafefef4bd472b17db0ab94c53e7909ee781b9ef \ - --hash=sha256:d9f75a06ecc68f159d5d7603b734e1ff6daa9497a929150f794013aa9f6e3f12 \ - --hash=sha256:db7707dde9143a67b8812c7e66aeb2d843fe33cc8e374170f4d2c50bd8f2472d \ - --hash=sha256:e0397dd0b3955c61ef9b22838144aa4bef6f0796ba5cc8edfc64d468b93798b4 \ - --hash=sha256:e0df046f2266e8586cf09d00588302a32923eb6386ced0ca5c9deade6af9a149 \ - --hash=sha256:e14f86b871ea74c3fddc9a40e947d6a5d09def5adc2076ee61fb910a9014fb35 \ - --hash=sha256:e5963ea87f88bddf7edd59644a35a0feecf75f8985430124c253612d4f7d27ae \ - --hash=sha256:e768267cbe051dd8d1c5305ba690bb153204a09bf2e3de3ae530de955f5b5580 \ - 
--hash=sha256:e9cb79ecedfc156c0692257ac7ed415243b6c35dd969baa461a6888fc79f2f07 \ - --hash=sha256:ed6f011bedca8585787e5082cce081bac3d30f54520097b2411351b3574e1219 \ - --hash=sha256:f3429fb8e15b20961efca8c8b21432623d85db2228cc73fe22756c6637aa39e7 \ - --hash=sha256:f35eff113ad430b5272bbfc18ba111c66ff525828f24898b4e146eb479a2cdda \ - --hash=sha256:f3a6cb95074777f1ecda2ca4fa7717caa9ee6e534f42b7575a8f0d4cb0c24013 \ - --hash=sha256:f7356a6da0562190558c4fcc14f0281db191cdf4cb96e7604c06acfcee96df15 \ - --hash=sha256:f88626e3f5e57432e6191cd0c5d6d6b319b635e70b40be2ffba713053e5147dd \ - --hash=sha256:fad784a31869747df4ac968a351e070c06ca377549e4ace94775aaa3ab33ee06 \ - --hash=sha256:fc869af5cba24d45fb0399b0cfdbcefcf6910bf4dee5d74036a57cf5264b3ff4 \ - --hash=sha256:fee513135b5a58f3bb6d89e48326cd5aa308e4bcdf2f7d59f67c861ada482bf8 +rpds-py==0.24.0 \ + --hash=sha256:0047638c3aa0dbcd0ab99ed1e549bbf0e142c9ecc173b6492868432d8989a046 \ + --hash=sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724 \ + --hash=sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33 \ + --hash=sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc \ + --hash=sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032 \ + --hash=sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a \ + --hash=sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7 \ + --hash=sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c \ + --hash=sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718 \ + --hash=sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc \ + --hash=sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d \ + --hash=sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272 \ + --hash=sha256:24795c099453e3721fda5d8ddd45f5dfcc8e5a547ce7b8e9da06fecc3832e26f \ + 
--hash=sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d \ + --hash=sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b \ + --hash=sha256:2c13777ecdbbba2077670285dd1fe50828c8742f6a4119dbef6f83ea13ad10fb \ + --hash=sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef \ + --hash=sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b \ + --hash=sha256:32bab0a56eac685828e00cc2f5d1200c548f8bc11f2e44abf311d6b548ce2e45 \ + --hash=sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4 \ + --hash=sha256:369d9c6d4c714e36d4a03957b4783217a3ccd1e222cdd67d464a3a479fc17796 \ + --hash=sha256:3a55fc10fdcbf1a4bd3c018eea422c52cf08700cf99c28b5cb10fe97ab77a0d3 \ + --hash=sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c \ + --hash=sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9 \ + --hash=sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f \ + --hash=sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029 \ + --hash=sha256:493fe54318bed7d124ce272fc36adbf59d46729659b2c792e87c3b95649cdee9 \ + --hash=sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399 \ + --hash=sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586 \ + --hash=sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda \ + --hash=sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91 \ + --hash=sha256:5db385bacd0c43f24be92b60c857cf760b7f10d8234f4bd4be67b5b20a7c0b6b \ + --hash=sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a \ + --hash=sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c \ + --hash=sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405 \ + --hash=sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5 \ + --hash=sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143 \ + 
--hash=sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a \ + --hash=sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c \ + --hash=sha256:63981feca3f110ed132fd217bf7768ee8ed738a55549883628ee3da75bb9cb78 \ + --hash=sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0 \ + --hash=sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350 \ + --hash=sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7 \ + --hash=sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba \ + --hash=sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664 \ + --hash=sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a \ + --hash=sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56 \ + --hash=sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e \ + --hash=sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d \ + --hash=sha256:79e8d804c2ccd618417e96720ad5cd076a86fa3f8cb310ea386a3e6229bae7d1 \ + --hash=sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964 \ + --hash=sha256:8097b3422d020ff1c44effc40ae58e67d93e60d540a65649d2cdaf9466030791 \ + --hash=sha256:8205ee14463248d3349131bb8099efe15cd3ce83b8ef3ace63c7e976998e7124 \ + --hash=sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e \ + --hash=sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120 \ + --hash=sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad \ + --hash=sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc \ + --hash=sha256:8aa362811ccdc1f8dadcc916c6d47e554169ab79559319ae9fae7d7752d0d60c \ + --hash=sha256:8b3b397eefecec8e8e39fa65c630ef70a24b09141a6f9fc17b3c3a50bed6b50e \ + --hash=sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba \ + --hash=sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797 \ + 
--hash=sha256:921ae54f9ecba3b6325df425cf72c074cd469dea843fb5743a26ca7fb2ccb149 \ + --hash=sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5 \ + --hash=sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240 \ + --hash=sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034 \ + --hash=sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25 \ + --hash=sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7 \ + --hash=sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d \ + --hash=sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793 \ + --hash=sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba \ + --hash=sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d \ + --hash=sha256:a36b452abbf29f68527cf52e181fced56685731c86b52e852053e38d8b60bc8d \ + --hash=sha256:a5b66d1b201cc71bc3081bc2f1fc36b0c1f268b773e03bbc39066651b9e18391 \ + --hash=sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e \ + --hash=sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f \ + --hash=sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7 \ + --hash=sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd \ + --hash=sha256:afc6e35f344490faa8276b5f2f7cbf71f88bc2cda4328e00553bd451728c571f \ + --hash=sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb \ + --hash=sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea \ + --hash=sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e \ + --hash=sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052 \ + --hash=sha256:c30ff468163a48535ee7e9bf21bd14c7a81147c0e58a36c1078289a8ca7af0bd \ + --hash=sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47 \ + --hash=sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d \ + 
--hash=sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9 \ + --hash=sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8 \ + --hash=sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875 \ + --hash=sha256:cdabcd3beb2a6dca7027007473d8ef1c3b053347c76f685f5f060a00327b8b65 \ + --hash=sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e \ + --hash=sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114 \ + --hash=sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44 \ + --hash=sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9 \ + --hash=sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a \ + --hash=sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205 \ + --hash=sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164 \ + --hash=sha256:d8f9a6e7fd5434817526815f09ea27f2746c4a51ee11bb3439065f5fc754db58 \ + --hash=sha256:dbcbb6db5582ea33ce46a5d20a5793134b5365110d84df4e30b9d37c6fd40ad3 \ + --hash=sha256:e0f3ef95795efcd3b2ec3fe0a5bcfb5dadf5e3996ea2117427e524d4fbf309c6 \ + --hash=sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97 \ + --hash=sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6 \ + --hash=sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae \ + --hash=sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727 \ + --hash=sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098 \ + --hash=sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c \ + --hash=sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1 \ + --hash=sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8 \ + --hash=sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d \ + --hash=sha256:f5c0ed12926dec1dfe7d645333ea59cf93f4d07750986a586f511c0bc61fe103 \ + 
--hash=sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30 \ + --hash=sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d \ + --hash=sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5 \ + --hash=sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07 \ + --hash=sha256:fd822f019ccccd75c832deb7aa040bb02d70a92eb15a2f16c7987b7ad4ee8d83 # via # jsonschema # referencing @@ -1023,6 +1055,7 @@ six==1.17.0 \ --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 # via + # iniparse # mohawk # python-dateutil # rfc3339-validator @@ -1036,9 +1069,9 @@ sniffio==1.3.1 \ --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc # via anyio -taskcluster==83.3.0 \ - --hash=sha256:0e882ca827109c32d9feea39232fbbc68451fb238618cf9c45c1e5053a845cc5 \ - --hash=sha256:e32531a13bd5c6652b1ab8f7a7f917dedb9a63a1ae9af0ed8a83498afa036a85 +taskcluster==83.5.0 \ + --hash=sha256:0105fdad76c392f1c0e937f34b1dbc2b283627be6ab25a8a86e1ac03819e8680 \ + --hash=sha256:a56f30d94252372137d6d7899410d200b863eff603be3ae12395678f77b7b61a # via scriptworker taskcluster-taskgraph==14.0.0 \ --hash=sha256:04be28c01ea244930d25a0d0474aa8109c2d9273b74c628ab447b4356858d60f \ @@ -1055,15 +1088,50 @@ text-unidecode==1.3 \ --hash=sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8 \ --hash=sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93 # via python-slugify +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + 
--hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + 
--hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 + # via -r requirements/base.in types-python-dateutil==2.9.0.20241206 \ --hash=sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb \ --hash=sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53 # via arrow -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 +typing-extensions==4.13.1 \ + --hash=sha256:4b6cf02909eb5495cfbc3f6e8fd49217e6cc7944e145cdda8caa3734777f9e69 \ + --hash=sha256:98795af00fb9640edec5b8e31fc647597b4691f099ad75f469a2616be1a76dff # via # anyio + # fluent-syntax # referencing uri-template==1.3.0 \ --hash=sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7 \ diff --git a/landoscript/requirements/local.txt b/landoscript/requirements/local.txt index b70a5f790..914f633de 100644 --- a/landoscript/requirements/local.txt +++ b/landoscript/requirements/local.txt @@ -24,9 +24,9 @@ filelock==3.18.0 \ # via # tox # virtualenv -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb +platformdirs==4.3.7 \ + --hash=sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94 \ + --hash=sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351 # via # tox # virtualenv @@ -34,11 +34,11 @@ pyproject-api==1.9.0 \ --hash=sha256:326df9d68dea22d9d98b5243c46e3ca3161b07a1b9b18e213d1e24fd0e605766 \ --hash=sha256:7e8a9854b2dfb49454fae421cb86af43efbb2b2454e5646ffb7623540321ae6e # via tox -tox==4.24.2 \ - 
--hash=sha256:92e8290e76ad4e15748860a205865696409a2d014eedeb796a34a0f3b5e7336e \ - --hash=sha256:d5948b350f76fae436d6545a5e87c2b676ab7a0d7d88c1308651245eadbe8aea +tox==4.25.0 \ + --hash=sha256:4dfdc7ba2cc6fdc6688dde1b21e7b46ff6c41795fb54586c91a3533317b5255c \ + --hash=sha256:dd67f030317b80722cf52b246ff42aafd3ed27ddf331c415612d084304cf5e52 # via -r requirements/local.in -virtualenv==20.29.3 \ - --hash=sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170 \ - --hash=sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac +virtualenv==20.30.0 \ + --hash=sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8 \ + --hash=sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6 # via tox diff --git a/landoscript/requirements/test.txt b/landoscript/requirements/test.txt index ef915d1fa..5d22f40c4 100644 --- a/landoscript/requirements/test.txt +++ b/landoscript/requirements/test.txt @@ -10,76 +10,76 @@ aioresponses==0.7.8 \ --hash=sha256:b73bd4400d978855e55004b23a3a84cb0f018183bcf066a85ad392800b5b9a94 \ --hash=sha256:b861cdfe5dc58f3b8afac7b0a6973d5d7b2cb608dd0f6253d16b8ee8eaf6df11 # via pytest-aioresponses -coverage[toml]==7.7.0 \ - --hash=sha256:056d3017ed67e7ddf266e6f57378ece543755a4c9231e997789ab3bd11392c94 \ - --hash=sha256:0ce8cf59e09d31a4915ff4c3b94c6514af4c84b22c4cc8ad7c3c546a86150a92 \ - --hash=sha256:104bf640f408f4e115b85110047c7f27377e1a8b7ba86f7db4fa47aa49dc9a8e \ - --hash=sha256:1393e5aa9441dafb0162c36c8506c648b89aea9565b31f6bfa351e66c11bcd82 \ - --hash=sha256:1586ad158523f4133499a4f322b230e2cfef9cc724820dbd58595a5a236186f4 \ - --hash=sha256:180e3fc68ee4dc5af8b33b6ca4e3bb8aa1abe25eedcb958ba5cff7123071af68 \ - --hash=sha256:1b336d06af14f8da5b1f391e8dec03634daf54dfcb4d1c4fb6d04c09d83cef90 \ - --hash=sha256:1c8fbce80b2b8bf135d105aa8f5b36eae0c57d702a1cc3ebdea2a6f03f6cdde5 \ - --hash=sha256:2d673e3add00048215c2cc507f1228a7523fd8bf34f279ac98334c9b07bd2656 \ - 
--hash=sha256:316f29cc3392fa3912493ee4c83afa4a0e2db04ff69600711f8c03997c39baaa \ - --hash=sha256:33c1394d8407e2771547583b66a85d07ed441ff8fae5a4adb4237ad39ece60db \ - --hash=sha256:37cbc7b0d93dfd133e33c7ec01123fbb90401dce174c3b6661d8d36fb1e30608 \ - --hash=sha256:39abcacd1ed54e2c33c54bdc488b310e8ef6705833f7148b6eb9a547199d375d \ - --hash=sha256:3ab7090f04b12dc6469882ce81244572779d3a4b67eea1c96fb9ecc8c607ef39 \ - --hash=sha256:3b0e6e54591ae0d7427def8a4d40fca99df6b899d10354bab73cd5609807261c \ - --hash=sha256:416e2a8845eaff288f97eaf76ab40367deafb9073ffc47bf2a583f26b05e5265 \ - --hash=sha256:4545485fef7a8a2d8f30e6f79ce719eb154aab7e44217eb444c1d38239af2072 \ - --hash=sha256:4c124025430249118d018dcedc8b7426f39373527c845093132196f2a483b6dd \ - --hash=sha256:4fbb7a0c3c21908520149d7751cf5b74eb9b38b54d62997b1e9b3ac19a8ee2fe \ - --hash=sha256:52fc89602cde411a4196c8c6894afb384f2125f34c031774f82a4f2608c59d7d \ - --hash=sha256:55143aa13c49491f5606f05b49ed88663446dce3a4d3c5d77baa4e36a16d3573 \ - --hash=sha256:57f3bd0d29bf2bd9325c0ff9cc532a175110c4bf8f412c05b2405fd35745266d \ - --hash=sha256:5b2f144444879363ea8834cd7b6869d79ac796cb8f864b0cfdde50296cd95816 \ - --hash=sha256:5efdeff5f353ed3352c04e6b318ab05c6ce9249c25ed3c2090c6e9cadda1e3b2 \ - --hash=sha256:60e6347d1ed882b1159ffea172cb8466ee46c665af4ca397edbf10ff53e9ffaf \ - --hash=sha256:693d921621a0c8043bfdc61f7d4df5ea6d22165fe8b807cac21eb80dd94e4bbd \ - --hash=sha256:708f0a1105ef2b11c79ed54ed31f17e6325ac936501fc373f24be3e6a578146a \ - --hash=sha256:70f0925c4e2bfc965369f417e7cc72538fd1ba91639cf1e4ef4b1a6b50439b3b \ - --hash=sha256:7789e700f33f2b133adae582c9f437523cd5db8de845774988a58c360fc88253 \ - --hash=sha256:7b6c96d69928a3a6767fab8dc1ce8a02cf0156836ccb1e820c7f45a423570d98 \ - --hash=sha256:7d2a65876274acf544703e943c010b60bd79404e3623a1e5d52b64a6e2728de5 \ - --hash=sha256:7f18d47641282664276977c604b5a261e51fefc2980f5271d547d706b06a837f \ - --hash=sha256:89078312f06237417adda7c021c33f80f7a6d2db8572a5f6c330d89b080061ce \ - 
--hash=sha256:8c938c6ae59be67ac19a7204e079efc94b38222cd7d0269f96e45e18cddeaa59 \ - --hash=sha256:8e336b56301774ace6be0017ff85c3566c556d938359b61b840796a0202f805c \ - --hash=sha256:a0a207c87a9f743c8072d059b4711f8d13c456eb42dac778a7d2e5d4f3c253a7 \ - --hash=sha256:a2454b12a3f12cc4698f3508912e6225ec63682e2ca5a96f80a2b93cef9e63f3 \ - --hash=sha256:a538a23119d1e2e2ce077e902d02ea3d8e0641786ef6e0faf11ce82324743944 \ - --hash=sha256:aa4dff57fc21a575672176d5ab0ef15a927199e775c5e8a3d75162ab2b0c7705 \ - --hash=sha256:ad0edaa97cb983d9f2ff48cadddc3e1fb09f24aa558abeb4dc9a0dbacd12cbb4 \ - --hash=sha256:ae8006772c6b0fa53c33747913473e064985dac4d65f77fd2fdc6474e7cd54e4 \ - --hash=sha256:b0fac2088ec4aaeb5468b814bd3ff5e5978364bfbce5e567c44c9e2854469f6c \ - --hash=sha256:b3e212a894d8ae07fde2ca8b43d666a6d49bbbddb10da0f6a74ca7bd31f20054 \ - --hash=sha256:b54a1ee4c6f1905a436cbaa04b26626d27925a41cbc3a337e2d3ff7038187f07 \ - --hash=sha256:b667b91f4f714b17af2a18e220015c941d1cf8b07c17f2160033dbe1e64149f0 \ - --hash=sha256:b8c36093aca722db73633cf2359026ed7782a239eb1c6db2abcff876012dc4cf \ - --hash=sha256:bb356e7ae7c2da13f404bf8f75be90f743c6df8d4607022e759f5d7d89fe83f8 \ - --hash=sha256:bce730d484038e97f27ea2dbe5d392ec5c2261f28c319a3bb266f6b213650135 \ - --hash=sha256:c075d167a6ec99b798c1fdf6e391a1d5a2d054caffe9593ba0f97e3df2c04f0e \ - --hash=sha256:c4e09534037933bf6eb31d804e72c52ec23219b32c1730f9152feabbd7499463 \ - --hash=sha256:c5f8a5364fc37b2f172c26a038bc7ec4885f429de4a05fc10fdcb53fb5834c5c \ - --hash=sha256:cb203c0afffaf1a8f5b9659a013f8f16a1b2cad3a80a8733ceedc968c0cf4c57 \ - --hash=sha256:cc41374d2f27d81d6558f8a24e5c114580ffefc197fd43eabd7058182f743322 \ - --hash=sha256:cd879d4646055a573775a1cec863d00c9ff8c55860f8b17f6d8eee9140c06166 \ - --hash=sha256:d013c07061751ae81861cae6ec3a4fe04e84781b11fd4b6b4201590234b25c7b \ - --hash=sha256:d8c7524779003d59948c51b4fcbf1ca4e27c26a7d75984f63488f3625c328b9b \ - --hash=sha256:d9710521f07f526de30ccdead67e6b236fe996d214e1a7fba8b36e2ba2cd8261 \ - 
--hash=sha256:e1ffde1d6bc2a92f9c9207d1ad808550873748ac2d4d923c815b866baa343b3f \ - --hash=sha256:e7f559c36d5cdc448ee13e7e56ed7b6b5d44a40a511d584d388a0f5d940977ba \ - --hash=sha256:f2a1e18a85bd066c7c556d85277a7adf4651f259b2579113844835ba1a74aafd \ - --hash=sha256:f32b165bf6dfea0846a9c9c38b7e1d68f313956d60a15cde5d1709fddcaf3bee \ - --hash=sha256:f5a2f71d6a91238e7628f23538c26aa464d390cbdedf12ee2a7a0fb92a24482a \ - --hash=sha256:f81fe93dc1b8e5673f33443c0786c14b77e36f1025973b85e07c70353e46882b +coverage[toml]==7.8.0 \ + --hash=sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f \ + --hash=sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3 \ + --hash=sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05 \ + --hash=sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25 \ + --hash=sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe \ + --hash=sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257 \ + --hash=sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78 \ + --hash=sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada \ + --hash=sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64 \ + --hash=sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6 \ + --hash=sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28 \ + --hash=sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067 \ + --hash=sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733 \ + --hash=sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676 \ + --hash=sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23 \ + --hash=sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008 \ + --hash=sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd \ + 
--hash=sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3 \ + --hash=sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82 \ + --hash=sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545 \ + --hash=sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00 \ + --hash=sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47 \ + --hash=sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501 \ + --hash=sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d \ + --hash=sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814 \ + --hash=sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd \ + --hash=sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a \ + --hash=sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318 \ + --hash=sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3 \ + --hash=sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c \ + --hash=sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42 \ + --hash=sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a \ + --hash=sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6 \ + --hash=sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a \ + --hash=sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7 \ + --hash=sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487 \ + --hash=sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4 \ + --hash=sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2 \ + --hash=sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9 \ + --hash=sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd \ + --hash=sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73 \ + 
--hash=sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc \ + --hash=sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f \ + --hash=sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea \ + --hash=sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899 \ + --hash=sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a \ + --hash=sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543 \ + --hash=sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1 \ + --hash=sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7 \ + --hash=sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d \ + --hash=sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502 \ + --hash=sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b \ + --hash=sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040 \ + --hash=sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c \ + --hash=sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27 \ + --hash=sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c \ + --hash=sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d \ + --hash=sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4 \ + --hash=sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe \ + --hash=sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323 \ + --hash=sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883 \ + --hash=sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f \ + --hash=sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f # via # -r requirements/test.in # pytest-cov -iniconfig==2.0.0 \ - --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ - 
--hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 +iniconfig==2.1.0 \ + --hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \ + --hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760 # via pytest packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ @@ -103,11 +103,11 @@ pytest-aioresponses==0.3.0 \ --hash=sha256:5677b32dfa1a36908b347524b5867aab35ac1c5ce1d4970244d6f66009bca7b6 \ --hash=sha256:60f3124ff05a0210a5f369dd95e4cf66090774ba76b322f7178858ce4e6c1647 # via -r requirements/test.in -pytest-asyncio==0.25.3 \ - --hash=sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3 \ - --hash=sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a +pytest-asyncio==0.26.0 \ + --hash=sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0 \ + --hash=sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f # via -r requirements/test.in -pytest-cov==6.0.0 \ - --hash=sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35 \ - --hash=sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0 +pytest-cov==6.1.0 \ + --hash=sha256:cd7e1d54981d5185ef2b8d64b50172ce97e6f357e6df5cb103e828c7f993e201 \ + --hash=sha256:ec55e828c66755e5b74a21bd7cc03c303a9f928389c0563e50ba454a6dbe71db # via -r requirements/test.in diff --git a/landoscript/src/landoscript/actions/android_l10n_import.py b/landoscript/src/landoscript/actions/android_l10n_import.py new file mode 100644 index 000000000..4bdc03ef6 --- /dev/null +++ b/landoscript/src/landoscript/actions/android_l10n_import.py @@ -0,0 +1,106 @@ +import logging +import os.path +from pathlib import Path +from typing import TypedDict + +import tomli +from scriptworker.client import TaskVerificationError + +from landoscript.errors import LandoscriptError +from landoscript.lando import LandoAction, create_commit_action +from 
landoscript.util.diffs import diff_contents +from landoscript.util.l10n import L10nFile, getL10nFilesFromToml +from landoscript.util.log import log_file_contents +from scriptworker_client.github import extract_github_repo_owner_and_name +from scriptworker_client.github_client import GithubClient + +log = logging.getLogger(__name__) + + +class TomlInfo(TypedDict): + toml_path: str + dest_path: str + + +class AndroidL10nImportInfo(TypedDict): + from_repo_url: str + toml_info: list[TomlInfo] + + +async def run( + github_client: GithubClient, github_config: dict[str, str], public_artifact_dir: str, android_l10n_import_info: AndroidL10nImportInfo, to_branch: str +) -> LandoAction: + log.info("Preparing to import android l10n changesets.") + + l10n_repo_url = android_l10n_import_info.get("from_repo_url") + if not l10n_repo_url: + raise TaskVerificationError("Cannot bump l10n revisions from github repo without an l10n_repo_url") + l10n_owner, l10n_repo = extract_github_repo_owner_and_name(l10n_repo_url) + + async with GithubClient(github_config, l10n_owner, l10n_repo) as l10n_github_client: + toml_files = [info["toml_path"] for info in android_l10n_import_info["toml_info"]] + # we always take the tip of the default branch when importing new strings + toml_contents = await l10n_github_client.get_files(toml_files) + l10n_files: list[L10nFile] = [] + + for info in android_l10n_import_info["toml_info"]: + toml_file = info["toml_path"] + log.info(f"processing toml file: {toml_file}") + + if toml_contents[toml_file] is None: + raise LandoscriptError(f"toml_file '{toml_file}' is not present in repository") + + contents = tomli.loads(str(toml_contents[toml_file])) + src_file_prefix = Path(toml_file).parent + dst_file_prefix = Path(info["dest_path"]) + if "**" in contents["paths"][0]["reference"]: + # localized file paths contain globs; we need that directory + # structure to determine the files we need to fetch + force_paths = await 
l10n_github_client.get_file_listing(str(src_file_prefix))
+            else:
+                force_paths = []
+
+            for src_name in getL10nFilesFromToml(toml_file, contents, force_paths):
+                dst_name = dst_file_prefix / src_name.relative_to(src_file_prefix)
+                l10n_files.append(L10nFile(src_name=str(src_name), dst_name=str(dst_name)))
+
+        # fetch l10n_files from android-l10n
+        src_files = [f["src_name"] for f in l10n_files]
+        log.info(f"fetching updated files from l10n repository: {src_files}")
+        new_files = await l10n_github_client.get_files(src_files)
+
+    # fetch l10n_files from gecko repo
+    dst_files = [f["dst_name"] for f in l10n_files]
+    log.info(f"fetching original files from gecko repository: {dst_files}")
+    orig_files = await github_client.get_files(dst_files, branch=to_branch)
+
+    diff = ""
+    for l10n_file in l10n_files:
+        if l10n_file["dst_name"] not in orig_files:
+            log.warning(f"WEIRD: {l10n_file['dst_name']} not in dst_files, continuing anyways...")
+            continue
+
+        if l10n_file["src_name"] not in new_files:
+            log.warning(f"WEIRD: {l10n_file['src_name']} not in src_files, continuing anyways...")
+            continue
+
+        orig_file = orig_files[l10n_file["dst_name"]]
+        new_file = new_files[l10n_file["src_name"]]
+        if orig_file == new_file:
+            log.warning(f"old and new contents of {l10n_file['dst_name']} are the same, skipping bump...")
+            continue
+
+        diff += diff_contents(orig_file, new_file, l10n_file["dst_name"])
+
+    if not diff:
+        return {}
+
+    with open(os.path.join(public_artifact_dir, "android-import.diff"), "w+") as f:
+        f.write(diff)
+
+    log.info("adding android l10n import! diff contents are:")
+    log_file_contents(diff)
+
+    # We always ignore closed trees for android l10n imports.
+ commitmsg = f"Import translations from {l10n_repo_url} CLOSED TREE" + return create_commit_action(commitmsg, diff) diff --git a/landoscript/src/landoscript/actions/android_l10n_sync.py b/landoscript/src/landoscript/actions/android_l10n_sync.py new file mode 100644 index 000000000..b603e0164 --- /dev/null +++ b/landoscript/src/landoscript/actions/android_l10n_sync.py @@ -0,0 +1,95 @@ +import logging +import os.path +from pathlib import Path +from typing import TypedDict + +import tomli + +from landoscript.errors import LandoscriptError +from landoscript.lando import LandoAction, create_commit_action +from landoscript.util.diffs import diff_contents +from landoscript.util.l10n import L10nFile, getL10nFilesFromToml +from landoscript.util.log import log_file_contents +from scriptworker_client.github_client import GithubClient + +log = logging.getLogger(__name__) + + +class TomlInfo(TypedDict): + toml_path: str + + +class AndroidL10nSyncInfo(TypedDict): + from_repo_url: str + from_branch: str + toml_info: list[TomlInfo] + + +async def run(github_client: GithubClient, public_artifact_dir: str, android_l10n_sync_info: AndroidL10nSyncInfo, to_branch: str) -> LandoAction: + log.info("Preparing to sync android l10n changesets.") + from_branch = android_l10n_sync_info["from_branch"] + + toml_files = [info["toml_path"] for info in android_l10n_sync_info["toml_info"]] + toml_contents = await github_client.get_files(toml_files, branch=from_branch) + l10n_files: list[L10nFile] = [] + + for info in android_l10n_sync_info["toml_info"]: + toml_file = info["toml_path"] + log.info(f"processing toml file: {toml_file}") + + if toml_contents[toml_file] is None: + raise LandoscriptError(f"toml_file '{toml_file}' is not present in repository") + + contents = tomli.loads(str(toml_contents[toml_file])) + src_file_prefix = Path(toml_file).parent + dst_file_prefix = src_file_prefix + if "**" in contents["paths"][0]["reference"]: + # localized file paths contain globs; we need that directory 
+ # structure to determine the files we need to fetch + force_paths = await github_client.get_file_listing(str(src_file_prefix), branch=from_branch) + else: + force_paths = [] + + for src_name in getL10nFilesFromToml(toml_file, contents, force_paths): + dst_name = dst_file_prefix / src_name.relative_to(src_file_prefix) + l10n_files.append(L10nFile(src_name=str(src_name), dst_name=str(dst_name))) + + # fetch l10n_files from `from_branch` in the gecko repo + src_files = [f["src_name"] for f in l10n_files] + log.info(f"fetching updated files from l10n repository: {src_files}") + new_files = await github_client.get_files(src_files, branch=from_branch) + + # fetch l10n_files from gecko repo + dst_files = [f["dst_name"] for f in l10n_files] + log.info(f"fetching original files from l10n repository: {dst_files}") + orig_files = await github_client.get_files(dst_files, branch=to_branch) + + diff = "" + for l10n_file in l10n_files: + if l10n_file["dst_name"] not in orig_files: + log.warning(f"WEIRD: {l10n_file['dst_name']} not in dst_files, continuing anyways...") + continue + + if l10n_file["src_name"] not in new_files: + log.warning(f"WEIRD: {l10n_file['src_name']} not in src_files, continuing anyways...") + continue + + orig_file = orig_files[l10n_file["dst_name"]] + new_file = new_files[l10n_file["src_name"]] + if orig_file == new_file: + log.warning(f"old and new contents of {new_file} are the same, skipping bump...") + continue + + diff += diff_contents(orig_file, new_file, l10n_file["dst_name"]) + + if not diff: + return {} + + with open(os.path.join(public_artifact_dir, "android-sync.diff"), "w+") as f: + f.write(diff) + + log.info("adding android l10n sync! 
diff contents are:") + log_file_contents(diff) + + commitmsg = f"Import translations from {from_branch}" + return create_commit_action(commitmsg, diff) diff --git a/landoscript/src/landoscript/data/landoscript_task_schema.json b/landoscript/src/landoscript/data/landoscript_task_schema.json index 416f9b061..74953df4c 100644 --- a/landoscript/src/landoscript/data/landoscript_task_schema.json +++ b/landoscript/src/landoscript/data/landoscript_task_schema.json @@ -216,6 +216,9 @@ "from_repo_url": { "type": "string" }, + "from_branch": { + "type": "string" + }, "toml_info": { "type": "array", "minItems": 1, @@ -235,6 +238,7 @@ }, "required": [ "from_repo_url", + "from_branch", "toml_info" ] }, diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index 532f90e58..eecd91dc6 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -6,7 +6,7 @@ from scriptworker.exceptions import TaskVerificationError from landoscript import lando -from landoscript.actions import l10n_bump, merge_day, tag, version_bump +from landoscript.actions import android_l10n_import, android_l10n_sync, l10n_bump, merge_day, tag, version_bump from landoscript.treestatus import is_tree_open from scriptworker_client.github_client import GithubClient @@ -105,6 +105,18 @@ async def async_main(context): # sometimes nothing has changed! 
if l10n_bump_actions: lando_actions.extend(l10n_bump_actions) + elif action == "android_l10n_import": + android_l10n_import_info = payload["android_l10n_import_info"] + import_action = await android_l10n_import.run( + gh_client, context.config["github_config"], public_artifact_dir, android_l10n_import_info, branch + ) + if import_action: + lando_actions.append(import_action) + elif action == "android_l10n_sync": + android_l10n_sync_info = payload["android_l10n_sync_info"] + import_action = await android_l10n_sync.run(gh_client, public_artifact_dir, android_l10n_sync_info, branch) + if import_action: + lando_actions.append(import_action) log.info("finished processing action") diff --git a/landoscript/src/landoscript/util/diffs.py b/landoscript/src/landoscript/util/diffs.py index a68491981..8bdcc6884 100644 --- a/landoscript/src/landoscript/util/diffs.py +++ b/landoscript/src/landoscript/util/diffs.py @@ -1,16 +1,40 @@ from difflib import unified_diff -def diff_contents(orig: str, modified: str, file: str) -> str: +def diff_contents(orig, modified, file): """Create a git-style unified diff of `orig` and `modified` with the filename `file`.""" + if orig: + # orig exists already + orig_contents = orig.splitlines() + fromfile = f"a/{file}" + else: + # orig does not exist yet; ie: it will be added + orig_contents = "" + fromfile = "/dev/null" + if modified: + # modified exists already + modified_contents = modified.splitlines() + tofile = f"b/{file}" + else: + # modified does not exist yet; ie: it will be added + modified_contents = "" + tofile = "/dev/null" + diff = "" - fromfile = f"a/{file}" - tofile = f"b/{file}" - diff += f"diff --git {fromfile} {tofile}\n" - diff += "\n".join(unified_diff(orig.splitlines(), modified.splitlines(), fromfile=fromfile, tofile=tofile, lineterm="")) - if modified.endswith("\n"): - diff += "\n" + # header line always uses the same filename twice - even with additions and removals + diff += f"diff --git a/{file} b/{file}\n" + diff += 
"\n".join(unified_diff(orig_contents, modified_contents, fromfile=fromfile, tofile=tofile, lineterm="")) + # preserve the newline at the end of the new version of the file, if it exists + if modified: + if modified.endswith("\n"): + diff += "\n" + else: + diff += "\n\\ No newline at end of file\n" + # otherwise, make sure the removal is correctly line ended else: - diff += "\n\\ No newline at end of file\n" + if orig.endswith("\n"): + diff += "\n" + else: + diff += "\n\\ No newline at end of file\n" return diff diff --git a/landoscript/src/landoscript/util/l10n.py b/landoscript/src/landoscript/util/l10n.py new file mode 100644 index 000000000..b36986100 --- /dev/null +++ b/landoscript/src/landoscript/util/l10n.py @@ -0,0 +1,31 @@ +from pathlib import Path +from typing import TypedDict + +from moz.l10n.paths import L10nConfigPaths, get_android_locale + + +class L10nFile(TypedDict): + src_name: str + dst_name: str + + +def getL10nFilesFromToml(toml_path, toml_contents, force_paths=[]): + """Extract list of localized files from project configuration (TOML)""" + + def load(_): + return toml_contents + + project_config_paths = L10nConfigPaths(toml_path, cfg_load=load, locale_map={"android_locale": get_android_locale}, force_paths=force_paths) + + l10n_files = [] + locales = list(project_config_paths.all_locales) + locales.sort() + + tgt_paths = [tgt_path for _, tgt_path in project_config_paths.all()] + for locale in locales: + # Exclude missing files + for tgt_path in tgt_paths: + path = project_config_paths.format_target_path(tgt_path, locale) + l10n_files.append(Path(path)) + + return l10n_files diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index babcb0df2..6b14779c2 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -84,11 +84,11 @@ def setup_test(github_installation_responses, context, payload, actions, repo="r async def run_test( - aioresponses, github_installation_responses, context, payload, actions, 
dry_run=False, assert_func=None, repo="repo_name", err=None, errmsg="" + aioresponses, github_installation_responses, context, payload, actions, should_submit=True, assert_func=None, repo="repo_name", err=None, errmsg="" ): submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, actions, repo) - if not dry_run: + if should_submit: aioresponses.post( submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"} ) @@ -116,7 +116,7 @@ async def run_test( assert errmsg in e.args[0] else: await async_main(context) - if not dry_run: + if should_submit: req = assert_lando_submission_response(aioresponses.requests, submit_uri) assert_status_response(aioresponses.requests, status_uri) if assert_func: @@ -126,26 +126,51 @@ async def run_test( assert ("GET", status_uri) not in aioresponses.requests -def setup_fetch_files_response(aioresponses, code, initial_values={}): +def fetch_files_payload(initial_values={}): if initial_values: - github_response = {} - for file, contents in initial_values.items(): - github_response[file] = f"{contents}" + payload = {"data": {"repository": {}}} - payload = { - "data": { - "repository": {k: {"text": v} for k, v in github_response.items()}, - } - } + for file, contents in initial_values.items(): + if contents is None: + payload["data"]["repository"][file] = None + else: + payload["data"]["repository"][file] = {"text": contents} else: payload = {} - aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=code, payload=payload) + return payload + + +def setup_github_graphql_responses(aioresponses, *payloads): + for payload in payloads: + aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=200, payload=payload) -def setup_fetch_files_responses(aioresponses, file_contents): - for fc in file_contents: - setup_fetch_files_response(aioresponses, 200, fc) +def get_file_listing_payload(paths): + def make_entry(path): + parts = 
path.split("/", 1) + type_ = "blob" if len(parts) == 1 else "tree" + obj = {} + if type_ == "tree": + # this obviously does not handle multiple files in the same directory + # properly; this is being ignored until the case comes up + obj["entries"] = [make_entry(parts[1])] + return { + "name": parts[0], + "type": type_, + "object": obj, + } + + entries = [make_entry(path) for path in paths] + return { + "data": { + "repository": { + "object": { + "entries": entries, + } + } + } + } def setup_l10n_file_responses(aioresponses, l10n_bump_info, initial_values, expected_locales): @@ -174,7 +199,7 @@ def setup_l10n_file_responses(aioresponses, l10n_bump_info, initial_values, expe file_responses[l10n_bump_info["path"]] = json.dumps(changesets_data) - setup_fetch_files_response(aioresponses, 200, file_responses) + setup_github_graphql_responses(aioresponses, fetch_files_payload(file_responses)) def assert_lando_submission_response(requests, submit_uri, attempts=1): @@ -214,11 +239,24 @@ def assert_add_commit_response(action, commit_msg_strings, initial_values, expec # only one) in the `-` line of the diff. account for this. # the `after` version will only have a newline if the file is # intended to have one after the diff has been applied. 
- before = initial_values[file].rstrip("\n") + "\n" - if file in diff and f"\n-{before}+{after}" in diff: - break + if initial_values[file] is None: + before = None + else: + before = initial_values[file].rstrip("\n") + "\n" + if file in diff: + if not before: + # addition + if f"\n+{after}" in diff: + break + elif not after: + # removal + if f"\n-{before}" in diff: + break + else: + if f"\n-{before}+{after}" in diff: + break else: - assert False, f"no version bump found for {file}: {diffs}" + assert False, f"no bump found for {file}: {diffs}" def get_locale_block(locale, platforms, rev): diff --git a/landoscript/tests/test_android_l10n_import.py b/landoscript/tests/test_android_l10n_import.py new file mode 100644 index 000000000..c8030df09 --- /dev/null +++ b/landoscript/tests/test_android_l10n_import.py @@ -0,0 +1,301 @@ +import pytest + +from landoscript.script import async_main +from tests.conftest import ( + assert_add_commit_response, + assert_lando_submission_response, + assert_status_response, + fetch_files_payload, + get_file_listing_payload, + setup_github_graphql_responses, +) + +ac_l10n_toml = """ +basepath = "." + +locales = [ + "ab", +] + +[env] + +[[paths]] + reference = "components/**/src/main/res/values/strings.xml" + l10n = "components/**/src/main/res/values-{android_locale}/strings.xml" +""" + +fenix_l10n_toml = """ +basepath = "." + +locales = [ + "my", +] + +[env] + +[[paths]] + reference = "app/src/main/res/values/strings.xml" + l10n = "app/src/main/res/values-{android_locale}/strings.xml" +""" + +focus_l10n_toml = """ +basepath = "." 
+ +locales = [ + "zam", +] + +[env] + +[[paths]] + reference = "app/src/main/res/values/strings.xml" + l10n = "app/src/main/res/values-{android_locale}/strings.xml" +""" + + +def assert_success(req, initial_values, expected_bumps): + assert "json" in req.kwargs + assert "actions" in req.kwargs["json"] + create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] + assert len(create_commit_actions) == 1 + action = create_commit_actions[0] + + assert_add_commit_response(action, ["Import translations from", "CLOSED TREE"], initial_values, expected_bumps) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "android_l10n_import_info,android_l10n_values,file_listing_files,initial_values,expected_values", + ( + pytest.param( + { + "from_repo_url": "https://github.com/mozilla-l10n/android-l10n", + "toml_info": [ + { + "dest_path": "mobile/android/fenix", + "toml_path": "mozilla-mobile/fenix/l10n.toml", + }, + { + "dest_path": "mobile/android/focus-android", + "toml_path": "mozilla-mobile/focus-android/l10n.toml", + }, + { + "dest_path": "mobile/android/android-components", + "toml_path": "mozilla-mobile/android-components/l10n.toml", + }, + ], + }, + { + # paths in android-l10n + "mozilla-mobile/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mozilla-mobile/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + [ + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial 
contents", + }, + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + id="import", + ), + pytest.param( + { + "from_repo_url": "https://github.com/mozilla-l10n/android-l10n", + "toml_info": [ + { + "dest_path": "mobile/android/fenix", + "toml_path": "mozilla-mobile/fenix/l10n.toml", + }, + { + "dest_path": "mobile/android/focus-android", + "toml_path": "mozilla-mobile/focus-android/l10n.toml", + }, + { + "dest_path": "mobile/android/android-components", + "toml_path": "mozilla-mobile/android-components/l10n.toml", + }, + ], + }, + { + # paths in android-l10n + "mozilla-mobile/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mozilla-mobile/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + [ + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": None, + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": None, + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": None, + }, + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + id="new files", + ), + pytest.param( + { + "from_repo_url": 
"https://github.com/mozilla-l10n/android-l10n", + "toml_info": [ + { + "dest_path": "mobile/android/fenix", + "toml_path": "mozilla-mobile/fenix/l10n.toml", + }, + { + "dest_path": "mobile/android/focus-android", + "toml_path": "mozilla-mobile/focus-android/l10n.toml", + }, + { + "dest_path": "mobile/android/android-components", + "toml_path": "mozilla-mobile/android-components/l10n.toml", + }, + ], + }, + { + # paths in android-l10n + "mozilla-mobile/fenix/app/src/main/res/values-my/strings.xml": None, + "mozilla-mobile/focus-android/app/src/main/res/values-zam/strings.xml": None, + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": None, + }, + [ + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": None, + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": None, + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": None, + }, + id="removed file", + ), + pytest.param( + { + "from_repo_url": "https://github.com/mozilla-l10n/android-l10n", + "toml_info": [ + { + "dest_path": "mobile/android/fenix", + "toml_path": "mozilla-mobile/fenix/l10n.toml", + }, + { + "dest_path": "mobile/android/focus-android", + "toml_path": "mozilla-mobile/focus-android/l10n.toml", + }, + { + "dest_path": "mobile/android/android-components", + "toml_path": "mozilla-mobile/android-components/l10n.toml", + }, + ], + }, + { + # paths in android-l10n + "mozilla-mobile/fenix/app/src/main/res/values-my/strings.xml": 
"my initial contents", + "mozilla-mobile/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + [ + "mozilla-mobile/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + { + # paths in gecko + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + id="no_changes", + ), + ), +) +async def test_success( + aioresponses, github_installation_responses, context, android_l10n_import_info, android_l10n_values, file_listing_files, initial_values, expected_values +): + payload = { + "actions": ["android_l10n_import"], + "lando_repo": "repo_name", + "android_l10n_import_info": android_l10n_import_info, + "ignore_closed_tree": True, + } + # done here because setup_test sets up github_installation_response too soon...argh + from yarl import URL + + lando_repo = payload["lando_repo"] + lando_api = context.config["lando_api"] + owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] + submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}") + job_id = 12345 + status_uri = URL(f"{lando_api}/push/{job_id}") + + scopes = [f"project:releng:lando:repo:repo_name"] + scopes.append(f"project:releng:lando:action:android_l10n_import") + + github_installation_responses("mozilla-l10n") + 
setup_github_graphql_responses( + aioresponses, + # toml files needed before fetching anything else + fetch_files_payload( + { + "mozilla-mobile/fenix/l10n.toml": fenix_l10n_toml, + "mozilla-mobile/focus-android/l10n.toml": focus_l10n_toml, + "mozilla-mobile/android-components/l10n.toml": ac_l10n_toml, + } + ), + # directory tree information needed to correctly interpret the + # android-components l10n.toml + get_file_listing_payload(file_listing_files), + # string values in the android l10n repository + fetch_files_payload(android_l10n_values), + ) + + github_installation_responses(owner) + # current string values in the destination repository + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) + + aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) + + aioresponses.get( + status_uri, + status=200, + payload={ + "commits": ["abcdef123"], + "push_id": job_id, + "status": "completed", + }, + ) + + context.task = {"payload": payload, "scopes": scopes} + await async_main(context) + + if initial_values != expected_values: + req = assert_lando_submission_response(aioresponses.requests, submit_uri) + assert_success(req, initial_values, expected_values) + assert_status_response(aioresponses.requests, status_uri) + else: + assert ("POST", submit_uri) not in aioresponses.requests + assert ("GET", status_uri) not in aioresponses.requests diff --git a/landoscript/tests/test_android_l10n_sync.py b/landoscript/tests/test_android_l10n_sync.py new file mode 100644 index 000000000..aa5672683 --- /dev/null +++ b/landoscript/tests/test_android_l10n_sync.py @@ -0,0 +1,249 @@ +import pytest +from scriptworker_client.github_client import TransportQueryError + +from tests.conftest import ( + assert_add_commit_response, + get_file_listing_payload, + run_test, + setup_github_graphql_responses, + fetch_files_payload, +) + +ac_l10n_toml = """ +basepath = 
"." + +locales = [ + "ab", +] + +[env] + +[[paths]] + reference = "components/**/src/main/res/values/strings.xml" + l10n = "components/**/src/main/res/values-{android_locale}/strings.xml" +""" + +fenix_l10n_toml = """ +basepath = "." + +locales = [ + "my", +] + +[env] + +[[paths]] + reference = "app/src/main/res/values/strings.xml" + l10n = "app/src/main/res/values-{android_locale}/strings.xml" +""" + +focus_l10n_toml = """ +basepath = "." + +locales = [ + "zam", +] + +[env] + +[[paths]] + reference = "app/src/main/res/values/strings.xml" + l10n = "app/src/main/res/values-{android_locale}/strings.xml" +""" + + +def assert_success(req, initial_values, expected_bumps): + assert "json" in req.kwargs + assert "actions" in req.kwargs["json"] + create_commit_actions = [action for action in req.kwargs["json"]["actions"] if action["action"] == "create-commit"] + assert len(create_commit_actions) == 1 + action = create_commit_actions[0] + + assert_add_commit_response(action, ["Import translations from"], initial_values, expected_bumps) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "android_l10n_sync_info,android_l10n_values,file_listing_files,initial_values,expected_values", + ( + pytest.param( + { + "from_branch": "central", + "toml_info": [ + { + "toml_path": "mobile/android/fenix/l10n.toml", + }, + { + "toml_path": "mobile/android/focus-android/l10n.toml", + }, + { + "toml_path": "mobile/android/android-components/l10n.toml", + }, + ], + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + [ + "mobile/android/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + 
"mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + id="only_changes", + ), + pytest.param( + { + "from_branch": "central", + "toml_info": [ + { + "toml_path": "mobile/android/fenix/l10n.toml", + }, + { + "toml_path": "mobile/android/focus-android/l10n.toml", + }, + { + "toml_path": "mobile/android/android-components/l10n.toml", + }, + ], + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + [ + "mobile/android/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": None, + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": None, + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": None, + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my expected contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam expected contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab expected contents", + }, + id="new files", + ), + pytest.param( + { + "from_branch": "central", + "toml_info": [ + { + "toml_path": 
"mobile/android/fenix/l10n.toml", + }, + { + "toml_path": "mobile/android/focus-android/l10n.toml", + }, + { + "toml_path": "mobile/android/android-components/l10n.toml", + }, + ], + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": None, + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": None, + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": None, + }, + [ + "mobile/android/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": None, + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": None, + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": None, + }, + id="removed file", + ), + pytest.param( + { + "from_branch": "central", + "toml_info": [ + { + "toml_path": "mobile/android/fenix/l10n.toml", + }, + { + "toml_path": "mobile/android/focus-android/l10n.toml", + }, + { + "toml_path": "mobile/android/android-components/l10n.toml", + }, + ], + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + [ + "mobile/android/android-components/components/browser/toolbar/src/main/res/values/strings.xml", + ], + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + 
"mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + { + "mobile/android/fenix/app/src/main/res/values-my/strings.xml": "my initial contents", + "mobile/android/focus-android/app/src/main/res/values-zam/strings.xml": "zam initial contents", + "mobile/android/android-components/components/browser/toolbar/src/main/res/values-ab/strings.xml": "ab initial contents", + }, + id="no_changes", + ), + ), +) +async def test_success( + aioresponses, github_installation_responses, context, android_l10n_sync_info, android_l10n_values, file_listing_files, initial_values, expected_values +): + payload = { + "actions": ["android_l10n_sync"], + "lando_repo": "repo_name", + "android_l10n_sync_info": android_l10n_sync_info, + "ignore_closed_tree": True, + } + + setup_github_graphql_responses( + aioresponses, + # toml files needed before fetching anything else + fetch_files_payload( + { + "mobile/android/fenix/l10n.toml": fenix_l10n_toml, + "mobile/android/focus-android/l10n.toml": focus_l10n_toml, + "mobile/android/android-components/l10n.toml": ac_l10n_toml, + } + ), + # directory tree information needed to correctly interpret the + # android-components l10n.toml + get_file_listing_payload(file_listing_files), + # string values in the android l10n repository + fetch_files_payload(android_l10n_values), + # current string values in the destination repository + fetch_files_payload(initial_values), + ) + + def assert_func(req): + assert_success(req, initial_values, expected_values) + # check for diff on disk + + if initial_values == expected_values: + should_submit = False + else: + should_submit = True + + await run_test(aioresponses, github_installation_responses, context, payload, ["android_l10n_sync"], should_submit, assert_func) diff --git a/landoscript/tests/test_merge_day.py 
b/landoscript/tests/test_merge_day.py index d270f5adc..d6463e9b8 100644 --- a/landoscript/tests/test_merge_day.py +++ b/landoscript/tests/test_merge_day.py @@ -3,7 +3,7 @@ from landoscript.script import async_main -from .conftest import run_test, setup_fetch_files_responses, assert_merge_response +from .conftest import fetch_files_payload, run_test, assert_merge_response, setup_github_graphql_responses @pytest.mark.asyncio @@ -150,18 +150,16 @@ async def test_success_bump_central( "dry_run": dry_run, } - setup_fetch_files_responses( + setup_github_graphql_responses( aioresponses, - [ - # existing version in `to_branch` - {merge_info["fetch_version_from"]: "137.0a1"}, - # fetch of original contents of files to bump, if we expect any replacements - initial_values if expected_bumps else {}, - # fetch of original contents of `replacements` and `regex_replacements` files - initial_replacement_values if expected_replacement_bumps else {}, - # clobber file - {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, - ], + # existing version in `to_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "137.0a1"}), + # fetch of original contents of files to bump, if we expect any replacements + fetch_files_payload(initial_values if expected_bumps else {}), + # fetch of original contents of `replacements` and `regex_replacements` files + fetch_files_payload(initial_replacement_values if expected_replacement_bumps else {}), + # clobber file + fetch_files_payload({"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}), ) def assert_func(req): @@ -176,7 +174,7 @@ def assert_func(req): end_tag, ) - await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], dry_run, assert_func) + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], not dry_run, assert_func) @pytest.mark.asyncio @@ -214,16 +212,14 @@ async def 
test_success_bump_esr(aioresponses, github_installation_responses, con for file, version in expected_bumps.items(): initial_values_by_expected_version[version][file] = initial_values[file] - setup_fetch_files_responses( + setup_github_graphql_responses( aioresponses, - [ - # existing version in `to_branch` - {merge_info["fetch_version_from"]: "128.9.0"}, - # fetch of original contents of files to bump - *initial_values_by_expected_version.values(), - # clobber file - {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, - ], + # existing version in `to_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "128.9.0"}), + # fetch of original contents of files to bump + *[fetch_files_payload(iv) for iv in initial_values_by_expected_version.values()], + # clobber file + fetch_files_payload({"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}), ) def assert_func(req): @@ -235,7 +231,7 @@ def assert_func(req): expected_bumps, ) - await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], assert_func=assert_func) + await run_test(aioresponses, github_installation_responses, context, payload, ["merge_day"], True, assert_func=assert_func) @pytest.mark.asyncio @@ -263,17 +259,15 @@ async def test_success_early_to_late_beta(aioresponses, github_installation_resp "merge_info": merge_info, } - setup_fetch_files_responses( + setup_github_graphql_responses( aioresponses, - [ - # initial version fetch; technically not needed for this use case - # but it keeps the merge day code cleaner to keep it - {merge_info["fetch_version_from"]: "139.0"}, - # fetch of original contents of `replacements` file - initial_replacement_values, - # clobber file - {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, - ], + # initial version fetch; technically not needed for this use case + # but it keeps the merge day code cleaner to 
keep it + fetch_files_payload({merge_info["fetch_version_from"]: "139.0"}), + # fetch of original contents of `replacements` file + fetch_files_payload(initial_replacement_values), + # clobber file + fetch_files_payload({"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}), ) def assert_func(req): @@ -373,20 +367,18 @@ async def test_success_central_to_beta(aioresponses, github_installation_respons for file, version in expected_bumps.items(): initial_values_by_expected_version[version][file] = initial_values[file] - setup_fetch_files_responses( + setup_github_graphql_responses( aioresponses, - [ - # existing version in `to_branch` - {merge_info["fetch_version_from"]: "139.0b11"}, - # existing version in `from_branch` - {merge_info["fetch_version_from"]: "140.0a1"}, - # fetch of original contents of files to bump - *initial_values_by_expected_version.values(), - # fetch of original contents of `replacements` and `regex_replacements` files - initial_replacement_values, - # clobber file - {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, - ], + # existing version in `to_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "139.0b11"}), + # existing version in `from_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "140.0a1"}), + # fetch of original contents of files to bump + *[fetch_files_payload(iv) for iv in initial_values_by_expected_version.values()], + # fetch of original contents of `replacements` and `regex_replacements` files + fetch_files_payload(initial_replacement_values), + # clobber file + fetch_files_payload({"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}), ) def assert_func(req): @@ -449,20 +441,18 @@ async def test_success_beta_to_release(aioresponses, github_installation_respons "merge_info": merge_info, } - setup_fetch_files_responses( + setup_github_graphql_responses( aioresponses, - [ - # 
existing version in `to_branch` - {merge_info["fetch_version_from"]: "135.0"}, - # existing version in `from_branch` - {merge_info["fetch_version_from"]: "136.0"}, - # fetch of original contents of files to bump, if we expect any replacements - initial_values, - # fetch of original contents of `replacements` and `regex_replacements` files - initial_replacement_values, - # clobber file - {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, - ], + # existing version in `to_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "135.0"}), + # existing version in `from_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "136.0"}), + # fetch of original contents of files to bump, if we expect any replacements + fetch_files_payload(initial_values), + # fetch of original contents of `replacements` and `regex_replacements` files + fetch_files_payload(initial_replacement_values), + # clobber file + fetch_files_payload({"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}), ) def assert_func(req): @@ -518,18 +508,16 @@ async def test_success_release_to_esr(aioresponses, github_installation_response "ignore_closed_tree": True, } - setup_fetch_files_responses( + setup_github_graphql_responses( aioresponses, - [ - # existing version in `to_branch` - {merge_info["fetch_version_from"]: "128.0"}, - # fetch of original contents of files to bump, if we expect any replacements - initial_values if expected_bumps else {}, - # fetch of original contents of `replacements` and `regex_replacements` files - initial_replacement_values, - # clobber file - {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, - ], + # existing version in `to_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "128.0"}), + # fetch of original contents of files to bump, if we expect any replacements + fetch_files_payload(initial_values if expected_bumps 
else {}), + # fetch of original contents of `replacements` and `regex_replacements` files + fetch_files_payload(initial_replacement_values), + # clobber file + fetch_files_payload({"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}), ) def assert_func(req): diff --git a/landoscript/tests/test_script.py b/landoscript/tests/test_script.py index 9302a37e3..1a3c4b202 100644 --- a/landoscript/tests/test_script.py +++ b/landoscript/tests/test_script.py @@ -10,13 +10,13 @@ assert_l10n_bump_response, assert_lando_submission_response, assert_status_response, + fetch_files_payload, run_test, + setup_github_graphql_responses, setup_test, assert_add_commit_response, setup_l10n_file_responses, assert_merge_response, - setup_fetch_files_response, - setup_fetch_files_responses, ) from .test_tag import assert_tag_response @@ -81,14 +81,14 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): ), ) async def test_tag_and_bump(aioresponses, github_installation_responses, context, payload, dry_run, initial_values, expected_bumps, commit_msg_strings, tags): - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) def assert_func(req): assert_success(req, commit_msg_strings, initial_values, expected_bumps) assert_tag_response(req, tags) assert (context.config["artifact_dir"] / "public/build/version-bump.diff").exists() - await run_test(aioresponses, github_installation_responses, context, payload, payload["actions"], dry_run, assert_func) + await run_test(aioresponses, github_installation_responses, context, payload, payload["actions"], not dry_run, assert_func) @pytest.mark.asyncio @@ -146,7 +146,7 @@ def assert_func(req): ) async def test_success_with_retries(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): submit_uri, status_uri, job_id, scopes = 
setup_test(github_installation_responses, context, payload, ["version_bump"]) - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=500) aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) @@ -260,7 +260,7 @@ async def test_failure_to_submit_to_lando_500(aioresponses, github_installation_ } initial_values = {"browser/config/version.txt": "134.0"} submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) for _ in range(10): aioresponses.post(submit_uri, status=500) @@ -286,7 +286,7 @@ async def test_to_submit_to_lando_no_status_url(aioresponses, github_installatio } initial_values = {"browser/config/version.txt": "134.0"} submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=202, payload={}) context.task = {"payload": payload, "scopes": scopes} @@ -310,7 +310,7 @@ async def test_lando_polling_result_not_completed(aioresponses, github_installat } initial_values = {"browser/config/version.txt": "134.0"} submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": 
"2025-03-08T12:25:00Z"}) aioresponses.get(status_uri, status=200, payload={}) @@ -335,7 +335,7 @@ async def test_lando_polling_retry_on_failure(aioresponses, github_installation_ } initial_values = {"browser/config/version.txt": "134.0"} submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) aioresponses.get(status_uri, status=500, payload={}) aioresponses.get( @@ -496,20 +496,18 @@ async def test_success_central_to_beta_merge_day(aioresponses, github_installati for file, version in expected_bumps.items(): initial_values_by_expected_version[version][file] = initial_values[file] - setup_fetch_files_responses( + setup_github_graphql_responses( aioresponses, - [ - # existing version in `to_branch` - {merge_info["fetch_version_from"]: "139.0b11"}, - # existing version in `from_branch` - {merge_info["fetch_version_from"]: "140.0a1"}, - # fetch of original contents of files to bump - *initial_values_by_expected_version.values(), - # fetch of original contents of `replacements` and `regex_replacements` files - initial_replacement_values, - # clobber file - {"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}, - ], + # existing version in `to_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "139.0b11"}), + # existing version in `from_branch` + fetch_files_payload({merge_info["fetch_version_from"]: "140.0a1"}), + # fetch of original contents of files to bump + *[fetch_files_payload(iv) for iv in initial_values_by_expected_version.values()], + # fetch of original contents of `replacements` and `regex_replacements` files + 
fetch_files_payload(initial_replacement_values), + # clobber file + fetch_files_payload({"CLOBBER": "# Modifying this file will automatically clobber\nMerge day clobber 2025-03-03"}), ) aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) diff --git a/landoscript/tests/test_tag.py b/landoscript/tests/test_tag.py index 4ff5aac26..55aa1872e 100644 --- a/landoscript/tests/test_tag.py +++ b/landoscript/tests/test_tag.py @@ -46,7 +46,7 @@ async def test_success(aioresponses, github_installation_responses, context, tag def assert_func(req): assert_tag_response(req, tags) - await run_test(aioresponses, github_installation_responses, context, payload, ["tag"], dry_run, assert_func) + await run_test(aioresponses, github_installation_responses, context, payload, ["tag"], not dry_run, assert_func) @pytest.mark.asyncio diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index 7e3c3cc34..a0a8afae4 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -1,12 +1,21 @@ import pytest from scriptworker.client import TaskVerificationError +from simple_github.client import GITHUB_GRAPHQL_ENDPOINT from landoscript.errors import LandoscriptError from landoscript.script import async_main from landoscript.actions.version_bump import ALLOWED_BUMP_FILES from landoscript.util.version import _VERSION_CLASS_PER_BEGINNING_OF_PATH -from .conftest import assert_lando_submission_response, assert_status_response, run_test, setup_test, setup_fetch_files_response, assert_add_commit_response +from .conftest import ( + assert_lando_submission_response, + assert_status_response, + fetch_files_payload, + run_test, + setup_github_graphql_responses, + setup_test, + assert_add_commit_response, +) def assert_success(req, commit_msg_strings, initial_values, expected_bumps): @@ -228,13 +237,13 @@ def assert_success(req, 
commit_msg_strings, initial_values, expected_bumps): ), ) async def test_success_with_bumps(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) dryrun = payload.get("dry_run", False) def assert_func(req): assert_success(req, commit_msg_strings, initial_values, expected_bumps) - await run_test(aioresponses, github_installation_responses, context, payload, ["version_bump"], dryrun, assert_func) + await run_test(aioresponses, github_installation_responses, context, payload, ["version_bump"], not dryrun, assert_func) @pytest.mark.asyncio @@ -292,7 +301,7 @@ def assert_func(req): ) async def test_success_with_retries(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=500) aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) @@ -339,7 +348,7 @@ async def test_success_with_retries(aioresponses, github_installation_responses, ) async def test_success_without_bumps(aioresponses, github_installation_responses, context, payload, initial_values): submit_uri, status_uri, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) - setup_fetch_files_response(aioresponses, 200, initial_values) + setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) context.task = {"payload": payload, "scopes": scopes} await async_main(context) @@ -363,7 +372,7 @@ 
async def test_failure_to_fetch_files(aioresponses, github_installation_response # 5 attempts is hardcoded deeper than we can reasonable override it; so # just expect it for _ in range(5): - setup_fetch_files_response(aioresponses, 500) + aioresponses.post(GITHUB_GRAPHQL_ENDPOINT, status=500) context.task = {"payload": payload, "scopes": scopes} From f6627602cf4cab166aaae4d39df7034d39ad6a86 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Mon, 14 Apr 2025 13:25:10 -0400 Subject: [PATCH 08/13] fix(landoscript): don't support dontbuild/ignore closed tree configuration outside of l10n bump As it turns out, these are static for all other types of actions. --- .../src/landoscript/actions/version_bump.py | 2 +- landoscript/src/landoscript/script.py | 5 ++- landoscript/tests/conftest.py | 2 +- landoscript/tests/test_android_l10n_import.py | 1 - landoscript/tests/test_android_l10n_sync.py | 1 - landoscript/tests/test_merge_day.py | 1 - landoscript/tests/test_version_bump.py | 37 +++++-------------- 7 files changed, 15 insertions(+), 34 deletions(-) diff --git a/landoscript/src/landoscript/actions/version_bump.py b/landoscript/src/landoscript/actions/version_bump.py index 1386c275b..15afcaacb 100644 --- a/landoscript/src/landoscript/actions/version_bump.py +++ b/landoscript/src/landoscript/actions/version_bump.py @@ -36,7 +36,7 @@ async def run( public_artifact_dir: str, branch: str, version_bump_infos: list[VersionBumpInfo], - dontbuild: bool, + dontbuild: bool = True, ) -> LandoAction: """Perform version bumps on the files given in each `version_bump_info`, if necessary.""" diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index eecd91dc6..43ef4fd5e 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -64,6 +64,10 @@ async def async_main(context): if len(payload["actions"]) < 1: raise TaskVerificationError("must provide at least one action!") + if not any([action == "l10n_bump" for 
action in payload["actions"]]): + if "dontbuild" in payload or "ignore_closed_tree" in payload: + raise TaskVerificationError("dontbuild and ignore_closed_tree are only respected in l10n_bump!") + os.makedirs(public_artifact_dir) lando_actions: list[lando.LandoAction] = [] @@ -78,7 +82,6 @@ async def async_main(context): public_artifact_dir, branch, [version_bump.VersionBumpInfo(payload["version_bump_info"])], - dontbuild, ) # sometimes version bumps are no-ops if version_bump_action: diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index 6b14779c2..07e8ff555 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -414,7 +414,7 @@ def assert_merge_response( action = next(create_commit_actions) - commit_msg_strings = ["Automatic version bump"] + commit_msg_strings = ["Automatic version bump", "CLOSED TREE"] assert_add_commit_response(action, commit_msg_strings, initial_values, expected_bumps) # - `replacements` bumps diff --git a/landoscript/tests/test_android_l10n_import.py b/landoscript/tests/test_android_l10n_import.py index c8030df09..d6e192190 100644 --- a/landoscript/tests/test_android_l10n_import.py +++ b/landoscript/tests/test_android_l10n_import.py @@ -240,7 +240,6 @@ async def test_success( "actions": ["android_l10n_import"], "lando_repo": "repo_name", "android_l10n_import_info": android_l10n_import_info, - "ignore_closed_tree": True, } # done here because setup_test sets up github_installation_response too soon...argh from yarl import URL diff --git a/landoscript/tests/test_android_l10n_sync.py b/landoscript/tests/test_android_l10n_sync.py index aa5672683..e1b50c672 100644 --- a/landoscript/tests/test_android_l10n_sync.py +++ b/landoscript/tests/test_android_l10n_sync.py @@ -215,7 +215,6 @@ async def test_success( "actions": ["android_l10n_sync"], "lando_repo": "repo_name", "android_l10n_sync_info": android_l10n_sync_info, - "ignore_closed_tree": True, } setup_github_graphql_responses( diff --git 
a/landoscript/tests/test_merge_day.py b/landoscript/tests/test_merge_day.py index d6463e9b8..495bc1eac 100644 --- a/landoscript/tests/test_merge_day.py +++ b/landoscript/tests/test_merge_day.py @@ -505,7 +505,6 @@ async def test_success_release_to_esr(aioresponses, github_installation_response "actions": ["merge_day"], "lando_repo": "repo_name", "merge_info": merge_info, - "ignore_closed_tree": True, } setup_github_graphql_responses( diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index a0a8afae4..2ee4b97fe 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -48,7 +48,7 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): { "browser/config/version.txt": "135.0", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="dryrun", ), pytest.param( @@ -66,7 +66,7 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): { "browser/config/version.txt": "135.0", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="one_file_new_version", ), pytest.param( @@ -84,7 +84,7 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): { "browser/config/version.txt": "135.0\n", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="one_file_new_version_retains_newline", ), pytest.param( @@ -102,7 +102,7 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): { "browser/config/version.txt": "134.0.1", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="one_file_minor_bump", ), pytest.param( @@ -120,7 +120,7 @@ def assert_success(req, commit_msg_strings, 
initial_values, expected_bumps): { "browser/config/version.txt": "134.0b3", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="beta_bump_display", ), pytest.param( @@ -138,7 +138,7 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): { "browser/config/version.txt": "128.2.1", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="esr_bump", ), pytest.param( @@ -156,7 +156,7 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): { "browser/config/version_display.txt": "128.2.1esr", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="esr_bump_display", ), pytest.param( @@ -185,7 +185,7 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): "config/milestone.txt": "135.0", "mobile/android/version.txt": "135.0", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="many_files_all_changed", ), pytest.param( @@ -212,28 +212,9 @@ def assert_success(req, commit_msg_strings, initial_values, expected_bumps): "browser/config/version_display.txt": "135.0b3", "mobile/android/version.txt": "135.0b3", }, - ["Automatic version bump", "NO BUG", "a=release"], + ["Automatic version bump", "NO BUG", "a=release", "CLOSED TREE", "DONTBUILD"], id="many_files_some_changed", ), - pytest.param( - { - "actions": ["version_bump"], - "lando_repo": "repo_name", - "version_bump_info": { - "files": ["browser/config/version.txt"], - "next_version": "135.0", - }, - "dontbuild": True, - }, - { - "browser/config/version.txt": "134.0", - }, - { - "browser/config/version.txt": "135.0", - }, - ["Automatic version bump", "NO BUG", "a=release", "DONTBUILD"], - 
id="dontbuild_includes_correct_commit_message", - ), ), ) async def test_success_with_bumps(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): From a05cb53620a30f221ad410dba6ad000024e0bc91 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Mon, 14 Apr 2025 14:24:16 -0400 Subject: [PATCH 09/13] feat(landoscript): add authorization header when submitting lando requests Also ensure that LANDO_API and LANDO_TOKEN are set up during startup. --- landoscript/docker.d/init_worker.sh | 18 ++++++++++++++++++ landoscript/docker.d/worker.yml | 1 + landoscript/src/landoscript/lando.py | 9 ++++++++- landoscript/src/landoscript/script.py | 2 +- landoscript/tests/conftest.py | 3 +++ 5 files changed, 31 insertions(+), 2 deletions(-) diff --git a/landoscript/docker.d/init_worker.sh b/landoscript/docker.d/init_worker.sh index 483b9e68d..05d8da7c7 100755 --- a/landoscript/docker.d/init_worker.sh +++ b/landoscript/docker.d/init_worker.sh @@ -1,2 +1,20 @@ #!/bin/bash set -o errexit -o pipefail + +test_var_set() { + local varname=$1 + + if [[ -z "${!varname}" ]]; then + echo "error: ${varname} is not set" + exit 1 + fi +} + +# TODO: real URLs +if [ "$ENV" == "prod" ]; then + export LANDO_API="https://lando.prod" +else + export LANDO_API="https://lando.dev" +fi + +test_var_set "LANDO_TOKEN" diff --git a/landoscript/docker.d/worker.yml b/landoscript/docker.d/worker.yml index d8f8a5892..829b36f75 100644 --- a/landoscript/docker.d/worker.yml +++ b/landoscript/docker.d/worker.yml @@ -2,6 +2,7 @@ work_dir: { "$eval": "WORK_DIR" } artifact_dir: { "$eval": "ARTIFACTS_DIR" } verbose: { "$eval": "VERBOSE == 'true'" } lando_api: { "$eval": "LANDO_API" } +lando_token: { "$eval": "LANDO_TOKEN" } # maps the repo+branch portion of lando URLs to github repo information # TODO: switch this to lookup in lando when that API exists lando_name_to_github_repo: diff --git a/landoscript/src/landoscript/lando.py b/landoscript/src/landoscript/lando.py 
index 1a638cf67..321e46b3e 100644 --- a/landoscript/src/landoscript/lando.py +++ b/landoscript/src/landoscript/lando.py @@ -25,7 +25,12 @@ def create_commit_action(commitmsg: str, diff: str) -> LandoAction: async def submit( - session: ClientSession, lando_api: str, lando_repo: str, actions: list[LandoAction], sleeptime_callback: Callable[..., Any] = calculate_sleep_time + session: ClientSession, + lando_api: str, + lando_token: str, + lando_repo: str, + actions: list[LandoAction], + sleeptime_callback: Callable[..., Any] = calculate_sleep_time, ) -> str: """Submit the provided `actions` to the given `lando_repo` through the `lando_api`.""" url = f"{lando_api}/api/v1/{lando_repo}" @@ -43,6 +48,8 @@ async def submit( kwargs={ "json": json, "raise_for_status": True, + # TODO: is this a bearer token? + "headers": {"Authorization": f"Bearer {lando_token}"}, }, attempts=10, retry_exceptions=ClientResponseError, diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index 43ef4fd5e..5d7635a07 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -133,7 +133,7 @@ async def async_main(context): for la in lando_actions: log.info(la) - status_url = await lando.submit(session, config["lando_api"], lando_repo, lando_actions, config["sleeptime_callback"]) + status_url = await lando.submit(session, config["lando_api"], config["lando_token"], lando_repo, lando_actions, config["sleeptime_callback"]) await lando.poll_until_complete(session, config["poll_time"], status_url) else: log.info("No lando actions to submit!") diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index 07e8ff555..c83889514 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -19,6 +19,7 @@ def context(privkey_file, tmpdir): context.config = { "artifact_dir": tmpdir, "lando_api": "https://lando.fake", + "lando_token": "super secret", "lando_name_to_github_repo": { "repo_name": { 
"owner": "faker", @@ -208,6 +209,8 @@ def assert_lando_submission_response(requests, submit_uri, attempts=1): assert len(reqs) == attempts # there might be more than one in cases where we retry; we assume that # the requests are the same for all attempts + assert "Authorization" in reqs[0].kwargs["headers"] + assert reqs[0].kwargs["headers"]["Authorization"] == "Bearer super secret" return reqs[0] From daff1d9f48d2e934559b8b5c93cb5905eb1caa1c Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Tue, 15 Apr 2025 15:41:40 -0400 Subject: [PATCH 10/13] fix(landoscript): address review comments around lando api requirements --- landoscript/src/landoscript/lando.py | 13 ++++++++----- landoscript/tests/conftest.py | 5 +++-- landoscript/tests/test_android_l10n_import.py | 4 ++-- landoscript/tests/test_l10n_bump.py | 2 +- landoscript/tests/test_script.py | 10 +++++----- landoscript/tests/test_version_bump.py | 2 +- 6 files changed, 20 insertions(+), 16 deletions(-) diff --git a/landoscript/src/landoscript/lando.py b/landoscript/src/landoscript/lando.py index 321e46b3e..8b1c2abb6 100644 --- a/landoscript/src/landoscript/lando.py +++ b/landoscript/src/landoscript/lando.py @@ -33,7 +33,7 @@ async def submit( sleeptime_callback: Callable[..., Any] = calculate_sleep_time, ) -> str: """Submit the provided `actions` to the given `lando_repo` through the `lando_api`.""" - url = f"{lando_api}/api/v1/{lando_repo}" + url = f"{lando_api}/api/repo/{lando_repo}" json = {"actions": actions} log.info(f"submitting actions to lando: {actions}") @@ -49,7 +49,10 @@ async def submit( "json": json, "raise_for_status": True, # TODO: is this a bearer token? 
- "headers": {"Authorization": f"Bearer {lando_token}"}, + "headers": { + "Authorization": f"Bearer {lando_token}", + "User-Agent": "Lando-User/release+landoscript@mozilla.com", + }, }, attempts=10, retry_exceptions=ClientResponseError, @@ -80,10 +83,10 @@ async def poll_until_complete(session: ClientSession, poll_time: int, status_url if status_resp.status == 200: body = await status_resp.json() - if body.get("status") != "completed": - raise LandoscriptError("code is 200, status is not completed...result is unclear...failing!") + if body.get("status") != "LANDED": + raise LandoscriptError("code is 200, status is not LANDED...result is unclear...failing!") - log.info("success! got 200 response with 'completed' status") + log.info("success! got 200 response with 'LANDED' status") log.info("Commits are:") for commit in body["commits"]: diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index c83889514..46517d742 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -71,7 +71,7 @@ def setup_test(github_installation_responses, context, payload, actions, repo="r lando_repo = payload["lando_repo"] lando_api = context.config["lando_api"] owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] - submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}") + submit_uri = URL(f"{lando_api}/api/{lando_repo}") job_id = 12345 status_uri = URL(f"{lando_api}/push/{job_id}") @@ -100,7 +100,7 @@ async def run_test( payload={ "commits": ["abcdef123"], "push_id": job_id, - "status": "completed", + "status": "LANDED", }, ) @@ -211,6 +211,7 @@ def assert_lando_submission_response(requests, submit_uri, attempts=1): # the requests are the same for all attempts assert "Authorization" in reqs[0].kwargs["headers"] assert reqs[0].kwargs["headers"]["Authorization"] == "Bearer super secret" + assert reqs[0].kwargs["headers"]["User-Agent"] == "Lando-User/release+landoscript@mozilla.com" return reqs[0] diff --git 
a/landoscript/tests/test_android_l10n_import.py b/landoscript/tests/test_android_l10n_import.py index d6e192190..54abd3222 100644 --- a/landoscript/tests/test_android_l10n_import.py +++ b/landoscript/tests/test_android_l10n_import.py @@ -247,7 +247,7 @@ async def test_success( lando_repo = payload["lando_repo"] lando_api = context.config["lando_api"] owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] - submit_uri = URL(f"{lando_api}/api/v1/{lando_repo}") + submit_uri = URL(f"{lando_api}/api/{lando_repo}") job_id = 12345 status_uri = URL(f"{lando_api}/push/{job_id}") @@ -284,7 +284,7 @@ async def test_success( payload={ "commits": ["abcdef123"], "push_id": job_id, - "status": "completed", + "status": "LANDED", }, ) diff --git a/landoscript/tests/test_l10n_bump.py b/landoscript/tests/test_l10n_bump.py index 7e3735948..377cb0580 100644 --- a/landoscript/tests/test_l10n_bump.py +++ b/landoscript/tests/test_l10n_bump.py @@ -570,7 +570,7 @@ async def test_success( payload={ "commits": ["abcdef123"], "push_id": job_id, - "status": "completed", + "status": "LANDED", }, ) diff --git a/landoscript/tests/test_script.py b/landoscript/tests/test_script.py index 1a3c4b202..075feead6 100644 --- a/landoscript/tests/test_script.py +++ b/landoscript/tests/test_script.py @@ -158,7 +158,7 @@ async def test_success_with_retries(aioresponses, github_installation_responses, payload={ "commits": ["abcdef123"], "push_id": job_id, - "status": "completed", + "status": "LANDED", }, ) @@ -299,7 +299,7 @@ async def test_to_submit_to_lando_no_status_url(aioresponses, github_installatio @pytest.mark.asyncio -async def test_lando_polling_result_not_completed(aioresponses, github_installation_responses, context): +async def test_lando_polling_result_not_correct(aioresponses, github_installation_responses, context): payload = { "actions": ["version_bump"], "lando_repo": "repo_name", @@ -320,7 +320,7 @@ async def test_lando_polling_result_not_completed(aioresponses, 
github_installat await async_main(context) assert False, "should've raised LandoscriptError" except LandoscriptError as e: - assert "status is not completed" in e.args[0] + assert "status is not LANDED" in e.args[0] @pytest.mark.asyncio @@ -344,7 +344,7 @@ async def test_lando_polling_retry_on_failure(aioresponses, github_installation_ payload={ "commits": ["abcdef123"], "push_id": job_id, - "status": "completed", + "status": "LANDED", }, ) @@ -531,7 +531,7 @@ async def test_success_central_to_beta_merge_day(aioresponses, github_installati payload={ "commits": ["abcdef123"], "push_id": job_id, - "status": "completed", + "status": "LANDED", }, ) diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index 2ee4b97fe..e398cf4d6 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -294,7 +294,7 @@ async def test_success_with_retries(aioresponses, github_installation_responses, payload={ "commits": ["abcdef123"], "push_id": job_id, - "status": "completed", + "status": "LANDED", }, ) From 4f24f011508edc6b936200680828b9f2dbcc6062 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Wed, 16 Apr 2025 21:05:14 -0400 Subject: [PATCH 11/13] feat(landoscript): pull repository url and branch from lando instead of config --- landoscript/docker.d/worker.yml | 7 -- landoscript/src/landoscript/lando.py | 31 ++++++++- landoscript/src/landoscript/script.py | 66 +++++++++---------- landoscript/tests/conftest.py | 26 ++++---- landoscript/tests/test_android_l10n_import.py | 15 ++++- landoscript/tests/test_l10n_bump.py | 4 +- landoscript/tests/test_script.py | 16 +++-- landoscript/tests/test_version_bump.py | 6 +- 8 files changed, 103 insertions(+), 68 deletions(-) diff --git a/landoscript/docker.d/worker.yml b/landoscript/docker.d/worker.yml index 829b36f75..dff921ff8 100644 --- a/landoscript/docker.d/worker.yml +++ b/landoscript/docker.d/worker.yml @@ -3,10 +3,3 @@ artifact_dir: { "$eval": "ARTIFACTS_DIR" } 
verbose: { "$eval": "VERBOSE == 'true'" } lando_api: { "$eval": "LANDO_API" } lando_token: { "$eval": "LANDO_TOKEN" } -# maps the repo+branch portion of lando URLs to github repo information -# TODO: switch this to lookup in lando when that API exists -lando_name_to_github_repo: - autoland: - owner: mozilla-firefox - repo: firefox - branch: autoland diff --git a/landoscript/src/landoscript/lando.py b/landoscript/src/landoscript/lando.py index 8b1c2abb6..5218234d7 100644 --- a/landoscript/src/landoscript/lando.py +++ b/landoscript/src/landoscript/lando.py @@ -2,7 +2,7 @@ import datetime import logging from pprint import pprint -from typing import Any, Callable +from typing import Any, Callable, Tuple from aiohttp import ClientResponseError, ClientSession from async_timeout import timeout @@ -74,7 +74,10 @@ async def poll_until_complete(session: ClientSession, poll_time: int, status_url await asyncio.sleep(poll_time) log.info(f"polling lando for status: {status_url}") - status_resp = await session.get(status_url) + status_resp = await session.get( + status_url, + headers={"User-Agent": "Lando-User/release+landoscript@mozilla.com"}, + ) # just retry if something went wrong... 
if not status_resp.ok: @@ -93,3 +96,27 @@ async def poll_until_complete(session: ClientSession, poll_time: int, status_url log.info(commit) break + + +async def get_repo_info(session: ClientSession, lando_api: str, lando_repo: str) -> Tuple[str, str]: + """Returns the URL and branch name for the given `lando_repo`, as provided + by the `lando_api`.""" + url = f"{lando_api}/api/repoinfo/{lando_repo}" + + log.info(f"looking up repo info for {lando_repo}") + async with timeout(30): + resp = await retry_async( + session.get, + args=(url,), + kwargs={ + "raise_for_status": True, + "headers": { + "User-Agent": "Lando-User/release+landoscript@mozilla.com", + }, + }, + ) + + repo_info = await resp.json() + log.info(f"found repo info: {repo_info}") + + return (repo_info["repo_url"], repo_info["branch_name"]) diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index 5d7635a07..afb3e90a3 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -8,6 +8,7 @@ from landoscript import lando from landoscript.actions import android_l10n_import, android_l10n_sync, l10n_bump, merge_day, tag, version_bump from landoscript.treestatus import is_tree_open +from scriptworker_client.github import extract_github_repo_owner_and_name from scriptworker_client.github_client import GithubClient log = logging.getLogger(__name__) @@ -38,40 +39,37 @@ def validate_scopes(scopes: set, lando_repo: str, actions: list[str]): # at the time of writing, it means we need noisy and unnecessary None checking # to avoid linter complaints. async def async_main(context): - config = context.config - payload = context.task["payload"] - scopes = set(context.task["scopes"]) - artifact_dir = config["artifact_dir"] - public_artifact_dir = os.path.join(artifact_dir, "public", "build") - - # Note: `lando_repo` is not necessarily the same as the repository's name - # on Github. 
- lando_repo = payload["lando_repo"] - dontbuild = payload.get("dontbuild", False) - ignore_closed_tree = payload.get("ignore_closed_tree", False) - - # pull owner, repo, and branch from config - # TODO: replace this with a lookup through the lando API when that API exists - log.info(f"looking up repository details for lando repo: {lando_repo}") - repo_details = context.config["lando_name_to_github_repo"][lando_repo] - owner = repo_details["owner"] - repo = repo_details["repo"] - branch = repo_details["branch"] - log.info(f"Got owner: {owner}, repo: {repo}, branch: {branch}") - - # validate scopes - these raise if there's any scope issues - validate_scopes(scopes, lando_repo, payload["actions"]) - if len(payload["actions"]) < 1: - raise TaskVerificationError("must provide at least one action!") - - if not any([action == "l10n_bump" for action in payload["actions"]]): - if "dontbuild" in payload or "ignore_closed_tree" in payload: - raise TaskVerificationError("dontbuild and ignore_closed_tree are only respected in l10n_bump!") - - os.makedirs(public_artifact_dir) - - lando_actions: list[lando.LandoAction] = [] async with aiohttp.ClientSession() as session: + config = context.config + payload = context.task["payload"] + scopes = set(context.task["scopes"]) + artifact_dir = config["artifact_dir"] + public_artifact_dir = os.path.join(artifact_dir, "public", "build") + + # Note: `lando_repo` is not necessarily the same as the repository's name + # on Github. 
+ lando_api = config["lando_api"] + lando_repo = payload["lando_repo"] + dontbuild = payload.get("dontbuild", False) + ignore_closed_tree = payload.get("ignore_closed_tree", False) + + # pull owner, repo, and branch from config + repo_url, branch = await lando.get_repo_info(session, lando_api, lando_repo) + owner, repo = extract_github_repo_owner_and_name(repo_url) + log.info(f"Got owner: {owner}, repo: {repo}, branch: {branch}") + + # validate scopes - these raise if there's any scope issues + validate_scopes(scopes, lando_repo, payload["actions"]) + if len(payload["actions"]) < 1: + raise TaskVerificationError("must provide at least one action!") + + if not any([action == "l10n_bump" for action in payload["actions"]]): + if "dontbuild" in payload or "ignore_closed_tree" in payload: + raise TaskVerificationError("dontbuild and ignore_closed_tree are only respected in l10n_bump!") + + os.makedirs(public_artifact_dir) + + lando_actions: list[lando.LandoAction] = [] async with GithubClient(context.config["github_config"], owner, repo) as gh_client: for action in payload["actions"]: log.info(f"processing action: {action}") @@ -133,7 +131,7 @@ async def async_main(context): for la in lando_actions: log.info(la) - status_url = await lando.submit(session, config["lando_api"], config["lando_token"], lando_repo, lando_actions, config["sleeptime_callback"]) + status_url = await lando.submit(session, lando_api, config["lando_token"], lando_repo, lando_actions, config["sleeptime_callback"]) await lando.poll_until_complete(session, config["poll_time"], status_url) else: log.info("No lando actions to submit!") diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index 46517d742..d777a8c67 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -20,13 +20,6 @@ def context(privkey_file, tmpdir): "artifact_dir": tmpdir, "lando_api": "https://lando.fake", "lando_token": "super secret", - "lando_name_to_github_repo": { - "repo_name": { 
- "owner": "faker", - "repo": "fake_repo", - "branch": "fake_branch", - } - }, "github_config": { "app_id": 12345, "privkey_file": privkey_file, @@ -67,14 +60,25 @@ def setup_treestatus_response(aioresponses, context, tree="repo_name", status="o aioresponses.get(url, status=200, payload=resp) -def setup_test(github_installation_responses, context, payload, actions, repo="repo_name"): +def setup_test(aioresponses, github_installation_responses, context, payload, actions, repo="repo_name"): lando_repo = payload["lando_repo"] lando_api = context.config["lando_api"] - owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] - submit_uri = URL(f"{lando_api}/api/{lando_repo}") + owner = "faker" + repo_info_uri = URL(f"{lando_api}/api/repoinfo/{repo}") + submit_uri = URL(f"{lando_api}/api/repo/{lando_repo}") job_id = 12345 status_uri = URL(f"{lando_api}/push/{job_id}") + aioresponses.get( + repo_info_uri, + status=200, + payload={ + "repo_url": f"https://github.com/{owner}/{repo}", + "branch_name": "fake_branch", + "scm_level": "whatever", + }, + ) + github_installation_responses(owner) scopes = [f"project:releng:lando:repo:{repo}"] @@ -87,7 +91,7 @@ def setup_test(github_installation_responses, context, payload, actions, repo="r async def run_test( aioresponses, github_installation_responses, context, payload, actions, should_submit=True, assert_func=None, repo="repo_name", err=None, errmsg="" ): - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, actions, repo) + submit_uri, status_uri, job_id, scopes = setup_test(aioresponses, github_installation_responses, context, payload, actions, repo) if should_submit: aioresponses.post( diff --git a/landoscript/tests/test_android_l10n_import.py b/landoscript/tests/test_android_l10n_import.py index 54abd3222..2bba164bf 100644 --- a/landoscript/tests/test_android_l10n_import.py +++ b/landoscript/tests/test_android_l10n_import.py @@ -246,8 +246,9 @@ async def 
test_success( lando_repo = payload["lando_repo"] lando_api = context.config["lando_api"] - owner = context.config["lando_name_to_github_repo"][lando_repo]["owner"] - submit_uri = URL(f"{lando_api}/api/{lando_repo}") + owner = "faker" + repo_info_uri = URL(f"{lando_api}/api/repoinfo/repo_name") + submit_uri = URL(f"{lando_api}/api/repo/{lando_repo}") job_id = 12345 status_uri = URL(f"{lando_api}/push/{job_id}") @@ -272,6 +273,16 @@ async def test_success( fetch_files_payload(android_l10n_values), ) + aioresponses.get( + repo_info_uri, + status=200, + payload={ + "repo_url": f"https://github.com/{owner}/repo_name", + "branch_name": "fake_branch", + "scm_level": "whatever", + }, + ) + github_installation_responses(owner) # current string values in the destination repository setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) diff --git a/landoscript/tests/test_l10n_bump.py b/landoscript/tests/test_l10n_bump.py index 377cb0580..abd6ab909 100644 --- a/landoscript/tests/test_l10n_bump.py +++ b/landoscript/tests/test_l10n_bump.py @@ -545,7 +545,7 @@ async def test_success( "dontbuild": dontbuild, "ignore_closed_tree": ignore_closed_tree, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["l10n_bump"]) + submit_uri, status_uri, job_id, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["l10n_bump"]) setup_treestatus_response(aioresponses, context) # because the github graphql endpoint is generic we need to make sure we create @@ -693,7 +693,7 @@ async def test_tree_is_closed_noop(aioresponses, github_installation_responses, ], "ignore_closed_tree": False, } - submit_uri, status_uri, _, scopes = setup_test(github_installation_responses, context, payload, ["l10n_bump"]) + submit_uri, status_uri, _, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["l10n_bump"]) setup_treestatus_response(aioresponses, context, status="closed") 
context.task = {"payload": payload, "scopes": scopes} diff --git a/landoscript/tests/test_script.py b/landoscript/tests/test_script.py index 075feead6..ef351c387 100644 --- a/landoscript/tests/test_script.py +++ b/landoscript/tests/test_script.py @@ -145,7 +145,7 @@ def assert_func(req): ), ) async def test_success_with_retries(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + submit_uri, status_uri, job_id, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=500) @@ -227,7 +227,7 @@ async def test_no_actions(aioresponses, github_installation_responses, context): ), ), ) -async def test_missing_scopes(context, scopes, missing): +async def test_missing_scopes(aioresponses, github_installation_responses, context, scopes, missing): payload = { "actions": ["tag", "version_bump"], "lando_repo": "repo_name", @@ -237,6 +237,8 @@ async def test_missing_scopes(context, scopes, missing): }, } + setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) + context.task = {"payload": payload, "scopes": scopes} try: @@ -259,7 +261,7 @@ async def test_failure_to_submit_to_lando_500(aioresponses, github_installation_ }, } initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + submit_uri, _, _, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) for _ in range(10): @@ -285,7 +287,7 @@ async def test_to_submit_to_lando_no_status_url(aioresponses, 
github_installatio }, } initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, _, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + submit_uri, _, _, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=202, payload={}) @@ -309,7 +311,7 @@ async def test_lando_polling_result_not_correct(aioresponses, github_installatio }, } initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + submit_uri, status_uri, job_id, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) aioresponses.get(status_uri, status=200, payload={}) @@ -334,7 +336,7 @@ async def test_lando_polling_retry_on_failure(aioresponses, github_installation_ }, } initial_values = {"browser/config/version.txt": "134.0"} - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + submit_uri, status_uri, job_id, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=202, payload={"job_id": job_id, "status_url": str(status_uri), "message": "foo", "started_at": "2025-03-08T12:25:00Z"}) aioresponses.get(status_uri, status=500, payload={}) @@ -489,7 +491,7 @@ async def test_success_central_to_beta_merge_day(aioresponses, github_installati "merge_info": merge_info, 
"ignore_closed_tree": True, } - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["merge_day", "l10n_bump"]) + submit_uri, status_uri, job_id, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["merge_day", "l10n_bump"]) # version bump files are fetched in groups, by initial version initial_values_by_expected_version = defaultdict(dict) diff --git a/landoscript/tests/test_version_bump.py b/landoscript/tests/test_version_bump.py index e398cf4d6..d9ef3f62a 100644 --- a/landoscript/tests/test_version_bump.py +++ b/landoscript/tests/test_version_bump.py @@ -281,7 +281,7 @@ def assert_func(req): ), ) async def test_success_with_retries(aioresponses, github_installation_responses, context, payload, initial_values, expected_bumps, commit_msg_strings): - submit_uri, status_uri, job_id, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + submit_uri, status_uri, job_id, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) aioresponses.post(submit_uri, status=500) @@ -328,7 +328,7 @@ async def test_success_with_retries(aioresponses, github_installation_responses, ), ) async def test_success_without_bumps(aioresponses, github_installation_responses, context, payload, initial_values): - submit_uri, status_uri, _, scopes = setup_test(github_installation_responses, context, payload, ["version_bump"]) + submit_uri, status_uri, _, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) setup_github_graphql_responses(aioresponses, fetch_files_payload(initial_values)) context.task = {"payload": payload, "scopes": scopes} @@ -348,7 +348,7 @@ async def test_failure_to_fetch_files(aioresponses, github_installation_response "next_version": "135.0", }, } - _, _, _, scopes = 
setup_test(github_installation_responses, context, payload, ["version_bump"]) + _, _, _, scopes = setup_test(aioresponses, github_installation_responses, context, payload, ["version_bump"]) # 5 attempts is hardcoded deeper than we can reasonable override it; so # just expect it From 484f3a498cb2a75b327dd62af1b278b13c18f6ff Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Wed, 16 Apr 2025 22:02:53 -0400 Subject: [PATCH 12/13] fix(landoscript): Replace TypedDicts with dataclasses This required a little bit of massaging for places where there's nested dataclasses, but it was otherwise straightforward. This change also uncovered some places where tests were including unnecessary data, which have been fixed. It also replaced some existing null checks (because the dataclass will throw if a required member is not present during construction). --- .../actions/android_l10n_import.py | 48 +++++++------ .../landoscript/actions/android_l10n_sync.py | 44 +++++++----- .../src/landoscript/actions/l10n_bump.py | 62 ++++++++-------- .../src/landoscript/actions/merge_day.py | 70 ++++++++++--------- .../src/landoscript/actions/version_bump.py | 11 +-- .../data/landoscript_task_schema.json | 17 +---- landoscript/src/landoscript/script.py | 15 ++-- landoscript/src/landoscript/util/l10n.py | 5 +- landoscript/tests/test_l10n_bump.py | 70 ------------------- landoscript/tests/test_script.py | 2 - 10 files changed, 149 insertions(+), 195 deletions(-) diff --git a/landoscript/src/landoscript/actions/android_l10n_import.py b/landoscript/src/landoscript/actions/android_l10n_import.py index 4bdc03ef6..f417e55c5 100644 --- a/landoscript/src/landoscript/actions/android_l10n_import.py +++ b/landoscript/src/landoscript/actions/android_l10n_import.py @@ -1,10 +1,11 @@ import logging import os.path +from copy import deepcopy +from dataclasses import dataclass from pathlib import Path -from typing import TypedDict +from typing import Self import tomli -from scriptworker.client import 
TaskVerificationError from landoscript.errors import LandoscriptError from landoscript.lando import LandoAction, create_commit_action @@ -17,34 +18,41 @@ log = logging.getLogger(__name__) -class TomlInfo(TypedDict): +@dataclass(frozen=True) +class TomlInfo: toml_path: str dest_path: str -class AndroidL10nImportInfo(TypedDict): +@dataclass +class AndroidL10nImportInfo: from_repo_url: str toml_info: list[TomlInfo] + @classmethod + def from_payload_data(cls, payload_data) -> Self: + # copy to avoid modifying the original + kwargs = deepcopy(payload_data) + kwargs["toml_info"] = [TomlInfo(**ti) for ti in payload_data["toml_info"]] + return cls(**kwargs) + async def run( github_client: GithubClient, github_config: dict[str, str], public_artifact_dir: str, android_l10n_import_info: AndroidL10nImportInfo, to_branch: str ) -> LandoAction: log.info("Preparing to import android l10n changesets.") - l10n_repo_url = android_l10n_import_info.get("from_repo_url") - if not l10n_repo_url: - raise TaskVerificationError("Cannot bump l10n revisions from github repo without an l10n_repo_url") + l10n_repo_url = android_l10n_import_info.from_repo_url l10n_owner, l10n_repo = extract_github_repo_owner_and_name(l10n_repo_url) async with GithubClient(github_config, l10n_owner, l10n_repo) as l10n_github_client: - toml_files = [info["toml_path"] for info in android_l10n_import_info["toml_info"]] + toml_files = [info.toml_path for info in android_l10n_import_info.toml_info] # we always take the tip of the default branch when importing new strings toml_contents = await l10n_github_client.get_files(toml_files) l10n_files: list[L10nFile] = [] - for info in android_l10n_import_info["toml_info"]: - toml_file = info["toml_path"] + for info in android_l10n_import_info.toml_info: + toml_file = info.toml_path log.info(f"processing toml file: {toml_file}") if toml_contents[toml_file] is None: @@ -52,7 +60,7 @@ async def run( contents = tomli.loads(str(toml_contents[toml_file])) src_file_prefix = 
Path(toml_file).parent - dst_file_prefix = Path(info["dest_path"]) + dst_file_prefix = Path(info.dest_path) if "**" in contents["paths"][0]["reference"]: # localized file paths contain globs; we need that directory # structure to determine the files we need to fetch @@ -65,32 +73,32 @@ async def run( l10n_files.append(L10nFile(src_name=str(src_name), dst_name=str(dst_name))) # fetch l10n_files from android-l10n - src_files = [f["src_name"] for f in l10n_files] + src_files = [f.src_name for f in l10n_files] log.info(f"fetching updated files from l10n repository: {src_files}") new_files = await l10n_github_client.get_files(src_files) # fetch l10n_files from gecko repo - dst_files = [f["dst_name"] for f in l10n_files] + dst_files = [f.dst_name for f in l10n_files] log.info(f"fetching original files from l10n repository: {dst_files}") orig_files = await github_client.get_files(dst_files, branch=to_branch) diff = "" for l10n_file in l10n_files: - if l10n_file["dst_name"] not in orig_files: - log.warning(f"WEIRD: {l10n_file['dst_name']} not in dst_files, continuing anyways...") + if l10n_file.dst_name not in orig_files: + log.warning(f"WEIRD: {l10n_file.dst_name} not in dst_files, continuing anyways...") continue - if l10n_file["src_name"] not in new_files: - log.warning(f"WEIRD: {l10n_file['src_name']} not in src_files, continuing anyways...") + if l10n_file.src_name not in new_files: + log.warning(f"WEIRD: {l10n_file.src_name} not in src_files, continuing anyways...") continue - orig_file = orig_files[l10n_file["dst_name"]] - new_file = new_files[l10n_file["src_name"]] + orig_file = orig_files[l10n_file.dst_name] + new_file = new_files[l10n_file.src_name] if orig_file == new_file: log.warning(f"old and new contents of {new_file} are the same, skipping bump...") continue - diff += diff_contents(orig_file, new_file, l10n_file["dst_name"]) + diff += diff_contents(orig_file, new_file, l10n_file.dst_name) if not diff: return {} diff --git 
a/landoscript/src/landoscript/actions/android_l10n_sync.py b/landoscript/src/landoscript/actions/android_l10n_sync.py index b603e0164..93ecdfbd8 100644 --- a/landoscript/src/landoscript/actions/android_l10n_sync.py +++ b/landoscript/src/landoscript/actions/android_l10n_sync.py @@ -1,7 +1,9 @@ import logging import os.path +from copy import deepcopy +from dataclasses import dataclass from pathlib import Path -from typing import TypedDict +from typing import Self import tomli @@ -15,26 +17,34 @@ log = logging.getLogger(__name__) -class TomlInfo(TypedDict): +@dataclass(frozen=True) +class TomlInfo: toml_path: str -class AndroidL10nSyncInfo(TypedDict): - from_repo_url: str +@dataclass(frozen=True) +class AndroidL10nSyncInfo: from_branch: str toml_info: list[TomlInfo] + @classmethod + def from_payload_data(cls, payload_data) -> Self: + # copy to avoid modifying the original + kwargs = deepcopy(payload_data) + kwargs["toml_info"] = [TomlInfo(**ti) for ti in payload_data["toml_info"]] + return cls(**kwargs) + async def run(github_client: GithubClient, public_artifact_dir: str, android_l10n_sync_info: AndroidL10nSyncInfo, to_branch: str) -> LandoAction: log.info("Preparing to sync android l10n changesets.") - from_branch = android_l10n_sync_info["from_branch"] + from_branch = android_l10n_sync_info.from_branch - toml_files = [info["toml_path"] for info in android_l10n_sync_info["toml_info"]] + toml_files = [info.toml_path for info in android_l10n_sync_info.toml_info] toml_contents = await github_client.get_files(toml_files, branch=from_branch) l10n_files: list[L10nFile] = [] - for info in android_l10n_sync_info["toml_info"]: - toml_file = info["toml_path"] + for info in android_l10n_sync_info.toml_info: + toml_file = info.toml_path log.info(f"processing toml file: {toml_file}") if toml_contents[toml_file] is None: @@ -55,32 +65,32 @@ async def run(github_client: GithubClient, public_artifact_dir: str, android_l10 l10n_files.append(L10nFile(src_name=str(src_name), 
dst_name=str(dst_name))) # fetch l10n_files from `from_branch` in the gecko repo - src_files = [f["src_name"] for f in l10n_files] + src_files = [f.src_name for f in l10n_files] log.info(f"fetching updated files from l10n repository: {src_files}") new_files = await github_client.get_files(src_files, branch=from_branch) # fetch l10n_files from gecko repo - dst_files = [f["dst_name"] for f in l10n_files] + dst_files = [f.dst_name for f in l10n_files] log.info(f"fetching original files from l10n repository: {dst_files}") orig_files = await github_client.get_files(dst_files, branch=to_branch) diff = "" for l10n_file in l10n_files: - if l10n_file["dst_name"] not in orig_files: - log.warning(f"WEIRD: {l10n_file['dst_name']} not in dst_files, continuing anyways...") + if l10n_file.dst_name not in orig_files: + log.warning(f"WEIRD: {l10n_file.dst_name} not in dst_files, continuing anyways...") continue - if l10n_file["src_name"] not in new_files: - log.warning(f"WEIRD: {l10n_file['src_name']} not in src_files, continuing anyways...") + if l10n_file.src_name not in new_files: + log.warning(f"WEIRD: {l10n_file.src_name} not in src_files, continuing anyways...") continue - orig_file = orig_files[l10n_file["dst_name"]] - new_file = new_files[l10n_file["src_name"]] + orig_file = orig_files[l10n_file.dst_name] + new_file = new_files[l10n_file.src_name] if orig_file == new_file: log.warning(f"old and new contents of {new_file} are the same, skipping bump...") continue - diff += diff_contents(orig_file, new_file, l10n_file["dst_name"]) + diff += diff_contents(orig_file, new_file, l10n_file.dst_name) if not diff: return {} diff --git a/landoscript/src/landoscript/actions/l10n_bump.py b/landoscript/src/landoscript/actions/l10n_bump.py index 9a36572aa..d15a79491 100644 --- a/landoscript/src/landoscript/actions/l10n_bump.py +++ b/landoscript/src/landoscript/actions/l10n_bump.py @@ -2,10 +2,11 @@ import logging import os.path import pprint -from typing import TypedDict +from copy 
import deepcopy +from dataclasses import dataclass, field +from typing import Self from gql.transport.exceptions import TransportError -from scriptworker.client import TaskVerificationError from landoscript.errors import LandoscriptError from landoscript.lando import LandoAction, create_commit_action @@ -17,18 +18,27 @@ log = logging.getLogger(__name__) -class PlatformConfig(TypedDict): +@dataclass(frozen=True) +class PlatformConfig: platforms: list[str] path: str -class L10nBumpInfo(TypedDict): +@dataclass(frozen=True) +class L10nBumpInfo: path: str name: str l10n_repo_url: str l10n_repo_target_branch: str - ignore_config: dict[str, list[str]] platform_configs: list[PlatformConfig] + ignore_config: dict[str, list[str]] = field(default_factory=dict) + + @classmethod + def from_payload_data(cls, payload_data) -> Self: + # copy to avoid modifying the original + kwargs = deepcopy(payload_data) + kwargs["platform_configs"] = [PlatformConfig(**pc) for pc in payload_data["platform_configs"]] + return cls(**kwargs) async def run( @@ -44,20 +54,16 @@ async def run( lando_actions = [] for bump_config in l10n_bump_infos: - log.info(f"considering {bump_config['name']}") - l10n_repo_url = bump_config.get("l10n_repo_url") - l10n_repo_target_branch = bump_config.get("l10n_repo_target_branch") - if not l10n_repo_url: - raise TaskVerificationError("Cannot bump l10n revisions from github repo without an l10n_repo_url") - if not l10n_repo_target_branch: - raise TaskVerificationError("l10n_repo_target_branch must be present in bump_config!") + log.info(f"considering {bump_config.name}") + l10n_repo_url = bump_config.l10n_repo_url + l10n_repo_target_branch = bump_config.l10n_repo_target_branch l10n_owner, l10n_repo = extract_github_repo_owner_and_name(l10n_repo_url) async with GithubClient(github_config, l10n_owner, l10n_repo) as l10n_github_client: # fetch initial files from github - platform_config_files = [pc["path"] for pc in bump_config["platform_configs"]] - files = 
[bump_config["path"], *platform_config_files] + platform_config_files = [pc.path for pc in bump_config.platform_configs] + files = [bump_config.path, *platform_config_files] try: log.info(f"fetching bump files from github: {files}") orig_files = await github_client.get_files(files, branch) @@ -69,10 +75,10 @@ async def run( log.debug(f"{fn}:") log.debug(contents) - if orig_files[bump_config["path"]] is None: - raise LandoscriptError(f"{bump_config['path']} does not exist, cannot perform bump!") + if orig_files[bump_config.path] is None: + raise LandoscriptError(f"{bump_config.path} does not exist, cannot perform bump!") - old_contents = json.loads(str(orig_files[bump_config["path"]])) + old_contents = json.loads(str(orig_files[bump_config.path])) orig_platform_files = {k: v for k, v in orig_files.items() if k in platform_config_files} # get new revision @@ -81,30 +87,30 @@ async def run( log.info(f"new l10n revision is {new_revision}") # build new versions of files - new_contents = build_revision_dict(bump_config.get("ignore_config", {}), bump_config["platform_configs"], orig_platform_files, new_revision) - log.debug(f"new contents of of {bump_config['path']} are:") + new_contents = build_revision_dict(bump_config.ignore_config, bump_config.platform_configs, orig_platform_files, new_revision) + log.debug(f"new contents of of {bump_config.path} are:") log.debug(new_contents) if old_contents == new_contents: - log.warning(f"old and new contents of {bump_config['path']} are the same, skipping bump...") + log.warning(f"old and new contents of {bump_config.path} are the same, skipping bump...") continue # make diff diff = diff_contents( json.dumps(old_contents, sort_keys=True, indent=4, separators=(",", ": ")), json.dumps(new_contents, sort_keys=True, indent=4, separators=(",", ": ")), - bump_config["path"], + bump_config.path, ) - with open(os.path.join(public_artifact_dir, f"l10n-bump-{bump_config['name']}.diff"), "w+") as f: + with 
open(os.path.join(public_artifact_dir, f"l10n-bump-{bump_config.name}.diff"), "w+") as f: f.write(diff) - log.info(f"adding l10n bump commit for {bump_config['name']}! diff contents are:") + log.info(f"adding l10n bump commit for {bump_config.name}! diff contents are:") log_file_contents(diff) # create commit message locale_map = build_locale_map(old_contents, new_contents) - commitmsg = build_commit_message(bump_config["name"], locale_map, dontbuild, ignore_closed_tree) + commitmsg = build_commit_message(bump_config.name, locale_map, dontbuild, ignore_closed_tree) # create action lando_actions.append(create_commit_action(commitmsg, diff)) @@ -112,7 +118,7 @@ async def run( return lando_actions -def build_platform_dict(ignore_config, platform_configs, orig_platform_files): +def build_platform_dict(ignore_config, platform_configs: list[PlatformConfig], orig_platform_files): """Build a dictionary of locale to list of platforms. Args: @@ -132,12 +138,12 @@ def build_platform_dict(ignore_config, platform_configs, orig_platform_files): """ platform_dict = {} for platform_config in platform_configs: - orig_contents = orig_platform_files[platform_config["path"]] + orig_contents = orig_platform_files[platform_config.path] for locale in orig_contents.splitlines(): if locale in ("en-US",): continue existing_platforms = set(platform_dict.get(locale, {}).get("platforms", [])) - platforms = set(platform_config["platforms"]) + platforms = set(platform_config.platforms) ignore_platforms = set(ignore_config.get(locale, [])) platforms = (platforms | existing_platforms) - ignore_platforms platform_dict[locale] = {"platforms": sorted(list(platforms))} @@ -146,7 +152,7 @@ def build_platform_dict(ignore_config, platform_configs, orig_platform_files): # build_revision_dict_github {{{1 -def build_revision_dict(ignore_config, platform_configs, orig_platform_files, revision) -> dict: +def build_revision_dict(ignore_config, platform_configs: list[PlatformConfig], orig_platform_files, 
revision) -> dict: """Add l10n revision information to the ``platform_dict``. All locales will be bumped to head revision of the branch given in `l10n_repo_target_branch` in the repository that `client` is configured with. diff --git a/landoscript/src/landoscript/actions/merge_day.py b/landoscript/src/landoscript/actions/merge_day.py index ae5bae408..1df739b2c 100644 --- a/landoscript/src/landoscript/actions/merge_day.py +++ b/landoscript/src/landoscript/actions/merge_day.py @@ -2,8 +2,10 @@ import os.path import re import string +from copy import deepcopy +from dataclasses import dataclass, field from datetime import date -from typing import TypedDict +from typing import Self import attr from mozilla_version.gecko import GeckoVersion @@ -21,32 +23,41 @@ log = logging.getLogger(__name__) -class VersionFile(TypedDict): +@dataclass(frozen=True) +class VersionFile: filename: str - new_suffix: str - version_bump: str + new_suffix: str = "" + version_bump: str = "" -class MergeInfo(TypedDict): +@dataclass(frozen=True) +class MergeInfo: to_branch: str - from_branch: str - base_tag: str - end_tag: str - merge_old_head: bool fetch_version_from: str - touch_clobber_file: bool - version_files: list[VersionFile] - replacements: list[list[str]] - regex_replacements: list[list[str]] + from_branch: str = "" + base_tag: str = "" + end_tag: str = "" + merge_old_head: bool = False + touch_clobber_file: bool = True + version_files: list[VersionFile] = field(default_factory=list) + replacements: list[list[str]] = field(default_factory=list) + regex_replacements: list[list[str]] = field(default_factory=list) + + @classmethod + def from_payload_data(cls, payload_data) -> Self: + # copy to avoid modifying the original + kwargs = deepcopy(payload_data) + kwargs["version_files"] = [VersionFile(**v) for v in payload_data.get("version_files", [])] + return cls(**kwargs) async def run(github_client: GithubClient, public_artifact_dir: str, merge_info: MergeInfo) -> list[LandoAction]: - 
to_branch = merge_info["to_branch"] - from_branch = merge_info.get("from_branch") - end_tag = merge_info.get("end_tag") - base_tag = merge_info.get("base_tag") - merge_old_head = merge_info.get("merge_old_head") - version_file = merge_info["fetch_version_from"] + to_branch = merge_info.to_branch + from_branch = merge_info.from_branch + end_tag = merge_info.end_tag + base_tag = merge_info.base_tag + merge_old_head = merge_info.merge_old_head + version_file = merge_info.fetch_version_from actions = [] log.info("Starting merge day operations!") @@ -88,14 +99,14 @@ async def run(github_client: GithubClient, public_artifact_dir: str, merge_info: merge_msg = f"Update {to_branch} to {from_branch}" actions.append({"action": "merge-onto", "target": from_branch, "strategy": "theirs", "message": merge_msg}) - if merge_info.get("version_files"): + if merge_info.version_files: log.info("Performing version bumps") files_by_new_suffix = defaultdict(list) bump_types = set() - for vf in merge_info["version_files"]: - if bump_type := vf.get("version_bump"): + for vf in merge_info.version_files: + if bump_type := vf.version_bump: bump_types.add(bump_type) - files_by_new_suffix[vf.get("new_suffix", "")].append(vf["filename"]) + files_by_new_suffix[vf.new_suffix].append(vf.filename) if len(bump_types) == 0: bump_types.add("") @@ -109,12 +120,7 @@ async def run(github_client: GithubClient, public_artifact_dir: str, merge_info: # _not_ happen. ie: we may end up with a new suffix but the same version # number. 
next_version = get_new_version(bump_version, new_suffix, bump_type) - version_bump_infos.append( - { - "files": files, - "next_version": next_version, - } - ) + version_bump_infos.append(version_bump.VersionBumpInfo(files=files, next_version=next_version)) log.info(f"version_bump_infos is: {version_bump_infos}") actions.append( @@ -128,8 +134,8 @@ async def run(github_client: GithubClient, public_artifact_dir: str, merge_info: ) # process replacements, regex-replacements, and update clobber file - replacements = merge_info.get("replacements", []) - regex_replacements = merge_info.get("regex_replacements", []) + replacements = merge_info.replacements + regex_replacements = merge_info.regex_replacements diff = "" if replacements or regex_replacements: log.info("Performing replacements and regex_replacements") @@ -149,7 +155,7 @@ async def run(github_client: GithubClient, public_artifact_dir: str, merge_info: diff += diff_contents(str(orig_contents[fn]), new_contents[fn], fn) - if merge_info.get("touch_clobber_file", True): + if merge_info.touch_clobber_file: log.info("Touching clobber file") orig_clobber_file = (await github_client.get_files("CLOBBER", to_branch))["CLOBBER"] if orig_clobber_file is None: diff --git a/landoscript/src/landoscript/actions/version_bump.py b/landoscript/src/landoscript/actions/version_bump.py index 15afcaacb..f40053e45 100644 --- a/landoscript/src/landoscript/actions/version_bump.py +++ b/landoscript/src/landoscript/actions/version_bump.py @@ -1,6 +1,6 @@ import logging import os.path -from typing import TypedDict +from dataclasses import dataclass from gql.transport.exceptions import TransportError from mozilla_version.version import BaseVersion @@ -26,7 +26,8 @@ ) -class VersionBumpInfo(TypedDict): +@dataclass(frozen=True) +class VersionBumpInfo: next_version: str files: list[str] @@ -43,15 +44,15 @@ async def run( diff = "" for version_bump_info in version_bump_infos: - next_version = version_bump_info["next_version"] + next_version = 
version_bump_info.next_version - for file in version_bump_info["files"]: + for file in version_bump_info.files: if file not in ALLOWED_BUMP_FILES: raise TaskVerificationError("{} is not in version bump allowlist".format(file)) try: log.info("fetching bump files from github") - orig_files = await github_client.get_files(version_bump_info["files"], branch) + orig_files = await github_client.get_files(version_bump_info.files, branch) except TransportError as e: raise LandoscriptError("couldn't retrieve bump files from github") from e diff --git a/landoscript/src/landoscript/data/landoscript_task_schema.json b/landoscript/src/landoscript/data/landoscript_task_schema.json index 74953df4c..d6d5b3123 100644 --- a/landoscript/src/landoscript/data/landoscript_task_schema.json +++ b/landoscript/src/landoscript/data/landoscript_task_schema.json @@ -18,7 +18,6 @@ "version_files", "replacements", "to_branch", - "to_repo", "merge_old_head" ], "properties": { @@ -88,18 +87,6 @@ "beta" ] }, - "from_repo": { - "type": "string", - "examples": [ - "https://hg.mozilla.org/releases/mozilla-beta" - ] - }, - "to_repo": { - "type": "string", - "examples": [ - "https://hg.mozilla.org/releases/mozilla-beta" - ] - }, "merge_old_head": { "type": "boolean", "default": false @@ -274,7 +261,9 @@ "required": [ "path", "name", - "platform_configs" + "platform_configs", + "l10n_repo_url", + "l10n_repo_target_branch" ] } }, diff --git a/landoscript/src/landoscript/script.py b/landoscript/src/landoscript/script.py index afb3e90a3..b56d3d52f 100644 --- a/landoscript/src/landoscript/script.py +++ b/landoscript/src/landoscript/script.py @@ -34,6 +34,10 @@ def validate_scopes(scopes: set, lando_repo: str, actions: list[str]): raise scriptworker.client.TaskVerificationError(f"required scope(s) not present: {', '.join(missing)}") +def sanity_check_payload(payload): + pass + + # `context` is kept explicitly untyped because all of its members are typed as # Optional. 
This never happens in reality (only in tests), but as things stand # at the time of writing, it means we need noisy and unnecessary None checking @@ -79,7 +83,7 @@ async def async_main(context): gh_client, public_artifact_dir, branch, - [version_bump.VersionBumpInfo(payload["version_bump_info"])], + [version_bump.VersionBumpInfo(**payload["version_bump_info"])], ) # sometimes version bumps are no-ops if version_bump_action: @@ -88,7 +92,7 @@ async def async_main(context): tag_actions = tag.run(payload["tags"]) lando_actions.extend(tag_actions) elif action == "merge_day": - merge_day_actions = await merge_day.run(gh_client, public_artifact_dir, payload["merge_info"]) + merge_day_actions = await merge_day.run(gh_client, public_artifact_dir, merge_day.MergeInfo.from_payload_data(payload["merge_info"])) lando_actions.extend(merge_day_actions) elif action == "l10n_bump": if not ignore_closed_tree: @@ -100,21 +104,22 @@ async def async_main(context): log.info("Treestatus is closed; skipping l10n bump.") continue + l10n_bump_info = [l10n_bump.L10nBumpInfo.from_payload_data(lbi) for lbi in payload["l10n_bump_info"]] l10n_bump_actions = await l10n_bump.run( - gh_client, context.config["github_config"], public_artifact_dir, branch, payload["l10n_bump_info"], dontbuild, ignore_closed_tree + gh_client, context.config["github_config"], public_artifact_dir, branch, l10n_bump_info, dontbuild, ignore_closed_tree ) # sometimes nothing has changed! 
if l10n_bump_actions: lando_actions.extend(l10n_bump_actions) elif action == "android_l10n_import": - android_l10n_import_info = payload["android_l10n_import_info"] + android_l10n_import_info = android_l10n_import.AndroidL10nImportInfo.from_payload_data(payload["android_l10n_import_info"]) import_action = await android_l10n_import.run( gh_client, context.config["github_config"], public_artifact_dir, android_l10n_import_info, branch ) if import_action: lando_actions.append(import_action) elif action == "android_l10n_sync": - android_l10n_sync_info = payload["android_l10n_sync_info"] + android_l10n_sync_info = android_l10n_sync.AndroidL10nSyncInfo.from_payload_data(payload["android_l10n_sync_info"]) import_action = await android_l10n_sync.run(gh_client, public_artifact_dir, android_l10n_sync_info, branch) if import_action: lando_actions.append(import_action) diff --git a/landoscript/src/landoscript/util/l10n.py b/landoscript/src/landoscript/util/l10n.py index b36986100..511bb93fb 100644 --- a/landoscript/src/landoscript/util/l10n.py +++ b/landoscript/src/landoscript/util/l10n.py @@ -1,10 +1,11 @@ +from dataclasses import dataclass from pathlib import Path -from typing import TypedDict from moz.l10n.paths import L10nConfigPaths, get_android_locale -class L10nFile(TypedDict): +@dataclass +class L10nFile: src_name: str dst_name: str diff --git a/landoscript/tests/test_l10n_bump.py b/landoscript/tests/test_l10n_bump.py index abd6ab909..0eca4277b 100644 --- a/landoscript/tests/test_l10n_bump.py +++ b/landoscript/tests/test_l10n_bump.py @@ -598,76 +598,6 @@ async def test_success( assert ("GET", status_uri) not in aioresponses.requests -@pytest.mark.asyncio -@pytest.mark.parametrize( - "l10n_bump_info,errmsg", - ( - pytest.param( - [ - { - "ignore_config": {}, - "l10n_repo_target_branch": "main", - "name": "Firefox l10n changesets", - "path": "browser/locales/l10n-changesets.json", - "platform_configs": [ - { - "path": "browser/locales/shipped-locales", - "platforms": 
["linux64", "macosx64", "win64"], - } - ], - }, - ], - "without an l10n_repo_url", - id="no_l10n_repo_url", - ), - pytest.param( - [ - { - "ignore_config": {}, - "l10n_repo_url": "https://github.com/mozilla-l10n/firefox-l10n", - "name": "Firefox l10n changesets", - "path": "browser/locales/l10n-changesets.json", - "platform_configs": [ - { - "path": "browser/locales/shipped-locales", - "platforms": ["linux64", "macosx64", "win64"], - } - ], - }, - ], - "l10n_repo_target_branch must be present", - id="no_l10n_branch", - ), - pytest.param( - [ - { - "ignore_config": {}, - "name": "Firefox l10n changesets", - "path": "browser/locales/l10n-changesets.json", - "platform_configs": [ - { - "path": "browser/locales/shipped-locales", - "platforms": ["linux64", "macosx64", "win64"], - } - ], - }, - ], - "without an l10n_repo_url", - id="no_l10n_repo_url_or_branch", - ), - ), -) -async def test_l10n_repo_errors(aioresponses, github_installation_responses, context, l10n_bump_info, errmsg): - - payload = { - "actions": ["l10n_bump"], - "lando_repo": "repo_name", - "l10n_bump_info": l10n_bump_info, - } - setup_treestatus_response(aioresponses, context) - await run_test(aioresponses, github_installation_responses, context, payload, ["l10n_bump"], err=TaskVerificationError, errmsg=errmsg) - - @pytest.mark.asyncio async def test_tree_is_closed_noop(aioresponses, github_installation_responses, context): payload = { diff --git a/landoscript/tests/test_script.py b/landoscript/tests/test_script.py index ef351c387..df020f768 100644 --- a/landoscript/tests/test_script.py +++ b/landoscript/tests/test_script.py @@ -447,9 +447,7 @@ async def test_success_central_to_beta_merge_day(aioresponses, github_installati ] merge_info = { "end_tag": "FIREFOX_BETA_{major_version}_END", - "to_repo": "https://hg.mozilla.org/releases/mozilla-beta", "base_tag": "FIREFOX_BETA_{major_version}_BASE", - "from_repo": "https://hg.mozilla.org/mozilla-central", "to_branch": "beta", "from_branch": "central", 
"replacements": [ From 54390545543f7ebcfa621d7e1eaf3b77f2ed64f1 Mon Sep 17 00:00:00 2001 From: Ben Hearsum Date: Thu, 17 Apr 2025 10:29:39 -0400 Subject: [PATCH 13/13] fix(landoscript): improve comment around some test code --- landoscript/tests/conftest.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/landoscript/tests/conftest.py b/landoscript/tests/conftest.py index d777a8c67..9ae3c296a 100644 --- a/landoscript/tests/conftest.py +++ b/landoscript/tests/conftest.py @@ -157,8 +157,10 @@ def make_entry(path): type_ = "blob" if len(parts) == 1 else "tree" obj = {} if type_ == "tree": - # this obviously does not handle multiple files in the same directory - # properly; this is being ignored until the case comes up + # Note: this does not handle multiple files in the same directory + # properly (we'll only end up with an entry for the last file seen). + # This is being ignored to avoid complicating this code until + # an actual use case for it comes up. obj["entries"] = [make_entry(parts[1])] return { "name": parts[0],