diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 773cbd6..510054a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,11 +1,15 @@ Change log ========== -Unreleased ----------- +0.5.0 (2022-07-04) +------------------ + +Times Square now implements two GitHub check runs for pull requests on notebook repositories: + +- The "YAML config" check validates the structure of YAML configuration files, specifically the ``times-square.yaml`` repository settings as well as the YAML sidecar files that describe each notebook. +- The "Notebook execution" check actually runs notebooks (given their default parameters) with Noteburst, and ensures that they return without error. -Times Square now implements GitHub Checks for pull requests for notebook repositories. -Initially, Times Square validates the structure of YAML configuration files, specifically the ``times-square.yaml`` repository settings as well as the YAML sidecar files that describe each notebook. +Together, these features will help contributors to Times Square notebook repositories ensure that their contributions work before they merge pull requests. 0.4.0 (2022-05-14) ------------------ diff --git a/requirements/dev.txt b/requirements/dev.txt index cd126ab..8a6a024 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -10,26 +10,20 @@ anyio==3.6.1 \ # via # -c requirements/main.txt # httpcore - # watchgod + # watchfiles asgi-lifespan==1.0.1 \ --hash=sha256:9a33e7da2073c4764bc79bd6136501d6c42f60e3d2168ba71235e84122eadb7f \ --hash=sha256:9ea969dc5eb5cf08e52c08dce6f61afcadd28112e72d81c972b1d8eb8691ab53 # via -r requirements/dev.in -asgiref==3.5.2 \ - --hash=sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4 \ - --hash=sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424 - # via - # -c requirements/main.txt - # uvicorn attrs==21.4.0 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd # via # -c requirements/main.txt # pytest -certifi==2022.5.18.1 \ - --hash=sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7 \ - --hash=sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 # via # -c requirements/main.txt # httpcore @@ -264,9 +258,9 @@ mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 # via mypy -nodeenv==1.6.0 \ - --hash=sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b \ - --hash=sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7 +nodeenv==1.7.0 \ + --hash=sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e \ + --hash=sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b # via pre-commit packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -381,49 +375,49 @@ sniffio==1.2.0 \ # asgi-lifespan # httpcore # httpx -sqlalchemy[asyncio,mypy]==1.4.37 \ - --hash=sha256:06ec11a5e6a4b6428167d3ce33b5bd455c020c867dabe3e6951fa98836e0741d \ - --hash=sha256:0e7fd52e48e933771f177c2a1a484b06ea03774fc7741651ebdf19985a34037c \ - --hash=sha256:139c50b9384e6d32a74fc4dcd0e9717f343ed38f95dbacf832c782c68e3862f3 \ - 
--hash=sha256:17417327b87a0f703c9a20180f75e953315207d048159aff51822052f3e33e69 \ - --hash=sha256:29a742c29fea12259f1d2a9ee2eb7fe4694a85d904a4ac66d15e01177b17ad7f \ - --hash=sha256:2aac2a685feb9882d09f457f4e5586c885d578af4e97a2b759e91e8c457cbce5 \ - --hash=sha256:3197441772dc3b1c6419f13304402f2418a18d7fe78000aa5a026e7100836739 \ - --hash=sha256:3688f92c62db6c5df268e2264891078f17ecb91e3141b400f2e28d0f75796dea \ - --hash=sha256:3862a069a24f354145e01a76c7c720c263d62405fe5bed038c46a7ce900f5dd6 \ - --hash=sha256:4a17c1a1152ca4c29d992714aa9df3054da3af1598e02134f2e7314a32ef69d8 \ - --hash=sha256:4c1d9fb3931e27d59166bb5c4dcc911400fee51082cfba66ceb19ac954ade068 \ - --hash=sha256:4e8706919829d455a9fa687c6bbd1b048e36fec3919a59f2d366247c2bfdbd9c \ - --hash=sha256:50c8eaf44c3fed5ba6758d375de25f163e46137c39fda3a72b9ee1d1bb327dfc \ - --hash=sha256:5e4e517ce72fad35cce364a01aff165f524449e9c959f1837dc71088afa2824c \ - --hash=sha256:6629c79967a6c92e33fad811599adf9bc5cee6e504a1027bbf9cc1b6fb2d276d \ - --hash=sha256:78363f400fbda80f866e8e91d37d36fe6313ff847ded08674e272873c1377ea5 \ - --hash=sha256:7a44683cf97744a405103ef8fdd31199e9d7fc41b4a67e9044523b29541662b0 \ - --hash=sha256:7e579d6e281cc937bdb59917017ab98e618502067e04efb1d24ac168925e1d2a \ - --hash=sha256:7ee34c85cbda7779d66abac392c306ec78c13f5c73a1f01b8b767916d4895d23 \ - --hash=sha256:8b38e088659b30c2ca0af63e5d139fad1779a7925d75075a08717a21c406c0f6 \ - --hash=sha256:9785d6f962d2c925aeb06a7539ac9d16608877da6aeaaf341984b3693ae80a02 \ - --hash=sha256:a91d0668cada27352432f15b92ac3d43e34d8f30973fa8b86f5e9fddee928f3b \ - --hash=sha256:a940c551cfbd2e1e646ceea2777944425f5c3edff914bc808fe734d9e66f8d71 \ - --hash=sha256:aaa0e90e527066409c2ea5676282cf4afb4a40bb9dce0f56c8ec2768bff22a6e \ - --hash=sha256:b4c92823889cf9846b972ee6db30c0e3a92c0ddfc76c6060a6cda467aa5fb694 \ - --hash=sha256:b55932fd0e81b43f4aff397c8ad0b3c038f540af37930423ab8f47a20b117e4c \ - --hash=sha256:c37885f83b59e248bebe2b35beabfbea398cb40960cdc6d3a76eac863d4e1938 \ - --hash=sha256:caca6acf3f90893d7712ae2c6616ecfeac3581b4cc677c928a330ce6fbad4319 \ - --hash=sha256:cffc67cdd07f0e109a1fc83e333972ae423ea5ad414585b63275b66b870ea62b \ - --hash=sha256:d4c3b009c9220ae6e33f17b45f43fb46b9a1d281d76118405af13e26376f2e11 \ - --hash=sha256:d58f2d9d1a4b1459e8956a0153a4119da80f54ee5a9ea623cd568e99459a3ef1 \ - --hash=sha256:d6927c9e3965b194acf75c8e0fb270b4d54512db171f65faae15ef418721996e \ - --hash=sha256:d9050b0c4a7f5538650c74aaba5c80cd64450e41c206f43ea6d194ae6d060ff9 \ - --hash=sha256:eec39a17bab3f69c44c9df4e0ed87c7306f2d2bf1eca3070af644927ec4199fa \ - --hash=sha256:f9940528bf9c4df9e3c3872d23078b6b2da6431c19565637c09f1b88a427a684 \ - --hash=sha256:ffe487570f47536b96eff5ef2b84034a8ba4e19aab5ab7647e677d94a119ea55 +sqlalchemy[asyncio,mypy]==1.4.39 \ + --hash=sha256:047ef5ccd8860f6147b8ac6c45a4bc573d4e030267b45d9a1c47b55962ff0e6f \ + --hash=sha256:05a05771617bfa723ba4cef58d5b25ac028b0d68f28f403edebed5b8243b3a87 \ + --hash=sha256:0ec54460475f0c42512895c99c63d90dd2d9cbd0c13491a184182e85074b04c5 \ + --hash=sha256:107df519eb33d7f8e0d0d052128af2f25066c1a0f6b648fd1a9612ab66800b86 \ + --hash=sha256:14ea8ff2d33c48f8e6c3c472111d893b9e356284d1482102da9678195e5a8eac \ + --hash=sha256:1745987ada1890b0e7978abdb22c133eca2e89ab98dc17939042240063e1ef21 \ + --hash=sha256:1962dfee37b7fb17d3d4889bf84c4ea08b1c36707194c578f61e6e06d12ab90f \ + --hash=sha256:20bf65bcce65c538e68d5df27402b39341fabeecf01de7e0e72b9d9836c13c52 \ + --hash=sha256:26146c59576dfe9c546c9f45397a7c7c4a90c25679492ff610a7500afc7d03a6 \ + 
--hash=sha256:365b75938049ae31cf2176efd3d598213ddb9eb883fbc82086efa019a5f649df \ + --hash=sha256:4770eb3ba69ec5fa41c681a75e53e0e342ac24c1f9220d883458b5596888e43a \ + --hash=sha256:50e7569637e2e02253295527ff34666706dbb2bc5f6c61a5a7f44b9610c9bb09 \ + --hash=sha256:5c2d19bfb33262bf987ef0062345efd0f54c4189c2d95159c72995457bf4a359 \ + --hash=sha256:621f050e72cc7dfd9ad4594ff0abeaad954d6e4a2891545e8f1a53dcdfbef445 \ + --hash=sha256:6d81de54e45f1d756785405c9d06cd17918c2eecc2d4262dc2d276ca612c2f61 \ + --hash=sha256:6f95706da857e6e79b54c33c1214f5467aab10600aa508ddd1239d5df271986e \ + --hash=sha256:752ef2e8dbaa3c5d419f322e3632f00ba6b1c3230f65bc97c2ff5c5c6c08f441 \ + --hash=sha256:7b2785dd2a0c044a36836857ac27310dc7a99166253551ee8f5408930958cc60 \ + --hash=sha256:7f13644b15665f7322f9e0635129e0ef2098409484df67fcd225d954c5861559 \ + --hash=sha256:8194896038753b46b08a0b0ae89a5d80c897fb601dd51e243ed5720f1f155d27 \ + --hash=sha256:864d4f89f054819cb95e93100b7d251e4d114d1c60bc7576db07b046432af280 \ + --hash=sha256:8b773c9974c272aae0fa7e95b576d98d17ee65f69d8644f9b6ffc90ee96b4d19 \ + --hash=sha256:8f901be74f00a13bf375241a778455ee864c2c21c79154aad196b7a994e1144f \ + --hash=sha256:91d2b89bb0c302f89e753bea008936acfa4e18c156fb264fe41eb6bbb2bbcdeb \ + --hash=sha256:b0538b66f959771c56ff996d828081908a6a52a47c5548faed4a3d0a027a5368 \ + --hash=sha256:b30e70f1594ee3c8902978fd71900d7312453922827c4ce0012fa6a8278d6df4 \ + --hash=sha256:b71be98ef6e180217d1797185c75507060a57ab9cd835653e0112db16a710f0d \ + --hash=sha256:c6d00cb9da8d0cbfaba18cad046e94b06de6d4d0ffd9d4095a3ad1838af22528 \ + --hash=sha256:d1f665e50592caf4cad3caed3ed86f93227bffe0680218ccbb293bd5a6734ca8 \ + --hash=sha256:e6e2c8581c6620136b9530137954a8376efffd57fe19802182c7561b0ab48b48 \ + --hash=sha256:e7a7667d928ba6ee361a3176e1bef6847c1062b37726b33505cc84136f657e0d \ + --hash=sha256:ec3985c883d6d217cf2013028afc6e3c82b8907192ba6195d6e49885bfc4b19d \ + --hash=sha256:ede13a472caa85a13abe5095e71676af985d7690eaa8461aeac5c74f6600b6c0 \ + --hash=sha256:f24d4d6ec301688c59b0c4bb1c1c94c5d0bff4ecad33bb8f5d9efdfb8d8bc925 \ + --hash=sha256:f2a42acc01568b9701665e85562bbff78ec3e21981c7d51d56717c22e5d3d58b \ + --hash=sha256:fbc076f79d830ae4c9d49926180a1140b49fa675d0f0d555b44c9a15b29f4c80 # via # -c requirements/main.txt # -r requirements/dev.in -sqlalchemy2-stubs==0.0.2a23 \ - --hash=sha256:6011d2219365d4e51f3e9d83ffeb5b904964ef1d143dc1298d8a70ce8641014d \ - --hash=sha256:a13d94e23b5b0da8ee21986ef8890788a1f2eb26c2a9f39424cc933e4e7e87ff +sqlalchemy2-stubs==0.0.2a24 \ + --hash=sha256:e15c45302eafe196ed516f979ef017135fd619d2c62d02de9a5c5f2e59a600c4 \ + --hash=sha256:f2399251d3d8f00a88659d711a449c855a0d4e977c7a9134e414f1459b9acc11 # via sqlalchemy toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ @@ -437,9 +431,9 @@ tomli==2.0.1 \ # coverage # mypy # pytest -types-pyyaml==6.0.8 \ - --hash=sha256:56a7b0e8109602785f942a11ebfbd16e97d5d0e79f5fbb077ec4e6a0004837ff \ - --hash=sha256:d9495d377bb4f9c5387ac278776403eb3b4bb376851025d913eea4c22b4c6438 +types-pyyaml==6.0.9 \ + --hash=sha256:33ae75c84b8f61fddf0c63e9c7e557db9db1694ad3c2ee8628ec5efebb5a5e9b \ + --hash=sha256:b738e9ef120da0af8c235ba49d3b72510f56ef9bcc308fc8e7357100ff122284 # via -r requirements/dev.in typing-extensions==4.2.0 \ --hash=sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708 \ @@ -448,9 +442,9 @@ typing-extensions==4.2.0 \ # -c requirements/main.txt # mypy # sqlalchemy2-stubs -uvicorn[standard]==0.17.6 \ - 
--hash=sha256:19e2a0e96c9ac5581c01eb1a79a7d2f72bb479691acd2b8921fce48ed5b961a6 \ - --hash=sha256:5180f9d059611747d841a4a4c4ab675edf54c8489e97f96d0583ee90ac3bfc23 +uvicorn[standard]==0.18.1 \ + --hash=sha256:013c4ea0787cc2dc456ef4368e18c01982e6be57903e4d3183218e543eb889b7 \ + --hash=sha256:35703e6518105cfe53f16a5a9435db3e2e227d0784f1fd8fbc1214b1fdc108df # via # -c requirements/main.txt # -r requirements/dev.in @@ -474,13 +468,23 @@ uvloop==0.16.0 \ # via # -c requirements/main.txt # uvicorn -virtualenv==20.14.1 \ - --hash=sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a \ - --hash=sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5 +virtualenv==20.15.0 \ + --hash=sha256:4c44b1d77ca81f8368e2d7414f9b20c428ad16b343ac6d226206c5b84e2b4fcc \ + --hash=sha256:804cce4de5b8a322f099897e308eecc8f6e2951f1a8e7e2b3598dff865f01336 # via pre-commit -watchgod==0.8.2 \ - --hash=sha256:2f3e8137d98f493ff58af54ea00f4d1433a6afe2ed08ab331a657df468c6bfce \ - --hash=sha256:cb11ff66657befba94d828e3b622d5fb76f22fbda1376f355f3e6e51e97d9450 +watchfiles==0.15.0 \ + --hash=sha256:56abed43e645d1f2d6def83e35999cc5758b051aff54ca1065cbfcaea15b3389 \ + --hash=sha256:65ca99a94fcab29d00aa406526eb29cf198c0661854d59a315596064fed02141 \ + --hash=sha256:67d4c66e46a564059df4aeedab78f09cba0b697bf36cc77566b0a7015dfb7f5d \ + --hash=sha256:6e0e8829d32b05151e6009570449f44f891e05f518e495d25f960e0d0b2d0064 \ + --hash=sha256:715733c2ac9da67b2790788657ff6f8b3797eb31565bfc592289b523ae907ca2 \ + --hash=sha256:7b81c6e404b2aa62482a719eb778e4a16d01728302dce1f1512c1e5354a73fda \ + --hash=sha256:82238d08d8a49f1a1ba254278cd4329a154f6100b028393059722ebeddd2ff3d \ + --hash=sha256:955e8f840e1996a8a41be57de4c03af7b1515a685b7fb6abe222f859e413a907 \ + --hash=sha256:cab62510f990d195986302aa6a48ed636d685b099927049120d520c96069fa49 \ + --hash=sha256:d1f9de6b776b3aff17898a4cf5ac5a2d0a16212ea7aad2bbe0ef6aa3e79a96af \ + --hash=sha256:d4f45acd1143db6d3ee77a4ff12d3239bc8083108133e6174e9dcce59c1f9902 \ + --hash=sha256:f7f71012e096e11256fae3b37617a9777980f281e18deb2e789e85cd5b113935 # via # -c requirements/main.txt # uvicorn @@ -536,3 +540,7 @@ websockets==10.3 \ # via # -c requirements/main.txt # uvicorn + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. 
+# setuptools diff --git a/requirements/main.txt b/requirements/main.txt index 91cef13..7894e57 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -14,15 +14,11 @@ anyio==3.6.1 \ # via # httpcore # starlette - # watchgod + # watchfiles arq==0.23a1 \ --hash=sha256:7cec3b9584f7ac735f4ee93e88c27a8eb4552d379ea8d76f84b60f52fc404f03 \ --hash=sha256:b60a347be3865ab058284320c5309a3303f8466d0843de44de9633e74871194f # via safir -asgiref==3.5.2 \ - --hash=sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4 \ - --hash=sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424 - # via uvicorn async-timeout==4.0.2 \ --hash=sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15 \ --hash=sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c @@ -69,9 +65,9 @@ bleach==5.0.0 \ --hash=sha256:08a1fe86d253b5c88c92cc3d810fd8048a16d15762e1e5b74d502256e5926aa1 \ --hash=sha256:c6d6cc054bdc9c83b48b8083e236e5f00f238428666d2ce2e083eaa5fd568565 # via nbconvert -certifi==2022.5.18.1 \ - --hash=sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7 \ - --hash=sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 # via # httpcore # httpx @@ -580,64 +576,64 @@ pyyaml==6.0 \ # via # -r requirements/main.in # uvicorn -pyzmq==23.1.0 \ - --hash=sha256:057176dd3f5ccf5aad4abd662d76b6a39bbf799baaf2f39cd4fdaf2eab326e43 \ - --hash=sha256:05ec90a8da618f2398f9d1aa20b18a9ef332992c6ac23e8c866099faad6ef0d6 \ - --hash=sha256:154de02b15422af28b53d29a02de72121ba503634955017255573fc1f995143d \ - --hash=sha256:16b832adb5d8716f46051da5533c480250bf126984ce86804db6137a3a7f931b \ - --hash=sha256:1df26aa854bdd3a8341bf199064dd6aa6e240f2eaa3c9fa8d217e5d8b868c73e \ - --hash=sha256:28f9164fb2658b7b414fa0894c75b1a9c61375774cdc1bdb7298beb042a2cd87 \ - --hash=sha256:2951c29b8649f3672af9dca8ff61d86310d3664d9629788b1c66422fb13b1239 \ - --hash=sha256:2b08774057ae7ce8a2eb4e7d54db05358234440706ce43a85814500c5d7bd22e \ - --hash=sha256:2e2ac40f7a91c740ec68d6db07ae19ea9259c959333c68bee56ab2c799a67d66 \ - --hash=sha256:312e56799410c34797417a4060a8bd37d4db1f06d1ec0c54f7c8fd81e0d90376 \ - --hash=sha256:38f778a74e3889392e949326cfd0e9b2eb37dcbb2980d98fad2c51703d523db2 \ - --hash=sha256:3955dd5bbbe02f454655296ee36a66c334c7102a29b8458223d168c0380edfd5 \ - --hash=sha256:425ba851a6f9892bde1da2024d82e2fe6796bd77e3391fb96665c50fe9d4c6a5 \ - --hash=sha256:48bbc2db041ab28eeee4a3e8ada0ed336640946dd5a8e53dbd3805f9dbdcf0dc \ - --hash=sha256:4fbcd657cda75574fd1315a4c44bd322bc2e219039fb09f146bbe6f8aef039e9 \ - --hash=sha256:523ba7fd4d8fe75ad09c1e574a648892b75a97d0cfc8005727681053ac19555b \ - --hash=sha256:53b2c1326c2e484d450932d2be739f064b7cb572faabec38386098a28516a529 \ - --hash=sha256:540d7146c3cdc9bbffab039ea067f494eba24d1abe5bd33eb9f963c01e3305d4 \ - --hash=sha256:563d4281c4dbdf647d93114420151d33f895afc4c46b7115a67a0aa5347e6624 \ - --hash=sha256:67a049bcf967a39993858beed873ed3405536019820922d4efacfe35ab3da51a \ - --hash=sha256:67ec63ae3c9c1fa2e077fcb42e77035e2121a04f987464bdf9945a28535d30ad \ - --hash=sha256:68e22c5d3be451e87d47f956b397a7823bfbde2176341bc902fba30f96831d7e \ - --hash=sha256:6ab4b6108e69f63c917cd7ef7217c5727955b1ac90600e44a13ed5312019a014 \ - --hash=sha256:6bd7f18bd4cf51ea8d7e54825902cf36f9d2f35cc51ef618373988d5398b8dd0 \ - 
--hash=sha256:6cd53e861bccc0bdc4620f68fb4a91d5bcfe9f4213cf8e200fa498044d33a6dc \ - --hash=sha256:6d346e551fa64b89d57a4ac74b9bc66703413f02f50093e089e861999ec5cccc \ - --hash=sha256:6ff8708fabc9f9bc2949f457d39b4088c9656c4c9ac15fbbbbaafce8f6d07833 \ - --hash=sha256:7626e8384275a7dea6f3d1f749fb5e00299042e9c895fc3dbe24cb154909c242 \ - --hash=sha256:7e7346b2b33dcd4a2171dd8a9870ae283eec8f6231dcbcf237a0f41e74751a50 \ - --hash=sha256:81623c67cb71b93b5f7e06c9107f3781738ae86866db830c950223d87af2a235 \ - --hash=sha256:83f1c76068faf62c32a36dd62dc4db642c2027bbbd960f8f6345b59e9d4dc472 \ - --hash=sha256:8679bb1dd723ecbea03b1f96c98972815775fd8ec756c440a14f289c436c472e \ - --hash=sha256:86fb683cb9a9c0bb7476988b7957393ecdd22777d87d804442c66e62c99197f9 \ - --hash=sha256:8757c62f7960cd26122f7aaaf86eda1e016fa85734c3777b8054dd334d7dea4d \ - --hash=sha256:894be7d17228e7328cc188096c0162697211ec91761f6812fff12790cbe11c66 \ - --hash=sha256:8a0f240bf43c29be1bd82d77e602a61c798e9de02e5f8bb7bb414cb814f43236 \ - --hash=sha256:8c3abf7eab5b76ae162c4fbb16d514a947fc57fd995b64e5ea8ef8ba3b888a69 \ - --hash=sha256:93332c6972e4c91522c4810e907f3aea067424338071161b39cacded022559df \ - --hash=sha256:97d6c676dc97d593625d9fc48154f2ffeabb619a1e6fe8d2a5b53f97e3e9bdee \ - --hash=sha256:99dd85f0ca1db8d17a01a25c2bbb7784d25a2d39497c6beddbe96bff74194e04 \ - --hash=sha256:9c7fb691fb07ec7ab99fd173bb0e7e0248d31bf83d484a87b917a342f63812c9 \ - --hash=sha256:b3bc3cf200aab74f3d758586ac50295214eda496ac6a6636e0c881c5958d9123 \ - --hash=sha256:bba54f97578943f48f621b4a7afb8eb022370da26a88b88ccc9fee9f3ef7ce45 \ - --hash=sha256:bd2a13a0f8367e50347cbac87ae230ae1953935443240238f956bf10668bead6 \ - --hash=sha256:cbc1184349ca6e5112898aa7fc3efa1b1bbae24ab1edc774cfd09cbfd3b091d7 \ - --hash=sha256:cd82cca9c489e441574804dbda2dd8e114cf3be7935b03de11dade2c9478aea6 \ - --hash=sha256:ce8ba5ed8b0a7a203922d61cff45ee6001a41a9359f04f00d055a4e988755569 \ - --hash=sha256:cfee22e072a382b92ee0709dbb8203dabd52d54258051e770d9d2a81b162530b \ - --hash=sha256:d977df6f7c4109ed1d96ffb6795f6af77114be606ae4556efbfc9cac725db65d \ - --hash=sha256:da72a384a1d7e87490ca71182f3ab469ed21d847adc16b70c34faac5a3b12801 \ - --hash=sha256:ddf4ad1d651e6c9234945061e1a31fe27a4be0dea21c498b87b186fadf8f5919 \ - --hash=sha256:eb0ae5dfda83bbce660179d7b41c1c38fd833a54d2e6d9b258c644f3b75ef94d \ - --hash=sha256:f4c7d370badc60ac94a554bc571a46d03e39d8aacfba8006b334512e184aed59 \ - --hash=sha256:f6c378b435a26fda8996579c0e324b108d2ca0d01b4661503a75634e5155559f \ - --hash=sha256:f6c9d30888503f2f5f87d6d41f016301352dd98da4a861bd10663c3a2d99d3b5 \ - --hash=sha256:fab8a7877275060f7b303e1f91c218069a2814a616b6a5ee2d8a3737deb15915 \ - --hash=sha256:fc32e7d7f98cac3d8d5153ed2cb583158ae3d446a6efb8e28ccb1c54a09f4169 +pyzmq==23.2.0 \ + --hash=sha256:004a431dfa0459123e6f4660d7e3c4ac19217d134ca38bacfffb2e78716fe944 \ + --hash=sha256:057b154471e096e2dda147f7b057041acc303bb7ca4aa24c3b88c6cecdd78717 \ + --hash=sha256:0e08671dc202a1880fa522f921f35ca5925ba30da8bc96228d74a8f0643ead9c \ + --hash=sha256:1b2a21f595f8cc549abd6c8de1fcd34c83441e35fb24b8a59bf161889c62a486 \ + --hash=sha256:21552624ce69e69f7924f413b802b1fb554f4c0497f837810e429faa1cd4f163 \ + --hash=sha256:22ac0243a41798e3eb5d5714b28c2f28e3d10792dffbc8a5fca092f975fdeceb \ + --hash=sha256:2b054525c9f7e240562185bf21671ca16d56bde92e9bd0f822c07dec7626b704 \ + --hash=sha256:30c365e60c39c53f8eea042b37ea28304ffa6558fb7241cf278745095a5757da \ + --hash=sha256:3a4d87342c2737fbb9eee5c33c792db27b36b04957b4e6b7edd73a5b239a2a13 \ + 
--hash=sha256:420b9abd1a7330687a095373b8280a20cdee04342fbc8ccb3b56d9ec8efd4e62 \ + --hash=sha256:444f7d615d5f686d0ef508b9edfa8a286e6d89f449a1ba37b60ef69d869220a3 \ + --hash=sha256:558f5f636e3e65f261b64925e8b190e8689e334911595394572cc7523879006d \ + --hash=sha256:5592fb4316f895922b1cacb91b04a0fa09d6f6f19bbab4442b4d0a0825177b93 \ + --hash=sha256:59928dfebe93cf1e203e3cb0fd5d5dd384da56b99c8305f2e1b0a933751710f6 \ + --hash=sha256:5cb642e94337b0c76c9c8cb9bfb0f8a78654575847d080d3e1504f312d691fc3 \ + --hash=sha256:5d57542429df6acff02ff022067aa75b677603cee70e3abb9742787545eec966 \ + --hash=sha256:5d92e7cbeab7f70b08cc0f27255b0bb2500afc30f31075bca0b1cb87735d186c \ + --hash=sha256:602835e5672ca9ca1d78e6c148fb28c4f91b748ebc41fbd2f479d8763d58bc9b \ + --hash=sha256:60746a7e8558655420a69441c0a1d47ed225ed3ac355920b96a96d0554ef7e6b \ + --hash=sha256:61b97f624da42813f74977425a3a6144d604ea21cf065616d36ea3a866d92c1c \ + --hash=sha256:693c96ae4d975eb8efa1639670e9b1fac0c3f98b7845b65c0f369141fb4bb21f \ + --hash=sha256:814e5aaf0c3be9991a59066eafb2d6e117aed6b413e3e7e9be45d4e55f5e2748 \ + --hash=sha256:83005d8928f8a5cebcfb33af3bfb84b1ad65d882b899141a331cc5d07d89f093 \ + --hash=sha256:831da96ba3f36cc892f0afbb4fb89b28b61b387261676e55d55a682addbd29f7 \ + --hash=sha256:8355744fdbdeac5cfadfa4f38b82029b5f2b8cab7472a33453a217a7f3a9dce2 \ + --hash=sha256:8496a2a5efd055c61ac2c6a18116c768a25c644b6747dcfde43e91620ab3453c \ + --hash=sha256:859059caf564f0c9398c9005278055ed3d37af4d73de6b1597821193b04ca09b \ + --hash=sha256:8c0f4d6f8c985bab83792be26ff3233940ba42e22237610ac50cbcfc10a5c235 \ + --hash=sha256:8c2d8b69a2bf239ae3d987537bf3fbc2b044a405394cf4c258fc684971dd48b2 \ + --hash=sha256:984b232802eddf9f0be264a4d57a10b3a1fd7319df14ee6fc7b41c6d155a3e6c \ + --hash=sha256:99cedf38eaddf263cf7e2a50e405f12c02cedf6d9df00a0d9c5d7b9417b57f76 \ + --hash=sha256:a3dc339f7bc185d5fd0fd976242a5baf35de404d467e056484def8a4dd95868b \ + --hash=sha256:a51f12a8719aad9dcfb55d456022f16b90abc8dde7d3ca93ce3120b40e3fa169 \ + --hash=sha256:bbabd1df23bf63ae829e81200034c0e433499275a6ed29ca1a912ea7629426d9 \ + --hash=sha256:bcc6953e47bcfc9028ddf9ab2a321a3c51d7cc969db65edec092019bb837959f \ + --hash=sha256:c0a5f987d73fd9b46c3d180891f829afda714ab6bab30a1218724d4a0a63afd8 \ + --hash=sha256:c223a13555444707a0a7ebc6f9ee63053147c8c082bd1a31fd1207a03e8b0500 \ + --hash=sha256:c616893a577e9d6773a3836732fd7e2a729157a108b8fccd31c87512fa01671a \ + --hash=sha256:c882f1d4f96fbd807e92c334251d8ebd159a1ef89059ccd386ddea83fdb91bd8 \ + --hash=sha256:c8dec8a2f3f0bb462e6439df436cd8c7ec37968e90b4209ac621e7fbc0ed3b00 \ + --hash=sha256:c9638e0057e3f1a8b7c5ce33c7575349d9183a033a19b5676ad55096ae36820b \ + --hash=sha256:ce4f71e17fa849de41a06109030d3f6815fcc33338bf98dd0dde6d456d33c929 \ + --hash=sha256:ced12075cdf3c7332ecc1960f77f7439d5ebb8ea20bbd3c34c8299e694f1b0a1 \ + --hash=sha256:d11628212fd731b8986f1561d9bb3f8c38d9c15b330c3d8a88963519fbcd553b \ + --hash=sha256:d1610260cc672975723fcf7705c69a95f3b88802a594c9867781bedd9b13422c \ + --hash=sha256:d4651de7316ec8560afe430fb042c0782ed8ac54c0be43a515944d7c78fddac8 \ + --hash=sha256:da338e2728410d74ddeb1479ec67cfba73311607037455a40f92b6f5c62bf11d \ + --hash=sha256:de727ea906033b30527b4a99498f19aca3f4d1073230a958679a5b726e2784e0 \ + --hash=sha256:e2e2db5c6ef376e97c912733dfc24406f5949474d03e800d5f07b6aca4d870af \ + --hash=sha256:e669913cb2179507628419ec4f0e453e48ce6f924de5884d396f18c31836089c \ + --hash=sha256:eb4a573a8499685d62545e806d8fd143c84ac8b3439f925cd92c8763f0ed9bd7 \ + 
--hash=sha256:f146648941cadaaaf01254a75651a23c08159d009d36c5af42a7cc200a5e53ec \ + --hash=sha256:f3ff6abde52e702397949054cb5b06c1c75b5d6542f6a2ce029e46f71ffbbbf2 \ + --hash=sha256:f5aa9da520e4bb8cee8189f2f541701405e7690745094ded7a37b425d60527ea \ + --hash=sha256:f5fdb00d65ec44b10cc6b9b6318ef1363b81647a4aa3270ca39565eadb2d1201 \ + --hash=sha256:f685003d836ad0e5d4f08d1e024ee3ac7816eb2f873b2266306eef858f058133 \ + --hash=sha256:fee86542dc4ee8229e023003e3939b4d58cc2453922cf127778b69505fc9064b # via jupyter-client redis==4.3.3 \ --hash=sha256:2f7a57cf4af15cd543c4394bcbe2b9148db2606a37edba755368836e3a1d053e \ @@ -668,43 +664,43 @@ soupsieve==2.3.2.post1 \ --hash=sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759 \ --hash=sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d # via beautifulsoup4 -sqlalchemy[asyncio]==1.4.37 \ - --hash=sha256:06ec11a5e6a4b6428167d3ce33b5bd455c020c867dabe3e6951fa98836e0741d \ - --hash=sha256:0e7fd52e48e933771f177c2a1a484b06ea03774fc7741651ebdf19985a34037c \ - --hash=sha256:139c50b9384e6d32a74fc4dcd0e9717f343ed38f95dbacf832c782c68e3862f3 \ - --hash=sha256:17417327b87a0f703c9a20180f75e953315207d048159aff51822052f3e33e69 \ - --hash=sha256:29a742c29fea12259f1d2a9ee2eb7fe4694a85d904a4ac66d15e01177b17ad7f \ - --hash=sha256:2aac2a685feb9882d09f457f4e5586c885d578af4e97a2b759e91e8c457cbce5 \ - --hash=sha256:3197441772dc3b1c6419f13304402f2418a18d7fe78000aa5a026e7100836739 \ - --hash=sha256:3688f92c62db6c5df268e2264891078f17ecb91e3141b400f2e28d0f75796dea \ - --hash=sha256:3862a069a24f354145e01a76c7c720c263d62405fe5bed038c46a7ce900f5dd6 \ - --hash=sha256:4a17c1a1152ca4c29d992714aa9df3054da3af1598e02134f2e7314a32ef69d8 \ - --hash=sha256:4c1d9fb3931e27d59166bb5c4dcc911400fee51082cfba66ceb19ac954ade068 \ - --hash=sha256:4e8706919829d455a9fa687c6bbd1b048e36fec3919a59f2d366247c2bfdbd9c \ - --hash=sha256:50c8eaf44c3fed5ba6758d375de25f163e46137c39fda3a72b9ee1d1bb327dfc \ - --hash=sha256:5e4e517ce72fad35cce364a01aff165f524449e9c959f1837dc71088afa2824c \ - --hash=sha256:6629c79967a6c92e33fad811599adf9bc5cee6e504a1027bbf9cc1b6fb2d276d \ - --hash=sha256:78363f400fbda80f866e8e91d37d36fe6313ff847ded08674e272873c1377ea5 \ - --hash=sha256:7a44683cf97744a405103ef8fdd31199e9d7fc41b4a67e9044523b29541662b0 \ - --hash=sha256:7e579d6e281cc937bdb59917017ab98e618502067e04efb1d24ac168925e1d2a \ - --hash=sha256:7ee34c85cbda7779d66abac392c306ec78c13f5c73a1f01b8b767916d4895d23 \ - --hash=sha256:8b38e088659b30c2ca0af63e5d139fad1779a7925d75075a08717a21c406c0f6 \ - --hash=sha256:9785d6f962d2c925aeb06a7539ac9d16608877da6aeaaf341984b3693ae80a02 \ - --hash=sha256:a91d0668cada27352432f15b92ac3d43e34d8f30973fa8b86f5e9fddee928f3b \ - --hash=sha256:a940c551cfbd2e1e646ceea2777944425f5c3edff914bc808fe734d9e66f8d71 \ - --hash=sha256:aaa0e90e527066409c2ea5676282cf4afb4a40bb9dce0f56c8ec2768bff22a6e \ - --hash=sha256:b4c92823889cf9846b972ee6db30c0e3a92c0ddfc76c6060a6cda467aa5fb694 \ - --hash=sha256:b55932fd0e81b43f4aff397c8ad0b3c038f540af37930423ab8f47a20b117e4c \ - --hash=sha256:c37885f83b59e248bebe2b35beabfbea398cb40960cdc6d3a76eac863d4e1938 \ - --hash=sha256:caca6acf3f90893d7712ae2c6616ecfeac3581b4cc677c928a330ce6fbad4319 \ - --hash=sha256:cffc67cdd07f0e109a1fc83e333972ae423ea5ad414585b63275b66b870ea62b \ - --hash=sha256:d4c3b009c9220ae6e33f17b45f43fb46b9a1d281d76118405af13e26376f2e11 \ - --hash=sha256:d58f2d9d1a4b1459e8956a0153a4119da80f54ee5a9ea623cd568e99459a3ef1 \ - --hash=sha256:d6927c9e3965b194acf75c8e0fb270b4d54512db171f65faae15ef418721996e \ - 
--hash=sha256:d9050b0c4a7f5538650c74aaba5c80cd64450e41c206f43ea6d194ae6d060ff9 \ - --hash=sha256:eec39a17bab3f69c44c9df4e0ed87c7306f2d2bf1eca3070af644927ec4199fa \ - --hash=sha256:f9940528bf9c4df9e3c3872d23078b6b2da6431c19565637c09f1b88a427a684 \ - --hash=sha256:ffe487570f47536b96eff5ef2b84034a8ba4e19aab5ab7647e677d94a119ea55 +sqlalchemy[asyncio]==1.4.39 \ + --hash=sha256:047ef5ccd8860f6147b8ac6c45a4bc573d4e030267b45d9a1c47b55962ff0e6f \ + --hash=sha256:05a05771617bfa723ba4cef58d5b25ac028b0d68f28f403edebed5b8243b3a87 \ + --hash=sha256:0ec54460475f0c42512895c99c63d90dd2d9cbd0c13491a184182e85074b04c5 \ + --hash=sha256:107df519eb33d7f8e0d0d052128af2f25066c1a0f6b648fd1a9612ab66800b86 \ + --hash=sha256:14ea8ff2d33c48f8e6c3c472111d893b9e356284d1482102da9678195e5a8eac \ + --hash=sha256:1745987ada1890b0e7978abdb22c133eca2e89ab98dc17939042240063e1ef21 \ + --hash=sha256:1962dfee37b7fb17d3d4889bf84c4ea08b1c36707194c578f61e6e06d12ab90f \ + --hash=sha256:20bf65bcce65c538e68d5df27402b39341fabeecf01de7e0e72b9d9836c13c52 \ + --hash=sha256:26146c59576dfe9c546c9f45397a7c7c4a90c25679492ff610a7500afc7d03a6 \ + --hash=sha256:365b75938049ae31cf2176efd3d598213ddb9eb883fbc82086efa019a5f649df \ + --hash=sha256:4770eb3ba69ec5fa41c681a75e53e0e342ac24c1f9220d883458b5596888e43a \ + --hash=sha256:50e7569637e2e02253295527ff34666706dbb2bc5f6c61a5a7f44b9610c9bb09 \ + --hash=sha256:5c2d19bfb33262bf987ef0062345efd0f54c4189c2d95159c72995457bf4a359 \ + --hash=sha256:621f050e72cc7dfd9ad4594ff0abeaad954d6e4a2891545e8f1a53dcdfbef445 \ + --hash=sha256:6d81de54e45f1d756785405c9d06cd17918c2eecc2d4262dc2d276ca612c2f61 \ + --hash=sha256:6f95706da857e6e79b54c33c1214f5467aab10600aa508ddd1239d5df271986e \ + --hash=sha256:752ef2e8dbaa3c5d419f322e3632f00ba6b1c3230f65bc97c2ff5c5c6c08f441 \ + --hash=sha256:7b2785dd2a0c044a36836857ac27310dc7a99166253551ee8f5408930958cc60 \ + --hash=sha256:7f13644b15665f7322f9e0635129e0ef2098409484df67fcd225d954c5861559 \ + --hash=sha256:8194896038753b46b08a0b0ae89a5d80c897fb601dd51e243ed5720f1f155d27 \ + --hash=sha256:864d4f89f054819cb95e93100b7d251e4d114d1c60bc7576db07b046432af280 \ + --hash=sha256:8b773c9974c272aae0fa7e95b576d98d17ee65f69d8644f9b6ffc90ee96b4d19 \ + --hash=sha256:8f901be74f00a13bf375241a778455ee864c2c21c79154aad196b7a994e1144f \ + --hash=sha256:91d2b89bb0c302f89e753bea008936acfa4e18c156fb264fe41eb6bbb2bbcdeb \ + --hash=sha256:b0538b66f959771c56ff996d828081908a6a52a47c5548faed4a3d0a027a5368 \ + --hash=sha256:b30e70f1594ee3c8902978fd71900d7312453922827c4ce0012fa6a8278d6df4 \ + --hash=sha256:b71be98ef6e180217d1797185c75507060a57ab9cd835653e0112db16a710f0d \ + --hash=sha256:c6d00cb9da8d0cbfaba18cad046e94b06de6d4d0ffd9d4095a3ad1838af22528 \ + --hash=sha256:d1f665e50592caf4cad3caed3ed86f93227bffe0680218ccbb293bd5a6734ca8 \ + --hash=sha256:e6e2c8581c6620136b9530137954a8376efffd57fe19802182c7561b0ab48b48 \ + --hash=sha256:e7a7667d928ba6ee361a3176e1bef6847c1062b37726b33505cc84136f657e0d \ + --hash=sha256:ec3985c883d6d217cf2013028afc6e3c82b8907192ba6195d6e49885bfc4b19d \ + --hash=sha256:ede13a472caa85a13abe5095e71676af985d7690eaa8461aeac5c74f6600b6c0 \ + --hash=sha256:f24d4d6ec301688c59b0c4bb1c1c94c5d0bff4ecad33bb8f5d9efdfb8d8bc925 \ + --hash=sha256:f2a42acc01568b9701665e85562bbff78ec3e21981c7d51d56717c22e5d3d58b \ + --hash=sha256:fbc076f79d830ae4c9d49926180a1140b49fa675d0f0d555b44c9a15b29f4c80 # via safir starlette==0.19.1 \ --hash=sha256:5a60c5c2d051f3a8eb546136aa0c9399773a689595e099e0877704d5888279bf \ @@ -768,9 +764,9 @@ tornado==6.1 \ 
--hash=sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68 \ --hash=sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5 # via jupyter-client -traitlets==5.2.2.post1 \ - --hash=sha256:1530d04badddc6a73d50b7ee34667d4b96914da352109117b4280cb56523a51b \ - --hash=sha256:74803a1baa59af70f023671d86d5c7a834c931186df26d50d362ee6a1ff021fd +traitlets==5.3.0 \ + --hash=sha256:0bb9f1f9f017aa8ec187d8b1b2a7a6626a2a1d877116baba52a129bfa124f8e2 \ + --hash=sha256:65fa18961659635933100db8ca120ef6220555286949774b9cfc106f941d1c7a # via # jupyter-client # jupyter-core @@ -791,9 +787,9 @@ uritemplate==4.1.1 \ --hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \ --hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e # via gidgethub -uvicorn[standard]==0.17.6 \ - --hash=sha256:19e2a0e96c9ac5581c01eb1a79a7d2f72bb479691acd2b8921fce48ed5b961a6 \ - --hash=sha256:5180f9d059611747d841a4a4c4ab675edf54c8489e97f96d0583ee90ac3bfc23 +uvicorn[standard]==0.18.1 \ + --hash=sha256:013c4ea0787cc2dc456ef4368e18c01982e6be57903e4d3183218e543eb889b7 \ + --hash=sha256:35703e6518105cfe53f16a5a9435db3e2e227d0784f1fd8fbc1214b1fdc108df # via -r requirements/main.in uvloop==0.16.0 \ --hash=sha256:04ff57aa137230d8cc968f03481176041ae789308b4d5079118331ab01112450 \ @@ -813,9 +809,19 @@ uvloop==0.16.0 \ --hash=sha256:e814ac2c6f9daf4c36eb8e85266859f42174a4ff0d71b99405ed559257750382 \ --hash=sha256:f74bc20c7b67d1c27c72601c78cf95be99d5c2cdd4514502b4f3eb0933ff1228 # via uvicorn -watchgod==0.8.2 \ - --hash=sha256:2f3e8137d98f493ff58af54ea00f4d1433a6afe2ed08ab331a657df468c6bfce \ - --hash=sha256:cb11ff66657befba94d828e3b622d5fb76f22fbda1376f355f3e6e51e97d9450 +watchfiles==0.15.0 \ + --hash=sha256:56abed43e645d1f2d6def83e35999cc5758b051aff54ca1065cbfcaea15b3389 \ + --hash=sha256:65ca99a94fcab29d00aa406526eb29cf198c0661854d59a315596064fed02141 \ + --hash=sha256:67d4c66e46a564059df4aeedab78f09cba0b697bf36cc77566b0a7015dfb7f5d \ + --hash=sha256:6e0e8829d32b05151e6009570449f44f891e05f518e495d25f960e0d0b2d0064 \ + --hash=sha256:715733c2ac9da67b2790788657ff6f8b3797eb31565bfc592289b523ae907ca2 \ + --hash=sha256:7b81c6e404b2aa62482a719eb778e4a16d01728302dce1f1512c1e5354a73fda \ + --hash=sha256:82238d08d8a49f1a1ba254278cd4329a154f6100b028393059722ebeddd2ff3d \ + --hash=sha256:955e8f840e1996a8a41be57de4c03af7b1515a685b7fb6abe222f859e413a907 \ + --hash=sha256:cab62510f990d195986302aa6a48ed636d685b099927049120d520c96069fa49 \ + --hash=sha256:d1f9de6b776b3aff17898a4cf5ac5a2d0a16212ea7aad2bbe0ef6aa3e79a96af \ + --hash=sha256:d4f45acd1143db6d3ee77a4ff12d3239bc8083108133e6174e9dcce59c1f9902 \ + --hash=sha256:f7f71012e096e11256fae3b37617a9777980f281e18deb2e789e85cd5b113935 # via uvicorn webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ diff --git a/src/timessquare/dbschema/page.py b/src/timessquare/dbschema/page.py index cb0e7bd..9b75ab8 100644 --- a/src/timessquare/dbschema/page.py +++ b/src/timessquare/dbschema/page.py @@ -84,6 +84,11 @@ class SqlPage(Base): github_repo: Optional[str] = Column(Unicode(255)) """The GitHub repository name for GitHub-backed pages.""" + github_commit: Optional[str] = Column(Unicode(40)) + """The SHA of the commit this page corresponds to; only used for pages + associated with a GitHub Check Run. 
+ """ + repository_path_prefix: Optional[str] = Column(Unicode(2048)) """The repository path prefix, relative to the root of the directory.""" diff --git a/src/timessquare/domain/githubcheckout.py b/src/timessquare/domain/githubcheckout.py index 2116e1a..04fc4fc 100644 --- a/src/timessquare/domain/githubcheckout.py +++ b/src/timessquare/domain/githubcheckout.py @@ -103,7 +103,8 @@ def full_name(self) -> str: async def get_git_tree( self, github_client: GitHubAPI ) -> RecursiveGitTreeModel: - """Get the recursive git tree of the repository from the GitHub API. + """Get the recursive git tree of the repository from the GitHub API + for this checkout's HEAD SHA (commit). Parameters ---------- diff --git a/src/timessquare/domain/githubcheckrun.py b/src/timessquare/domain/githubcheckrun.py index ed7648a..903e931 100644 --- a/src/timessquare/domain/githubcheckrun.py +++ b/src/timessquare/domain/githubcheckrun.py @@ -2,6 +2,7 @@ from __future__ import annotations +from abc import ABCMeta, abstractproperty from dataclasses import dataclass from typing import Any, Dict, List, Optional, Sequence, Union @@ -12,13 +13,18 @@ GitHubBlobModel, GitHubCheckRunAnnotationLevel, GitHubCheckRunConclusion, + GitHubCheckRunModel, + GitHubCheckRunStatus, GitHubRepositoryModel, ) from .githubcheckout import ( GitHubRepositoryCheckout, NotebookSidecarFile, + RecursiveGitTreeModel, RepositoryNotebookTreeRef, ) +from .noteburst import NoteburstJobResponseModel, NoteburstJobStatus +from .page import PageExecutionInfo @dataclass(kw_only=True) @@ -83,24 +89,144 @@ def export(self) -> Dict[str, Any]: return output -class GitHubConfigsCheck: - """A domain model for a YAML configuration GitHub Check run.""" +class GitHubCheck(metaclass=ABCMeta): + """A base class for GitHub Check domain models.""" + + title: str = "Times Square check" - def __init__(self) -> None: + external_id: str = "times-square/generic-check" + """The CheckRun external ID field. All check runs of this type + share the same external ID. + """ + + def __init__(self, check_run: GitHubCheckRunModel) -> None: + self.check_run = check_run self.annotations: List[Annotation] = [] + + @property + def conclusion(self) -> GitHubCheckRunConclusion: + """A conclusion based on the annotations.""" + for annotation in self.annotations: + if ( + annotation.annotation_level + == GitHubCheckRunAnnotationLevel.failure + ): + return GitHubCheckRunConclusion.failure + + return GitHubCheckRunConclusion.success + + @abstractproperty + def summary(self) -> str: + """Summary text for the check.""" + raise NotImplementedError + + @abstractproperty + def text(self) -> str: + """The text body of the check's message.""" + raise NotImplementedError + + def export_truncated_annotations(self) -> List[Dict[str, Any]]: + """Export the first 50 annotations to objects serializable to + GitHub. + + Sending more than 50 annotations requires multiple HTTP requests, + which we haven't implemented yet. See + https://docs.github.com/en/rest/checks/runs#update-a-check-run + """ + return [a.export() for a in self.annotations[:50]] + + async def submit_in_progress(self, github_client: GitHubAPI) -> None: + """Set the check run to "In progress".""" + await github_client.patch( + self.check_run.url, + data={"status": GitHubCheckRunStatus.in_progress}, + ) + + async def submit_conclusion( + self, + *, + github_client: GitHubAPI, + ) -> None: + """Send a patch result for the check run to GitHub with the final + conclusion of the check. 
+ """ + await github_client.patch( + self.check_run.url, + data={ + "status": GitHubCheckRunStatus.completed, + "conclusion": self.conclusion, + "output": { + "title": self.title, + "summary": self.summary, + "text": self.text, + "annotations": self.export_truncated_annotations(), + }, + }, + ) + + +class GitHubConfigsCheck(GitHubCheck): + """A domain model for a YAML configuration GitHub Check run.""" + + title: str = "YAML config validation" + + external_id: str = "times-square/yaml-check" + """The CheckRun external ID field. All check runs of this type + share the same external ID. + """ + + def __init__(self, check_run: GitHubCheckRunModel) -> None: self.sidecar_files_checked: List[str] = [] + # Optional caching for data reuse + self.checkout: Optional[GitHubRepositoryCheckout] = None + self.tree: Optional[RecursiveGitTreeModel] = None + + super().__init__(check_run=check_run) + + @classmethod + async def create_check_run_and_validate( + cls, + *, + github_client: GitHubAPI, + repo: GitHubRepositoryModel, + head_sha: str, + ) -> GitHubConfigsCheck: + """Create a GitHubConfigsCheck by first creating a GitHub Check Run, + then running a validation via `validate_repo`. + """ + data = await github_client.post( + "repos/{owner}/{repo}/check-runs", + url_vars={"owner": repo.owner.login, "repo": repo.name}, + data={ + "name": cls.title, + "head_sha": head_sha, + "external_id": cls.external_id, + }, + ) + check_run = GitHubCheckRunModel.parse_obj(data) + return await cls.validate_repo( + check_run=check_run, + github_client=github_client, + repo=repo, + head_sha=head_sha, + ) + @classmethod async def validate_repo( cls, + *, github_client: GitHubAPI, repo: GitHubRepositoryModel, head_sha: str, + check_run: GitHubCheckRunModel, ) -> GitHubConfigsCheck: """Create a check run result model for a specific SHA of a GitHub - repository containing Times Square notebooks. + repository containing Times Square notebooks given a check run already + registered with GitHub. """ - check = cls() + check = cls(check_run) + await check.submit_in_progress(github_client) try: checkout = await GitHubRepositoryCheckout.create( @@ -124,6 +250,13 @@ async def validate_repo( notebook_ref=notebook_ref, ) + # Cache this checkout and tree so that the notebook execution check + # can reuse them efficiently. + check._cache_github_checkout( + checkout=checkout, + tree=tree, + ) + return check async def validate_sidecar( @@ -152,7 +285,6 @@ async def validate_sidecar( @property def conclusion(self) -> GitHubCheckRunConclusion: - """Synthesize a conclusion based on the annotations.""" for annotation in self.annotations: if ( annotation.annotation_level @@ -162,10 +294,6 @@ def conclusion(self) -> GitHubCheckRunConclusion: return GitHubCheckRunConclusion.success - @property - def title(self) -> str: - return "YAML config validation" - @property def summary(self) -> str: sidecar_count = len(self.sidecar_files_checked) @@ -213,12 +341,106 @@ def _is_file_ok(self, path: str) -> bool: return False return True - def export_truncated_annotations(self) -> List[Dict[str, Any]]: - """Export the first 50 annotations to objects serializable to - GitHub. - - Sending more than 50 annotations requires multiple HTTP requests, - which we haven't implemented yet. 
See
-        https://docs.github.com/en/rest/checks/runs#update-a-check-run
+    def _cache_github_checkout(
+        self,
+        *,
+        checkout: GitHubRepositoryCheckout,
+        tree: RecursiveGitTreeModel,
+    ) -> None:
+        """Cache the checkout and Git tree (usually obtained during
+        initialization) so they can be reused elsewhere without getting the
+        resources again from GitHub.
         """
-        return [a.export() for a in self.annotations[:50]]
+        self.checkout = checkout
+        self.tree = tree
+
+
+class NotebookExecutionsCheck(GitHubCheck):
+    """A domain model for a notebook execution GitHub check."""
+
+    title: str = "Notebook execution"
+
+    external_id: str = "times-square/nbexec"
+    """The CheckRun external ID field. All check runs of this type
+    share the same external ID.
+    """
+
+    def __init__(self, check_run: GitHubCheckRunModel) -> None:
+        self.notebook_paths_checked: List[str] = []
+        super().__init__(check_run=check_run)
+
+    def report_noteburst_failure(
+        self, page_execution: PageExecutionInfo
+    ) -> None:
+        path = page_execution.page.repository_source_path
+        assert path is not None
+        annotation = Annotation(
+            path=path,
+            start_line=1,
+            message=page_execution.noteburst_error_message or "",
+            title=(
+                "Noteburst error (status "
+                f"{page_execution.noteburst_status_code})"
+            ),
+            annotation_level=GitHubCheckRunAnnotationLevel.failure,
+        )
+        self.annotations.append(annotation)
+        self.notebook_paths_checked.append(path)
+
+    def report_noteburst_completion(
+        self,
+        *,
+        page_execution: PageExecutionInfo,
+        job_result: NoteburstJobResponseModel,
+    ) -> None:
+        if job_result.status != NoteburstJobStatus.complete:
+            raise ValueError("Noteburst job isn't complete yet")
+        assert job_result.status is not None
+
+        notebook_path = page_execution.page.repository_source_path
+        assert notebook_path is not None
+        self.notebook_paths_checked.append(notebook_path)
+        if not job_result.success:
+            annotation = Annotation(
+                path=notebook_path,
+                start_line=1,
+                message="We couldn't run this notebook successfully.",
+                title="Notebook execution error",
+                annotation_level=GitHubCheckRunAnnotationLevel.failure,
+            )
+            self.annotations.append(annotation)
+
+    @property
+    def summary(self) -> str:
+        notebooks_count = len(self.notebook_paths_checked)
+        if self.conclusion == GitHubCheckRunConclusion.success:
+            text = "Notebooks ran without issue ✅"
+        else:
+            text = "There are some issues 🧐"
+
+        if notebooks_count == 1:
+            text = f"{text} (checked {notebooks_count} notebook)"
+        else:
+            text = f"{text} (checked {notebooks_count} notebooks)"
+
+        return text
+
+    @property
+    def text(self) -> str:
+        text = "| Notebook | Status |\n | --- | :-: |\n"
+
+        notebook_paths = list(set(self.notebook_paths_checked))
+        notebook_paths.sort()
+        for notebook_path in notebook_paths:
+            if self._is_file_ok(notebook_path):
+                text = f"{text}| {notebook_path} | ✅ |\n"
+            else:
+                text = f"{text}| {notebook_path} | ❌ |\n"
+
+        return text
+
+    def _is_file_ok(self, path: str) -> bool:
+        for annotation in self.annotations:
+            if annotation.path == path:
+                return False
+        return True
diff --git a/src/timessquare/domain/nbhtml.py b/src/timessquare/domain/nbhtml.py
index a3e211e..d1ddd1e 100644
--- a/src/timessquare/domain/nbhtml.py
+++ b/src/timessquare/domain/nbhtml.py
@@ -13,7 +13,7 @@
 from pydantic import BaseModel
 from traitlets.config import Config
 
-from .noteburstjob import NoteburstJobResponseModel
+from .noteburst import NoteburstJobResponseModel
 from .page import PageInstanceIdModel, PageInstanceModel
 
diff --git a/src/timessquare/domain/noteburst.py
b/src/timessquare/domain/noteburst.py new file mode 100644 index 0000000..bbe5909 --- /dev/null +++ b/src/timessquare/domain/noteburst.py @@ -0,0 +1,137 @@ +"""Domain model for the noteburst service integration.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +from enum import Enum +from typing import Dict, Optional + +from httpx import AsyncClient +from pydantic import AnyHttpUrl, BaseModel + +from timessquare.config import config + + +class NoteburstJobModel(BaseModel): + """The domain model for a noteburst notebook execution job of a page's + notebook. + """ + + date_submitted: datetime + """The time when the execution job was submitted.""" + + job_url: AnyHttpUrl + """The URL of the noteburst job resource.""" + + +class NoteburstJobStatus(str, Enum): + """Enum of noteburst job statuses.""" + + deferred = "deferred" + queued = "queued" + in_progress = "in_progress" + complete = "complete" + not_found = "not_found" + + +class NoteburstJobResponseModel(BaseModel): + """A model for a subset of the noteburst response body for a notebook + execution request. + """ + + self_url: AnyHttpUrl + """The URL of this resource.""" + + enqueue_time: datetime + """Time when the job was added to the queue (UTC).""" + + status: NoteburstJobStatus + """The current status of the notebook execution job.""" + + ipynb: Optional[str] = None + """The executed notebook.""" + + start_time: Optional[datetime] = None + """Time when the notebook execution started (only set if result is + available). + """ + + finish_time: Optional[datetime] = None + """Time when the notebook execution finished (only set if result is + available). + """ + + success: Optional[bool] = None + """Whether the execution was successful or not (only set if result is + available). 
+    """
+
+    def to_job_model(self) -> NoteburstJobModel:
+        """Export to a `NoteburstJobModel` for storage."""
+        return NoteburstJobModel(
+            date_submitted=self.enqueue_time, job_url=self.self_url
+        )
+
+
+@dataclass
+class NoteburstApiResult:
+    """The result of a noteburst API request."""
+
+    data: Optional[NoteburstJobResponseModel]
+    """The parsed response body (set when the request succeeded)."""
+
+    status_code: int
+    """The HTTP status code of the noteburst response."""
+
+    error: Optional[str] = None
+    """The raw response body (set when the request failed)."""
+
+
+class NoteburstApi:
+    """A client for the noteburst notebook execution service API."""
+
+    def __init__(self, http_client: AsyncClient) -> None:
+        self._http_client = http_client
+
+    async def submit_job(
+        self, *, ipynb: str, kernel: str = "LSST", enable_retry: bool = True
+    ) -> NoteburstApiResult:
+        r = await self._http_client.post(
+            f"{config.environment_url}/noteburst/v1/notebooks/",
+            json={
+                "ipynb": ipynb,
+                "kernel_name": kernel,
+                "enable_retry": enable_retry,
+            },
+            headers=self._noteburst_auth_header,
+        )
+        if r.status_code == 202:
+            return NoteburstApiResult(
+                status_code=r.status_code,
+                data=NoteburstJobResponseModel.parse_obj(r.json()),
+                error=None,
+            )
+        else:
+            return NoteburstApiResult(
+                status_code=r.status_code, data=None, error=r.text
+            )
+
+    async def get_job(self, job_url: str) -> NoteburstApiResult:
+        r = await self._http_client.get(
+            job_url, headers=self._noteburst_auth_header
+        )
+        if r.status_code == 200:
+            return NoteburstApiResult(
+                status_code=r.status_code,
+                data=NoteburstJobResponseModel.parse_obj(r.json()),
+                error=None,
+            )
+        else:
+            return NoteburstApiResult(
+                status_code=r.status_code, data=None, error=r.text
+            )
+
+    @property
+    def _noteburst_auth_header(self) -> Dict[str, str]:
+        return {
+            "Authorization": (
+                f"Bearer {config.gafaelfawr_token.get_secret_value()}"
+            )
+        }
diff --git a/src/timessquare/domain/noteburstjob.py b/src/timessquare/domain/noteburstjob.py
deleted file mode 100644
index 516cc45..0000000
--- a/src/timessquare/domain/noteburstjob.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""Domain model for a noteburst job (that corresponds to the execution of a
-page's ipynb notebook for a given set of parameters.
-"""
-
-from __future__ import annotations
-
-from datetime import datetime
-from enum import Enum
-from typing import Any, Mapping, Optional
-
-from pydantic import AnyHttpUrl, BaseModel
-
-
-class NoteburstJobModel(BaseModel):
-    """The domain model for a noteburst notebook execution job of a page's
-    notebook.
-    """
-
-    date_submitted: datetime
-    """The time when the execution job was submitted."""
-
-    job_url: AnyHttpUrl
-    """The URL of the noteburst job resource."""
-
-    @classmethod
-    def from_noteburst_response(
-        cls, data: Mapping[str, Any]
-    ) -> NoteburstJobModel:
-        """Create a NoteburstJobModel from a noteburst job metadata
-        response.
-        """
-        d = NoteburstJobResponseModel.parse_obj(data)
-        return cls(
-            date_submitted=d.enqueue_time,
-            job_url=d.self_url,
-        )
-
-
-class NoteburstJobStatus(str, Enum):
-    """Enum of noteburst job statuses."""
-
-    deferred = "deferred"
-    queued = "queued"
-    in_progress = "in_progress"
-    complete = "complete"
-    not_found = "not_found"
-
-
-class NoteburstJobResponseModel(BaseModel):
-    """A model for a subset of the noteburst response body for a notebook
-    execution request.
- """ - - self_url: AnyHttpUrl - """The URL of this resource.""" - - enqueue_time: datetime - """Time when the job was added to the queue (UTC).""" - - status: NoteburstJobStatus - """The current status of the notebook execution job.""" - - ipynb: Optional[str] = None - """The executed notebook.""" - - start_time: Optional[datetime] = None - """Time when the notebook execution started (only set if result is - available). - """ - - finish_time: Optional[datetime] = None - """Time when the notebook execution finished (only set if result is - available). - """ - - success: Optional[bool] = None - """Whether the execution was successful or not (only set if result is - available). - """ diff --git a/src/timessquare/domain/page.py b/src/timessquare/domain/page.py index db8b829..53b980f 100644 --- a/src/timessquare/domain/page.py +++ b/src/timessquare/domain/page.py @@ -26,6 +26,8 @@ ParameterSchemaError, ) +from .noteburst import NoteburstJobModel + NB_VERSION = 4 """The notebook format version used for reading and writing notebooks. @@ -97,6 +99,11 @@ class PageModel: github_repo: Optional[str] = None """The GitHub repository name for GitHub-backed pages.""" + github_commit: Optional[str] = None + """The SHA of the commit this page corresponds to; only used for pages + associated with a GitHub Check Run. + """ + repository_path_prefix: Optional[str] = None """The repository path prefix, relative to the root of the repository.""" @@ -193,6 +200,7 @@ def create_from_repo( cache_ttl: Optional[int] = None, tags: Optional[List[str]] = None, authors: Optional[List[PersonModel]] = None, + github_commit: Optional[str] = None, ) -> PageModel: name = uuid4().hex # random slug for API uploads date_added = datetime.now(timezone.utc) @@ -209,6 +217,7 @@ def create_from_repo( cache_ttl=cache_ttl, github_owner=github_owner, github_repo=github_repo, + github_commit=github_commit, repository_path_prefix=repository_path_prefix, repository_display_path_prefix=repository_display_path_prefix, repository_path_stem=repository_path_stem, @@ -643,3 +652,17 @@ class PageInstanceModel(PageInstanceIdModel): page: PageModel """The page domain object.""" + + +@dataclass(kw_only=True) +class PageExecutionInfo(PageInstanceModel): + """A domain model for information about a new page, including information + about the noteburst job that processes the page's default instantiation. 
+ """ + + noteburst_status_code: int + + noteburst_error_message: Optional[str] = None + + noteburst_job: Optional[NoteburstJobModel] = None + """The noteburst job that is processing the new page's default form.""" diff --git a/src/timessquare/handlers/v1/handlers.py b/src/timessquare/handlers/v1/handlers.py index 27f7843..050dbab 100644 --- a/src/timessquare/handlers/v1/handlers.py +++ b/src/timessquare/handlers/v1/handlers.py @@ -163,7 +163,7 @@ async def post_page( authors = [a.to_domain() for a in request_data.authors] async with context.session.begin(): - page_name = await page_service.create_page_with_notebook_from_upload( + page_exec = await page_service.create_page_with_notebook_from_upload( title=request_data.title, ipynb=request_data.ipynb, uploader_username=username, @@ -172,10 +172,10 @@ async def post_page( description=request_data.description, cache_ttl=request_data.cache_ttl, ) - page = await page_service.get_page(page_name) + page = await page_service.get_page(page_exec.name) context.response.headers["location"] = context.request.url_for( - "get_page", page=page_name + "get_page", page=page_exec.name ) return Page.from_domain(page=page, request=context.request) diff --git a/src/timessquare/services/github/repo.py b/src/timessquare/services/github/repo.py index 4c673ef..39df242 100644 --- a/src/timessquare/services/github/repo.py +++ b/src/timessquare/services/github/repo.py @@ -6,17 +6,20 @@ from __future__ import annotations +import asyncio +from collections import deque from pathlib import PurePosixPath -from typing import List +from typing import Deque, List, Optional from gidgethub.httpx import GitHubAPI +from httpx import AsyncClient from structlog.stdlib import BoundLogger from timessquare.domain.githubapi import ( GitHubBlobModel, GitHubBranchModel, + GitHubCheckRunConclusion, GitHubCheckRunModel, - GitHubCheckRunStatus, GitHubRepositoryModel, ) from timessquare.domain.githubcheckout import ( @@ -24,14 +27,18 @@ RepositoryNotebookModel, RepositorySettingsFile, ) -from timessquare.domain.githubcheckrun import GitHubConfigsCheck +from timessquare.domain.githubcheckrun import ( + GitHubConfigsCheck, + NotebookExecutionsCheck, +) from timessquare.domain.githubwebhook import ( GitHubCheckRunEventModel, GitHubCheckSuiteEventModel, GitHubPullRequestModel, GitHubPushEventModel, ) -from timessquare.domain.page import PageModel +from timessquare.domain.noteburst import NoteburstJobStatus +from timessquare.domain.page import PageExecutionInfo, PageModel from ..page import PageService @@ -41,6 +48,8 @@ class GitHubRepoService: Parameters ---------- + http_client : `AsyncClient` + An httpx client. github_client : `GitHubAPI` A GidgetHub API client that is authenticated as a GitHub app installation. 
@@ -54,10 +63,12 @@ class GitHubRepoService: def __init__( self, + http_client: AsyncClient, github_client: GitHubAPI, page_service: PageService, logger: BoundLogger, ) -> None: + self._http_client = http_client self._github_client = github_client self._page_service = page_service self._logger = logger @@ -118,21 +129,6 @@ async def request_github_branch( ) return GitHubBranchModel.parse_obj(data) - async def create_checkout( - self, *, repo: GitHubRepositoryModel, git_ref: str, head_sha: str - ) -> GitHubRepositoryCheckout: - settings = await self.load_settings_file(repo=repo, git_ref=head_sha) - checkout = GitHubRepositoryCheckout( - owner_name=repo.owner.login, - name=repo.name, - settings=settings, - git_ref=git_ref, - head_sha=head_sha, - trees_url=repo.trees_url, - blobs_url=repo.blobs_url, - ) - return checkout - async def load_settings_file( self, *, repo: GitHubRepositoryModel, git_ref: str ) -> RepositorySettingsFile: @@ -147,7 +143,10 @@ async def load_settings_file( file_content = content_data.decode() return RepositorySettingsFile.parse_yaml(file_content) - async def sync_checkout(self, checkout: GitHubRepositoryCheckout) -> None: + async def sync_checkout( + self, + checkout: GitHubRepositoryCheckout, + ) -> None: """Sync a "checkout" of a GitHub repository. Notes @@ -167,7 +166,8 @@ async def sync_checkout(self, checkout: GitHubRepositoryCheckout) -> None: existing_pages = { page.display_path: page for page in await self._page_service.get_pages_for_repo( - owner=checkout.owner_name, name=checkout.name + owner=checkout.owner_name, + name=checkout.name, ) } found_display_paths: List[str] = [] @@ -217,9 +217,12 @@ async def sync_checkout(self, checkout: GitHubRepositoryCheckout) -> None: self._logger.debug( "Creating new page for notebook", display_path=display_path ) - await self.create_new_page( + page = await self.create_page( checkout=checkout, notebook=notebook ) + # pre-execute that page + page_svc = self._page_service + await page_svc.execute_page_with_defaults(page) deleted_paths = set(existing_pages.keys()) - set(found_display_paths) self._logger.info("Paths to delete", count=len(deleted_paths)) @@ -227,13 +230,26 @@ async def sync_checkout(self, checkout: GitHubRepositoryCheckout) -> None: page = existing_pages[deleted_path] await self._page_service.soft_delete_page(page) - async def create_new_page( + async def create_page( self, *, checkout: GitHubRepositoryCheckout, notebook: RepositoryNotebookModel, - ) -> None: - """Create a new page based on the notebook tree ref.""" + commit_sha: Optional[str] = None, + ) -> PageModel: + """Create a new page based on the notebook tree ref. + + Parameters + ---------- + checkout : `GitHubRepositoryCheckout` + The repository checkout + notebook : `RepositoryNotebookModel` + The notebook from the repository that is the basis for the page. + commit_sha : `str`, optional + If set, this page is associated with a specific commit, rather than + the default view of a repository. Commit-specific pages are used + to show previews for pull requests and GitHub Check Run results. 
+ """ display_path_prefix = notebook.get_display_path_prefix(checkout) source_path = PurePosixPath(notebook.notebook_source_path) @@ -260,8 +276,10 @@ async def create_new_page( cache_ttl=notebook.sidecar.cache_ttl, tags=notebook.sidecar.tags, authors=notebook.sidecar.export_authors(), + github_commit=commit_sha, ) - await self._page_service.add_page(page) + await self._page_service.add_page_to_store(page) + return page async def update_page( self, *, notebook: RepositoryNotebookModel, page: PageModel @@ -291,77 +309,199 @@ async def update_page( page.repository_source_sha = notebook.notebook_git_tree_sha page.repository_sidecar_sha = notebook.sidecar_git_tree_sha - await self._page_service.update_page(page) + await self._page_service.update_page_and_execute(page) - async def create_check_run( + async def initiate_check_runs( self, *, payload: GitHubCheckSuiteEventModel ) -> None: - """Create a new GitHub check run suite, given a new Check Suite. + """Create a new GitHub check runs, given a new Check Suite. + Notes + ----- NOTE: currently we're assuming that check suites are automatically created when created a check run. See https://docs.github.com/en/rest/checks/runs#create-a-check-run """ - await self._create_yaml_config_check_run( + # Run the configurations check + config_check = await GitHubConfigsCheck.create_check_run_and_validate( + github_client=self._github_client, repo=payload.repository, head_sha=payload.check_suite.head_sha, ) + await config_check.submit_conclusion(github_client=self._github_client) - async def create_rerequested_check_run( - self, *, payload: GitHubCheckRunEventModel - ) -> None: - """Run a GitHub check run that was rerequested.""" - await self._create_yaml_config_check_run( - repo=payload.repository, - head_sha=payload.check_run.head_sha, - ) - - async def _create_yaml_config_check_run( - self, *, repo: GitHubRepositoryModel, head_sha: str - ) -> None: + repo = payload.repository data = await self._github_client.post( "repos/{owner}/{repo}/check-runs", url_vars={"owner": repo.owner.login, "repo": repo.name}, - data={"name": "YAML configurations", "head_sha": head_sha}, + data={ + "name": NotebookExecutionsCheck.title, + "head_sha": payload.check_suite.head_sha, + "external_id": NotebookExecutionsCheck.external_id, + }, ) check_run = GitHubCheckRunModel.parse_obj(data) - await self._compute_check_run(check_run=check_run, repo=repo) + if config_check.conclusion == GitHubCheckRunConclusion.success: + await self.run_notebook_check_run( + check_run=check_run, + repo=payload.repository, + ) + else: + # Set the notebook check run to "neutral" indicating that we're + # skipping this check. 
+            await self._github_client.patch(
+                check_run.url,
+                data={"conclusion": GitHubCheckRunConclusion.neutral},
+            )

-    async def compute_check_run(
+    async def create_rerequested_check_run(
         self, *, payload: GitHubCheckRunEventModel
     ) -> None:
-        """Compute a GitHub check run."""
-        await self._compute_check_run(
-            repo=payload.repository, check_run=payload.check_run
-        )
+        """Run a GitHub check run that was rerequested."""
+        external_id = payload.check_run.external_id
+        if external_id == GitHubConfigsCheck.external_id:
+            config_check = await GitHubConfigsCheck.validate_repo(
+                github_client=self._github_client,
+                repo=payload.repository,
+                head_sha=payload.check_run.head_sha,
+                check_run=payload.check_run,
+            )
+            await config_check.submit_conclusion(
+                github_client=self._github_client
+            )
+        elif external_id == NotebookExecutionsCheck.external_id:
+            await self.run_notebook_check_run(
+                check_run=payload.check_run,
+                repo=payload.repository,
+            )

-    async def _compute_check_run(
-        self, *, repo: GitHubRepositoryModel, check_run: GitHubCheckRunModel
+    async def run_notebook_check_run(
+        self, *, check_run: GitHubCheckRunModel, repo: GitHubRepositoryModel
     ) -> None:
-        """Compute the YAML validation check run."""
-        # Set the check run to in-progress
-        await self._github_client.patch(
-            check_run.url,
-            data={"status": GitHubCheckRunStatus.in_progress},
-        )
+        """Run the notebook execution check.
+
+        This check actually creates/updates Page resources, hence it is run
+        at the service layer, rather than in a domain model.
+        """
+        check = NotebookExecutionsCheck(check_run)
+        await check.submit_in_progress(self._github_client)
+        self._logger.debug("Notebook executions check in progress")

-        config_check = await GitHubConfigsCheck.validate_repo(
+        checkout = await GitHubRepositoryCheckout.create(
             github_client=self._github_client,
             repo=repo,
             head_sha=check_run.head_sha,
         )
-        # Set the check run to complete
-        await self._github_client.patch(
-            check_run.url,
-            data={
-                "status": GitHubCheckRunStatus.completed,
-                "conclusion": config_check.conclusion,
-                "output": {
-                    "title": config_check.title,
-                    "summary": config_check.summary,
-                    "text": config_check.text,
-                    "annotations": config_check.export_truncated_annotations(),
-                },
-            },
-        )
+        # Look for any existing pages for this repo's SHA. If they already
+        # exist, it indicates the check is being re-run, so we'll delete
+        # those old pages for this commit.
+        for page in await self._page_service.get_pages_for_repo(
+            owner=checkout.owner_name,
+            name=checkout.name,
+            commit=check_run.head_sha,
+        ):
+            await self._page_service.soft_delete_page(page)
+            self._logger.debug(
+                "Deleted existing page for notebook check run",
+                page_name=page.name,
+            )
+
+        tree = await checkout.get_git_tree(self._github_client)
+        pending_pages: Deque[PageExecutionInfo] = deque()
+        for notebook_ref in tree.find_notebooks(checkout.settings):
+            self._logger.debug(
+                "Started notebook execution for notebook",
+                path=notebook_ref.notebook_source_path,
+            )
+            notebook = await checkout.load_notebook(
+                notebook_ref=notebook_ref, github_client=self._github_client
+            )
+            page = await self.create_page(
+                checkout=checkout,
+                notebook=notebook,
+                commit_sha=check_run.head_sha,
+            )
+            page_execution_info = (
+                await self._page_service.execute_page_with_defaults(
+                    page, enable_retry=False  # fail quickly for CI
+                )
+            )
+            if page_execution_info.noteburst_error_message is not None:
+                self._logger.debug(
+                    "Got immediate noteburst error",
+                    path=notebook_ref.notebook_source_path,
+                    error_message=page_execution_info.noteburst_error_message,
+                )
+                check.report_noteburst_failure(page_execution_info)
+            else:
+                pending_pages.append(page_execution_info)
+                self._logger.debug(
+                    "Noteburst result is pending",
+                    path=notebook_ref.notebook_source_path,
+                )
+
+        await asyncio.sleep(5.0)  # pause for noteburst to work
+
+        # Poll for noteburst results
+        # TODO add a timeout to set a null result on the check run if
+        # noteburst doesn't clear the jobs in a reasonable time frame
+        checked_page_count = 0
+        while len(pending_pages) > 0:
+            checked_page_count += 1
+            page_execution = pending_pages.popleft()
+            self._logger.debug(
+                "Polling noteburst job status",
+                path=page_execution.page.repository_source_path,
+            )
+            assert page_execution.noteburst_job is not None
+            r = await self._page_service.noteburst_api.get_job(
+                page_execution.noteburst_job.job_url
+            )
+            if r.status_code >= 400:
+                # This is actually an issue with the noteburst service
+                # rather than the notebook; consider adding that nuance to
+                # the GitHub Check
+                check.report_noteburst_failure(page_execution)
+                continue
+
+            job = r.data
+            assert job is not None
+            if job.status == NoteburstJobStatus.complete:
+                self._logger.debug(
+                    "Noteburst job is complete",
+                    path=page_execution.page.repository_source_path,
+                )
+                check.report_noteburst_completion(
+                    page_execution=page_execution, job_result=job
+                )
+                # TODO this is where we could render that noteburst result
+                # to HTML automatically
+            else:
+                # Throw it back on the queue
+                self._logger.debug(
+                    "Continuing to check noteburst job",
+                    path=page_execution.page.repository_source_path,
+                )
+                pending_pages.append(page_execution)
+
+            # Once we've gone through all the pages once, pause
+            if checked_page_count >= len(pending_pages):
+                self._logger.debug(
+                    "Pause polling of noteburst jobs",
+                    checked_page_count=checked_page_count,
+                )
+                await asyncio.sleep(2)
+                self._logger.debug(
+                    "Pause finished",
+                    checked_page_count=checked_page_count,
+                )
+                checked_page_count = 0
+            else:
+                self._logger.debug(
+                    "Continuing to poll noteburst jobs",
+                    pending_count=len(pending_pages),
+                    checked_page_count=checked_page_count,
+                )
+
+        await check.submit_conclusion(github_client=self._github_client)
diff --git a/src/timessquare/services/page.py b/src/timessquare/services/page.py
index 9536230..506442e 100644
--- a/src/timessquare/services/page.py
+++ b/src/timessquare/services/page.py
@@ -12,12 +12,13 @@
 from timessquare.config import config
 from timessquare.domain.githubtree import GitHubNode
 from timessquare.domain.nbhtml import NbDisplaySettings, NbHtmlKey, NbHtmlModel
-from timessquare.domain.noteburstjob import (
-    NoteburstJobModel,
+from timessquare.domain.noteburst import (
+    NoteburstApi,
     NoteburstJobResponseModel,
     NoteburstJobStatus,
 )
 from timessquare.domain.page import (
+    PageExecutionInfo,
     PageInstanceModel,
     PageModel,
     PageSummaryModel,
@@ -53,6 +54,7 @@ def __init__(
         self._job_store = job_store
         self._http_client = http_client
         self._logger = logger
+        self.noteburst_api = NoteburstApi(http_client=http_client)

     async def create_page_with_notebook_from_upload(
         self,
@@ -63,7 +65,7 @@ async def create_page_with_notebook_from_upload(
         cache_ttl: Optional[int] = None,
         tags: Optional[List[str]] = None,
         authors: Optional[List[PersonModel]] = None,
-    ) -> str:
+    ) -> PageExecutionInfo:
         """Create a page resource given the parameterized Jupyter Notebook
         content.
         """
@@ -76,11 +78,27 @@ async def create_page_with_notebook_from_upload(
             tags=tags,
             authors=authors,
         )
-        return await self.add_page(page)
+        return await self.add_page_and_execute(page)

-    async def add_page(self, page: PageModel, *, execute: bool = True) -> str:
+    async def add_page_to_store(self, page: PageModel) -> None:
         """Add a page to the page store.

+        Parameters
+        ----------
+        page: `PageModel`
+            The page model.
+
+        Notes
+        -----
+        For API uploads, use `create_page_with_notebook_from_upload` instead.
+        """
+        self._page_store.add(page)
+
+    async def add_page_and_execute(
+        self, page: PageModel, enable_retry: bool = True
+    ) -> PageExecutionInfo:
+        """Add a page to the page store and execute it with defaults.
+
         Parameters
         ----------
         page: `PageModel`
             The page model.
@@ -93,10 +111,10 @@ async def add_page(self, page: PageModel, *, execute: bool = True) -> str:
         -----
         For API uploads, use `create_page_with_notebook_from_upload` instead.
""" - self._page_store.add(page) - if execute: - await self._request_notebook_execution_for_page_defaults(page) - return page.name + await self.add_page_to_store(page) + return await self.execute_page_with_defaults( + page, enable_retry=enable_retry + ) async def get_page(self, name: str) -> PageModel: """Get the page from the data store, given its name.""" @@ -117,25 +135,33 @@ async def get_page_summaries(self) -> List[PageSummaryModel]: return await self._page_store.list_page_summaries() async def get_pages_for_repo( - self, owner: str, name: str + self, owner: str, name: str, commit: Optional[str] = None ) -> List[PageModel]: """Get all pages backed by a specific GitHub repository.""" return await self._page_store.list_pages_for_repository( - owner=owner, name=name + owner=owner, name=name, commit=commit ) async def get_github_tree(self) -> List[GitHubNode]: """Get the tree of GitHub-backed pages.""" return await self._page_store.get_github_tree() - async def update_page(self, page: PageModel) -> None: + async def update_page_in_store(self, page: PageModel) -> None: """Update the page in the database.""" await self._page_store.update_page(page) - await self._request_notebook_execution_for_page_defaults(page) + await self.execute_page_with_defaults(page) + + async def update_page_and_execute( + self, page: PageModel, enable_retry: bool = True + ) -> PageExecutionInfo: + await self.update_page_in_store(page) + return await self.execute_page_with_defaults( + page, enable_retry=enable_retry + ) - async def _request_notebook_execution_for_page_defaults( - self, page: PageModel - ) -> None: + async def execute_page_with_defaults( + self, page: PageModel, enable_retry: bool = True + ) -> PageExecutionInfo: """Request noteburst execution of with page's default values. This is useful for the `add_page` and `update_page` methods to start @@ -145,7 +171,9 @@ async def _request_notebook_execution_for_page_defaults( page_instance = PageInstanceModel( name=page.name, values=resolved_values, page=page ) - await self._request_noteburst_execution(page_instance) + return await self.request_noteburst_execution( + page_instance, enable_retry=enable_retry + ) async def soft_delete_page(self, page: PageModel) -> None: """Soft delete a page by setting its date_deleted field.""" @@ -256,7 +284,7 @@ async def _get_html_from_noteburst_job( self._logger.debug("No existing noteburst job available") # A record of a noteburst job is not available. Send a request # to noteburst. - await self._request_noteburst_execution(page_instance) + await self.request_noteburst_execution(page_instance) return None r = await self._http_client.get( @@ -289,7 +317,7 @@ async def _get_html_from_noteburst_job( "Got a 404 from a noteburst job", job_url=job.job_url ) await self._job_store.delete(page_instance) - await self._request_noteburst_execution(page_instance) + await self.request_noteburst_execution(page_instance) else: # server error from noteburst self._logger.warning( @@ -300,38 +328,49 @@ async def _get_html_from_noteburst_job( ) return None - async def _request_noteburst_execution( - self, page_instance: PageInstanceModel - ) -> None: + async def request_noteburst_execution( + self, page_instance: PageInstanceModel, enable_retry: bool = True + ) -> PageExecutionInfo: """Request a notebook execution for a given page and parameters, and store the job. 
""" ipynb = page_instance.page.render_parameters(page_instance.values) - r = await self._http_client.post( - f"{config.environment_url}/noteburst/v1/notebooks/", - json={ - "ipynb": ipynb, - "kernel_name": "LSST", # TODO make a setting per page? - }, - headers=self._noteburst_auth_header, + r = await self.noteburst_api.submit_job( + ipynb=ipynb, enable_retry=enable_retry ) - if r.status_code != 202: + if r.status_code != 202 or r.data is None: self._logger.warning( "Error requesting noteburst execution", noteburst_status=r.status_code, - noteburst_body=r.text, + noteburst_body=r.error, ) - return None + return PageExecutionInfo( + name=page_instance.name, + values=page_instance.values, + page=page_instance.page, + noteburst_job=None, + noteburst_status_code=r.status_code, + noteburst_error_message=r.error, + ) - response_data = r.json() - job = NoteburstJobModel.from_noteburst_response(response_data) - await self._job_store.store_job(job=job, page_id=page_instance) + await self._job_store.store_job( + job=r.data.to_job_model(), page_id=page_instance + ) self._logger.info( "Requested noteburst notebook execution", page_name=page_instance.name, parameters=page_instance.values, - job_url=job.job_url, + job_url=r.data.self_url, + ) + assert r.data is not None + return PageExecutionInfo( + name=page_instance.name, + values=page_instance.values, + page=page_instance.page, + noteburst_job=r.data.to_job_model(), + noteburst_status_code=r.status_code, + noteburst_error_message=r.error, ) async def _create_html_matrix( diff --git a/src/timessquare/storage/noteburstjobstore.py b/src/timessquare/storage/noteburstjobstore.py index 3b0a12c..77fd126 100644 --- a/src/timessquare/storage/noteburstjobstore.py +++ b/src/timessquare/storage/noteburstjobstore.py @@ -4,7 +4,7 @@ import aioredis -from timessquare.domain.noteburstjob import NoteburstJobModel +from timessquare.domain.noteburst import NoteburstJobModel from timessquare.domain.page import PageInstanceIdModel from .redisbase import RedisStore @@ -15,7 +15,7 @@ class NoteburstJobStore(RedisStore[NoteburstJobModel]): requests for a given page and set of parameters. The associated domain model is - `timessquare.domain.noteburstjob.NoteburstJobModel`. + `timessquare.domain.noteburst.NoteburstJobModel`. """ def __init__(self, redis: aioredis.Redis) -> None: @@ -34,7 +34,7 @@ async def store_job( Parameters ---------- - job : `timessquare.domain.noteburstjob.NoteburstJobModel` + job : `timessquare.domain.noteburst.NoteburstJobModel` The job record. page_id : `timessquare.domain.page.PageInstanceIdModel` Identifier of the page instance, composed of the page's name diff --git a/src/timessquare/storage/page.py b/src/timessquare/storage/page.py index e16545f..2124b68 100644 --- a/src/timessquare/storage/page.py +++ b/src/timessquare/storage/page.py @@ -58,6 +58,7 @@ def add(self, page: PageModel) -> None: cache_ttl=page.cache_ttl, github_owner=page.github_owner, github_repo=page.github_repo, + github_commit=page.github_commit, repository_path_prefix=page.repository_path_prefix, repository_display_path_prefix=page.repository_display_path_prefix, repository_path_stem=page.repository_path_stem, @@ -115,10 +116,17 @@ async def get(self, name: str) -> Optional[PageModel]: return self._rehydrate_page_from_sql(sql_page) async def get_github_backed_page( - self, display_path: str + self, display_path: str, commit: Optional[str] = None ) -> Optional[PageModel]: """Get a GitHub-backed page based on the display path, or get `None` if the page does not exist. 
+
+        Parameters
+        ----------
+        display_path : str
+            The GitHub display path, formatted ``owner/repo/file_path``.
+        commit : str, optional
+            The Git commit, if this page is associated with a GitHub Check
+            Run.
         """
         path_parts = display_path.split("/")
         github_owner = path_parts[0]
@@ -135,8 +143,15 @@ async def get_github_backed_page(
             .where(SqlPage.github_repo == github_repo)
             .where(SqlPage.repository_path_stem == path_stem)
             .where(SqlPage.repository_display_path_prefix == path_prefix)
-            .limit(1)
+            .where(SqlPage.date_deleted == None)  # noqa: E711
         )
+        if commit:
+            statement = statement.where(SqlPage.github_commit == commit)
+        else:
+            statement = statement.where(
+                SqlPage.github_commit == None  # noqa: E711
+            )
+        statement = statement.limit(1)
         sql_page = await self._session.scalar(statement)
         if sql_page is None:
             return None
@@ -144,15 +159,31 @@ async def get_github_backed_page(
         return self._rehydrate_page_from_sql(sql_page)

     async def list_pages_for_repository(
-        self, *, owner: str, name: str
+        self, *, owner: str, name: str, commit: Optional[str] = None
     ) -> List[PageModel]:
-        """Get all pages backed by a specific GitHub repository."""
+        """Get all pages backed by a specific GitHub repository.
+
+        Parameters
+        ----------
+        owner : str
+            The login name of the repository owner.
+        name : str
+            The repository name.
+        commit : str, optional
+            The commit, if listing pages for a specific GitHub Check Run.
+        """
         statement = (
             select(SqlPage)
             .where(SqlPage.github_owner == owner)
             .where(SqlPage.github_repo == name)
             .where(SqlPage.date_deleted == None)  # noqa: E711
         )
+        if commit:
+            statement = statement.where(SqlPage.github_commit == commit)
+        else:
+            statement = statement.where(
+                SqlPage.github_commit == None  # noqa: E711
+            )
         result = await self._session.execute(statement)
         return [
             self._rehydrate_page_from_sql(sql_page)
@@ -188,6 +219,7 @@ def _rehydrate_page_from_sql(self, sql_page: SqlPage) -> PageModel:
             cache_ttl=sql_page.cache_ttl,
             github_owner=sql_page.github_owner,
             github_repo=sql_page.github_repo,
+            github_commit=sql_page.github_commit,
             repository_path_prefix=sql_page.repository_path_prefix,
             repository_display_path_prefix=(
                 sql_page.repository_display_path_prefix
@@ -227,6 +259,7 @@ async def get_github_tree(self) -> List[GitHubNode]:
         owners_statement = (
             select(SqlPage.github_owner)
             .where(SqlPage.date_deleted == None)  # noqa: E711
+            .where(SqlPage.github_commit == None)  # noqa: E711
             .distinct(SqlPage.github_owner)
         )
         result = await self._session.execute(owners_statement)
@@ -248,6 +281,7 @@ async def _generate_node_for_owner(self, owner_name: str) -> GitHubNode:
                 SqlPage.repository_path_stem,
             )
             .where(SqlPage.date_deleted == None)  # noqa: E711
+            .where(SqlPage.github_commit == None)  # noqa: E711
             .where(SqlPage.github_owner == owner_name)
             .order_by(
                 SqlPage.github_owner.asc(),
diff --git a/src/timessquare/worker/functions/create_check_run.py b/src/timessquare/worker/functions/create_check_run.py
index 3a5d5d4..96f0816 100644
--- a/src/timessquare/worker/functions/create_check_run.py
+++ b/src/timessquare/worker/functions/create_check_run.py
@@ -29,5 +29,5 @@ async def create_check_run(
         db_session=db_session,
     )
     async with db_session.begin():
-        await github_repo_service.create_check_run(payload=payload)
+        await github_repo_service.initiate_check_runs(payload=payload)
     return "done"
diff --git a/src/timessquare/worker/servicefactory.py b/src/timessquare/worker/servicefactory.py
index 26829d2..08bc551 100644
--- a/src/timessquare/worker/servicefactory.py
+++ b/src/timessquare/worker/servicefactory.py
@@ -32,7 +32,10 @@ async def create_github_repo_service(
         http_client=http_client, logger=logger, db_session=db_session
     )
     return GitHubRepoService(
-        github_client=github_client, page_service=page_service, logger=logger
+        http_client=http_client,
+        github_client=github_client,
+        page_service=page_service,
+        logger=logger,
     )
diff --git a/tests/handlers/v1/github_test.py b/tests/handlers/v1/github_test.py
index 83aa029..30ee30b 100644
--- a/tests/handlers/v1/github_test.py
+++ b/tests/handlers/v1/github_test.py
@@ -40,7 +40,7 @@ async def test_github(client: AsyncClient) -> None:
         logger=get_logger(),
     )

-    await page_service.add_page(
+    await page_service.add_page_to_store(
         PageModel(
             name="1",
             ipynb=demo_path.read_text(),
@@ -57,10 +57,9 @@ async def test_github(client: AsyncClient) -> None:
             repository_source_extension=".ipynb",
             repository_source_sha="1" * 40,
             repository_sidecar_sha="1" * 40,
-        ),
-        execute=False,
+        )
     )
-    await page_service.add_page(
+    await page_service.add_page_to_store(
         PageModel(
             name="2",
             ipynb=demo_path.read_text(),
@@ -78,9 +77,8 @@ async def test_github(client: AsyncClient) -> None:
             repository_source_sha="1" * 40,
             repository_sidecar_sha="1" * 40,
         ),
-        execute=False,
     )
-    await page_service.add_page(
+    await page_service.add_page_to_store(
         PageModel(
             name="3",
             ipynb=demo_path.read_text(),
@@ -98,7 +96,6 @@ async def test_github(client: AsyncClient) -> None:
             repository_source_sha="1" * 40,
             repository_sidecar_sha="1" * 40,
         ),
-        execute=False,
     )

     await session.commit()
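The polling loop in `run_notebook_check_run` above uses a deque as a simple round-robin queue: incomplete noteburst jobs are re-queued, and after each full pass over the queue the worker sleeps before polling again. A minimal self-contained sketch of that pattern, with an `is_complete` callable standing in for the noteburst job-status request (illustrative only, not part of this changeset):

    import asyncio
    from collections import deque
    from typing import Awaitable, Callable, Deque


    async def poll_until_done(
        pending: Deque[str],
        is_complete: Callable[[str], Awaitable[bool]],
    ) -> None:
        checked_count = 0
        while pending:
            checked_count += 1
            job = pending.popleft()
            if await is_complete(job):
                # Finished: report the result and drop the job.
                continue
            # Still running: throw it back on the queue.
            pending.append(job)
            if checked_count >= len(pending):
                # Completed one pass over the queue; pause before the
                # next round of polling.
                await asyncio.sleep(2)
                checked_count = 0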