diff --git a/poetry.lock b/poetry.lock index 0bfc4239..1e65b36e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -94,17 +94,6 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -category = "main" -optional = false -python-versions = "*" - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - [[package]] name = "coverage" version = "5.4" @@ -191,7 +180,7 @@ smmap = ">=3.0.1,<4" [[package]] name = "gitpython" -version = "3.1.12" +version = "3.1.13" description = "Python Git Library" category = "main" optional = true @@ -309,7 +298,7 @@ i18n = ["Babel (>=0.8)"] [[package]] name = "joblib" -version = "1.0.0" +version = "1.0.1" description = "Lightweight pipelining with Python functions" category = "main" optional = true @@ -633,7 +622,7 @@ dev = ["pre-commit", "tox"] [[package]] name = "portray" -version = "1.5.1" +version = "1.5.2" description = "Your Project with Great Documentation" category = "main" optional = true @@ -658,37 +647,24 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[[package]] -name = "pydantic" -version = "1.7.3" -description = "Data validation and settings management using python 3.6 type hinting" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] -typing_extensions = ["typing-extensions (>=3.7.2)"] - [[package]] name = "pygments" -version = "2.7.4" +version = "2.8.0" description = "Pygments is a syntax highlighting package written in Python." category = "main" -optional = false +optional = true python-versions = ">=3.5" [[package]] name = "pylint" -version = "2.6.0" +version = "2.6.2" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.5.*" [package.dependencies] -astroid = ">=2.4.0,<=2.5" +astroid = ">=2.4.0,<2.5" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" @@ -843,26 +819,9 @@ urllib3 = ">=1.21.1,<1.27" security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -[[package]] -name = "rich" -version = "9.10.0" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" -optional = false -python-versions = ">=3.6,<4.0" - -[package.dependencies] -colorama = ">=0.4.0,<0.5.0" -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" -typing-extensions = ">=3.7.4,<4.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] - [[package]] name = "scipy" -version = "1.6.0" +version = "1.6.1" description = "SciPy: Scientific Library for Python" category = "main" optional = false @@ -889,7 +848,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "tabulate" -version = "0.8.7" +version = "0.8.8" description = "Pretty-print tabular data" category = "dev" optional = false @@ -941,7 +900,7 @@ python-versions = ">= 3.5" [[package]] name = "tqdm" -version = "4.56.0" +version = "4.57.0" description = "Fast, Extensible Progress Meter" category = "main" optional = true @@ -1025,7 +984,7 @@ docs = ["portray"] [metadata] lock-version = "1.1" python-versions = ">=3.7,<3.9" # go back to "^3.7" once numba gets compatible with 3.9 -content-hash = 
"5f01c5900c83eb7fa4b804f972d8f4407b34c893dbc1fbf871474823a8ee7a08" +content-hash = "d64d70e91f451f8835f975156cb9c69d1083e653b0897f298186ef5324913d13" [metadata.files] appdirs = [ @@ -1064,10 +1023,6 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] -commonmark = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] coverage = [ {file = "coverage-5.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6d9c88b787638a451f41f97446a1c9fd416e669b4d9717ae4615bd29de1ac135"}, {file = "coverage-5.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:66a5aae8233d766a877c5ef293ec5ab9520929c2578fd2069308a98b7374ea8c"}, @@ -1189,8 +1144,8 @@ gitdb = [ {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, ] gitpython = [ - {file = "GitPython-3.1.12-py3-none-any.whl", hash = "sha256:867ec3dfb126aac0f8296b19fb63b8c4a399f32b4b6fafe84c4b10af5fa9f7b5"}, - {file = "GitPython-3.1.12.tar.gz", hash = "sha256:42dbefd8d9e2576c496ed0059f3103dcef7125b9ce16f9d5f9c834aed44a1dac"}, + {file = "GitPython-3.1.13-py3-none-any.whl", hash = "sha256:c5347c81d232d9b8e7f47b68a83e5dc92e7952127133c5f2df9133f2c75a1b29"}, + {file = "GitPython-3.1.13.tar.gz", hash = "sha256:8621a7e777e276a5ec838b59280ba5272dd144a18169c36c903d8b38b99f750a"}, ] hug = [ {file = "hug-2.6.1-py2.py3-none-any.whl", hash = "sha256:31c8fc284f81377278629a4b94cbb619ae9ce829cdc2da9564ccc66a121046b4"}, @@ -1224,8 +1179,8 @@ jinja2 = [ {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] joblib = [ - {file = "joblib-1.0.0-py3-none-any.whl", hash = "sha256:75ead23f13484a2a414874779d69ade40d4fa1abe62b222a23cd50d4bc822f6f"}, - {file = "joblib-1.0.0.tar.gz", hash = "sha256:7ad866067ac1fdec27d51c8678ea760601b70e32ff1881d4dc8e1171f2b64b24"}, + {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, + {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, ] kiwisolver = [ {file = "kiwisolver-1.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9"}, @@ -1554,44 +1509,20 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] portray = [ - {file = "portray-1.5.1-py3-none-any.whl", hash = "sha256:72b2c53a091c35fcafec40656238b1929030a054edbc3fe8fa338d0b6ddc0736"}, - {file = "portray-1.5.1.tar.gz", hash = "sha256:7a65b2276d991cb103d1e5cca7c5c0e98eb38ff5f9b1c009ba71e91ce3c72fe2"}, + {file = "portray-1.5.2-py3-none-any.whl", hash = "sha256:40c6dfbff392f16a5c56c93aa79356b4ee25aa2d00579d61b560445ec4d3dc47"}, + {file = "portray-1.5.2.tar.gz", hash = "sha256:c8a3d489cfc95df6922868971996bb23584d37e46aa55e7ab9b6d29aa49f109d"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] -pydantic = [ - {file = 
"pydantic-1.7.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c59ea046aea25be14dc22d69c97bee629e6d48d2b2ecb724d7fe8806bf5f61cd"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a4143c8d0c456a093387b96e0f5ee941a950992904d88bc816b4f0e72c9a0009"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d8df4b9090b595511906fa48deda47af04e7d092318bfb291f4d45dfb6bb2127"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:514b473d264671a5c672dfb28bdfe1bf1afd390f6b206aa2ec9fed7fc592c48e"}, - {file = "pydantic-1.7.3-cp36-cp36m-win_amd64.whl", hash = "sha256:dba5c1f0a3aeea5083e75db9660935da90216f8a81b6d68e67f54e135ed5eb23"}, - {file = "pydantic-1.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59e45f3b694b05a69032a0d603c32d453a23f0de80844fb14d55ab0c6c78ff2f"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5b24e8a572e4b4c18f614004dda8c9f2c07328cb5b6e314d6e1bbd536cb1a6c1"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:b2b054d095b6431cdda2f852a6d2f0fdec77686b305c57961b4c5dd6d863bf3c"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:025bf13ce27990acc059d0c5be46f416fc9b293f45363b3d19855165fee1874f"}, - {file = "pydantic-1.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6e3874aa7e8babd37b40c4504e3a94cc2023696ced5a0500949f3347664ff8e2"}, - {file = "pydantic-1.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e682f6442ebe4e50cb5e1cfde7dda6766fb586631c3e5569f6aa1951fd1a76ef"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:185e18134bec5ef43351149fe34fda4758e53d05bb8ea4d5928f0720997b79ef"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f5b06f5099e163295b8ff5b1b71132ecf5866cc6e7f586d78d7d3fd6e8084608"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:24ca47365be2a5a3cc3f4a26dcc755bcdc9f0036f55dcedbd55663662ba145ec"}, - {file = "pydantic-1.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:d1fe3f0df8ac0f3a9792666c69a7cd70530f329036426d06b4f899c025aca74e"}, - {file = "pydantic-1.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f6864844b039805add62ebe8a8c676286340ba0c6d043ae5dea24114b82a319e"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ecb54491f98544c12c66ff3d15e701612fc388161fd455242447083350904730"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:ffd180ebd5dd2a9ac0da4e8b995c9c99e7c74c31f985ba090ee01d681b1c4b95"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8d72e814c7821125b16f1553124d12faba88e85405b0864328899aceaad7282b"}, - {file = "pydantic-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:475f2fa134cf272d6631072554f845d0630907fce053926ff634cc6bc45bf1af"}, - {file = "pydantic-1.7.3-py3-none-any.whl", hash = "sha256:38be427ea01a78206bcaf9a56f835784afcba9e5b88fbdce33bbbfbcd7841229"}, - {file = "pydantic-1.7.3.tar.gz", hash = "sha256:213125b7e9e64713d16d988d10997dabc6a1f73f3991e1ff8e35ebb1409c7dc9"}, -] pygments = [ - {file = "Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, - {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, + {file = "Pygments-2.8.0-py3-none-any.whl", hash = "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88"}, + {file = "Pygments-2.8.0.tar.gz", hash = 
"sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0"}, ] pylint = [ - {file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"}, - {file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"}, + {file = "pylint-2.6.2-py3-none-any.whl", hash = "sha256:e71c2e9614a4f06e36498f310027942b0f4f2fde20aebb01655b31edc63b9eaf"}, + {file = "pylint-2.6.2.tar.gz", hash = "sha256:718b74786ea7ed07aa0c58bf572154d4679f960d26e9641cc1de204a30b87fc9"}, ] pymdown-extensions = [ {file = "pymdown-extensions-7.1.tar.gz", hash = "sha256:5bf93d1ccd8281948cd7c559eb363e59b179b5373478e8a7195cf4b78e3c11b6"}, @@ -1698,30 +1629,26 @@ requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, ] -rich = [ - {file = "rich-9.10.0-py3-none-any.whl", hash = "sha256:3070d53e3a93864de351c1091af1deb25f41e6051b33e485d4626b591c0cfdb3"}, - {file = "rich-9.10.0.tar.gz", hash = "sha256:e0f2db62a52536ee32f6f584a47536465872cae2b94887cf1f080fb9eaa13eb2"}, -] scipy = [ - {file = "scipy-1.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d4303e3e21d07d9557b26a1707bb9fc065510ee8501c9bf22a0157249a82fd0"}, - {file = "scipy-1.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1bc5b446600c4ff7ab36bade47180673141322f0febaa555f1c433fe04f2a0e3"}, - {file = "scipy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8840a9adb4ede3751f49761653d3ebf664f25195fdd42ada394ffea8903dd51d"}, - {file = "scipy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:8629135ee00cc2182ac8be8e75643b9f02235942443732c2ed69ab48edcb6614"}, - {file = "scipy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:58731bbe0103e96b89b2f41516699db9b63066e4317e31b8402891571f6d358f"}, - {file = "scipy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:876badc33eec20709d4e042a09834f5953ebdac4088d45a4f3a1f18b56885718"}, - {file = "scipy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c0911f3180de343643f369dc5cfedad6ba9f939c2d516bddea4a6871eb000722"}, - {file = "scipy-1.6.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b8af26839ae343655f3ca377a5d5e5466f1d3b3ac7432a43449154fe958ae0e0"}, - {file = "scipy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:4f1d9cc977ac6a4a63c124045c1e8bf67ec37098f67c699887a93736961a00ae"}, - {file = "scipy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:eb7928275f3560d47e5538e15e9f32b3d64cd30ea8f85f3e82987425476f53f6"}, - {file = "scipy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:31ab217b5c27ab429d07428a76002b33662f98986095bbce5d55e0788f7e8b15"}, - {file = "scipy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:2f1c2ebca6fd867160e70102200b1bd07b3b2d31a3e6af3c58d688c15d0d07b7"}, - {file = "scipy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:155225621df90fcd151e25d51c50217e412de717475999ebb76e17e310176981"}, - {file = "scipy-1.6.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f68d5761a2d2376e2b194c8e9192bbf7c51306ca176f1a0889990a52ef0d551f"}, - {file = "scipy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d902d3a5ad7f28874c0a82db95246d24ca07ad932741df668595fe00a4819870"}, - {file = "scipy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:aef3a2dbc436bbe8f6e0b635f0b5fe5ed024b522eee4637dbbe0b974129ca734"}, - {file = "scipy-1.6.0-cp39-cp39-win32.whl", hash = 
"sha256:cdbc47628184a0ebeb5c08f1892614e1bd4a51f6e0d609c6eed253823a960f5b"}, - {file = "scipy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:313785c4dab65060f9648112d025f6d2fec69a8a889c714328882d678a95f053"}, - {file = "scipy-1.6.0.tar.gz", hash = "sha256:cb6dc9f82dfd95f6b9032a8d7ea70efeeb15d5b5fd6ed4e8537bb3c673580566"}, + {file = "scipy-1.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a15a1f3fc0abff33e792d6049161b7795909b40b97c6cc2934ed54384017ab76"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e79570979ccdc3d165456dd62041d9556fb9733b86b4b6d818af7a0afc15f092"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a423533c55fec61456dedee7b6ee7dce0bb6bfa395424ea374d25afa262be261"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:33d6b7df40d197bdd3049d64e8e680227151673465e5d85723b3b8f6b15a6ced"}, + {file = "scipy-1.6.1-cp37-cp37m-win32.whl", hash = "sha256:6725e3fbb47da428794f243864f2297462e9ee448297c93ed1dcbc44335feb78"}, + {file = "scipy-1.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5fa9c6530b1661f1370bcd332a1e62ca7881785cc0f80c0d559b636567fab63c"}, + {file = "scipy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd50daf727f7c195e26f27467c85ce653d41df4358a25b32434a50d8870fc519"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f46dd15335e8a320b0fb4685f58b7471702234cba8bb3442b69a3e1dc329c345"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0e5b0ccf63155d90da576edd2768b66fb276446c371b73841e3503be1d63fb5d"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2481efbb3740977e3c831edfd0bd9867be26387cacf24eb5e366a6a374d3d00d"}, + {file = "scipy-1.6.1-cp38-cp38-win32.whl", hash = "sha256:68cb4c424112cd4be886b4d979c5497fba190714085f46b8ae67a5e4416c32b4"}, + {file = "scipy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:5f331eeed0297232d2e6eea51b54e8278ed8bb10b099f69c44e2558c090d06bf"}, + {file = "scipy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8a51d33556bf70367452d4d601d1742c0e806cd0194785914daf19775f0e67"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:83bf7c16245c15bc58ee76c5418e46ea1811edcc2e2b03041b804e46084ab627"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:794e768cc5f779736593046c9714e0f3a5940bc6dcc1dba885ad64cbfb28e9f0"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5da5471aed911fe7e52b86bf9ea32fb55ae93e2f0fac66c32e58897cfb02fa07"}, + {file = "scipy-1.6.1-cp39-cp39-win32.whl", hash = "sha256:8e403a337749ed40af60e537cc4d4c03febddcc56cd26e774c9b1b600a70d3e4"}, + {file = "scipy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5193a098ae9f29af283dcf0041f762601faf2e595c0db1da929875b7570353f"}, + {file = "scipy-1.6.1.tar.gz", hash = "sha256:c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11"}, ] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, @@ -1732,8 +1659,8 @@ smmap = [ {file = "smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"}, ] tabulate = [ - {file = "tabulate-0.8.7-py3-none-any.whl", hash = "sha256:ac64cb76d53b1231d364babcd72abbb16855adac7de6665122f97b593f1eb2ba"}, - {file = "tabulate-0.8.7.tar.gz", hash = "sha256:db2723a20d04bcda8522165c73eea7c300eda74e0ce852d9022e0159d7895007"}, + {file = "tabulate-0.8.8-py3-none-any.whl", hash = 
"sha256:d6fe298fc0a58d848d6160118d17e70905f36766552ee78f8a1f4d64e8e16916"}, + {file = "tabulate-0.8.8.tar.gz", hash = "sha256:26f2589d80d332fefd2371d396863dedeb806f51b54bdb4b264579270b621e92"}, ] termcolor = [ {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, @@ -1790,8 +1717,8 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] tqdm = [ - {file = "tqdm-4.56.0-py2.py3-none-any.whl", hash = "sha256:4621f6823bab46a9cc33d48105753ccbea671b68bab2c50a9f0be23d4065cb5a"}, - {file = "tqdm-4.56.0.tar.gz", hash = "sha256:fe3d08dd00a526850568d542ff9de9bbc2a09a791da3c334f3213d8d0bbbca65"}, + {file = "tqdm-4.57.0-py2.py3-none-any.whl", hash = "sha256:70657337ec104eb4f3fb229285358f23f045433f6aea26846cdd55f0fd68945c"}, + {file = "tqdm-4.57.0.tar.gz", hash = "sha256:65185676e9fdf20d154cffd1c5de8e39ef9696ff7e59fe0156b1b08e468736af"}, ] typed-ast = [ {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, diff --git a/pyhdtoolkit/__init__.py b/pyhdtoolkit/__init__.py index a90101ec..bb5074e6 100644 --- a/pyhdtoolkit/__init__.py +++ b/pyhdtoolkit/__init__.py @@ -13,7 +13,7 @@ __title__ = "pyhdtoolkit" __description__ = "An all-in-one toolkit package to easy my Python work in my PhD." __url__ = "https://github.com/fsoubelet/PyhDToolkit" -__version__ = "0.8.0" +__version__ = "0.8.1" __author__ = "Felix Soubelet" __author_email__ = "felix.soubelet@cern.ch" __license__ = "MIT" diff --git a/pyhdtoolkit/cpymadtools/matching.py b/pyhdtoolkit/cpymadtools/matching.py index be381114..328f52fe 100644 --- a/pyhdtoolkit/cpymadtools/matching.py +++ b/pyhdtoolkit/cpymadtools/matching.py @@ -117,7 +117,7 @@ def match(*args, **kwargs): if q1_target and q2_target and dq1_target and dq2_target: logger.info( f"Doing combined matching to Qx={q1_target}, Qy={q2_target}, " - f"dq={dq1_target}, dqy={dq2_target} for sequence '{sequence}'" + f"dqx={dq1_target}, dqy={dq2_target} for sequence '{sequence}'" ) logger.trace(f"Vary knobs sent are {varied_knobs}") match(*varied_knobs, q1=q1_target, q2=q2_target, dq1=dq1_target, dq2=dq2_target) diff --git a/pyhdtoolkit/cpymadtools/special.py b/pyhdtoolkit/cpymadtools/special.py index c0065b92..93a39a36 100644 --- a/pyhdtoolkit/cpymadtools/special.py +++ b/pyhdtoolkit/cpymadtools/special.py @@ -349,6 +349,22 @@ def make_lhc_thin(madx: Madx, sequence: str, slicefactor: int = 1, **kwargs) -> madx.command.makethin(sequence=sequence, style=style, makedipedge=makedipedge) +def re_cycle_sequence(madx: Madx, sequence: str = "lhcb1", start: str = "IP3") -> None: + """ + Re-cycle the provided sequence from a different starting point. + + Args: + madx (Madx): an instantiated cpymad.madx.Madx object. + sequence (str): the sequence to re cycle. + start (str): element to start the new cycle from. + """ + logger.debug(f"Re-cycling sequence '{sequence}' from {start}") + madx.command.seqedit(sequence=sequence) + madx.command.flatten() + madx.command.cycle(start=start) + madx.command.endedit() + + # ----- Twiss Utilities ----- # diff --git a/pyhdtoolkit/scripts/__init__.py b/pyhdtoolkit/scripts/__init__.py deleted file mode 100644 index c15b065b..00000000 --- a/pyhdtoolkit/scripts/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -scripts module -~~~~~~~~~~~~~~~ -Various scripts to run, that will help manipulate data or run different flow of programs and -simulations. 
This is helpful in my work. - -:copyright: (c) 2019-2020 by Felix Soubelet. -:license: MIT, see LICENSE for more details. -""" diff --git a/pyhdtoolkit/scripts/ac_dipole/__init__.py b/pyhdtoolkit/scripts/ac_dipole/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pyhdtoolkit/scripts/ac_dipole/sext_ac_dipole_tracking.py b/pyhdtoolkit/scripts/ac_dipole/sext_ac_dipole_tracking.py deleted file mode 100644 index c2f92b1f..00000000 --- a/pyhdtoolkit/scripts/ac_dipole/sext_ac_dipole_tracking.py +++ /dev/null @@ -1,494 +0,0 @@ -""" -Script scripts.ac_dipole.sext_ac_dipole_tracking ------------------------------------------------- - -Created on 2020.02.27 -:author: Felix Soubelet (felix.soubelet@cern.ch) - -This is a Python3 utility to launch a series of MAD-X simlations with the proper parameters, -call the appropriate python scripts on the outputs and organise the results. - -Made to be ran with the OMC conda environment, and ran directly for the commandline. - -A pair of examples -================== - -Running with kicks in the horizontal plane only, for two sigma values: -python path/to/sext_ac_dipole_tracking.py --planes horizontal --mask /path/to/kick/mask.mask \ - --type kick --sigmas 5 10 - -Running with free oscillations in both planes succesively, for many offset values: -python path/to/sext_ac_dipole_tracking.py --planes horizontal vertical \ - --mask /path/to/offset/mask.mask --type amp -- sigmas 5 10 15 20 -""" -import argparse -import shutil -import sys - -from pathlib import Path -from typing import Dict, List, Union - -from loguru import logger - -from pyhdtoolkit.utils.cmdline import CommandLine -from pyhdtoolkit.utils.contexts import timeit -from pyhdtoolkit.utils.defaults import LOGURU_FORMAT, OMC_PYTHON, TBT_CONVERTER_SCRIPT - - -class ACDipoleGrid: - """ - Algorithm as a class to run the simulations and organize the outputs. - """ - - __slots__ = { - "grid_output_dir": "PosixPath object to the directory for all outputs", - "mask_files_dir": "PosixPath object to the directory for simulations' masks", - "outputdata_dir": "PosixPath object to the directory for simulation data", - "trackfiles_dir": "PosixPath object to the directory for trackone files", - "trackfiles_planes": "PosixPath objects to the planes' trackone files", - "run_planes": "List of planes for which to run simulations", - "sigmas": "List of amplitudes for AC dipole to kick to (in bunch sigma)", - "template_file": "PosixPath object to the location of the mask template", - "template_str": "Text in the template_file", - } - - def __init__(self) -> None: - self.grid_output_dir: Path = Path("grid_outputs") - self.mask_files_dir: Path = self.grid_output_dir / "mask_files" - self.outputdata_dir: Path = self.grid_output_dir / "outputdata_dirs" - self.trackfiles_dir: Path = self.grid_output_dir / "trackfiles" - self.trackfiles_planes: Dict[str, Path] = { - "horizontal": self.grid_output_dir / "trackfiles" / "X", - "vertical": self.grid_output_dir / "trackfiles" / "Y", - } - self.run_planes: List[str] = _parse_arguments().planes - self.sigmas: List[float] = sorted(_parse_arguments().sigmas) - self.template_file: Path = Path(_parse_arguments().template) - self.template_str: str = self.template_file.read_text() - - def _check_input_sanity(self) -> None: - """ - Makes sure there are no duplicates in the provided sigma values, because that will mess up - a long time after launch and you will cry. 
- """ - if len(self.sigmas) != len(set(self.sigmas)): - logger.error("There is a duplicate in the sigma values, which would cause a failure later.") - sys.exit() - - def _create_output_dirs(self) -> None: - """ - Will create the proper output dirs if they don't exist already. - """ - if not self.grid_output_dir.is_dir(): - logger.debug(f"Creating directory '{self.grid_output_dir}'") - self.grid_output_dir.mkdir() - if not self.mask_files_dir.is_dir(): - logger.debug(f"Creating directory '{self.mask_files_dir}'") - self.mask_files_dir.mkdir() - if not self.outputdata_dir.is_dir(): - logger.debug(f"Creating directory '{self.outputdata_dir}'") - self.outputdata_dir.mkdir() - if not self.trackfiles_dir.is_dir(): - logger.debug(f"Creating directory '{self.trackfiles_dir}'") - self.trackfiles_dir.mkdir() - if not self.trackfiles_planes["horizontal"].is_dir(): - logger.debug(f"Creating directory '{self.trackfiles_planes['horizontal']}'") - self.trackfiles_planes["horizontal"].mkdir() - if not self.trackfiles_planes["vertical"].is_dir(): - logger.debug(f"Creating directory '{self.trackfiles_planes['vertical']}'") - self.trackfiles_planes["vertical"].mkdir() - else: - logger.error("Output directories already present, you may want to move those.") - sys.exit() - - def track_forced_oscillations_for_plane(self, kick_plane: str = None) -> None: - """ - Run MAD-X simulations with AC dipole tracking for the given plane, and handle outputs. - - Args: - kick_plane: the name of the plane on which to apply a kick, either 'horizontal' - or 'vertical'. - """ - if kick_plane not in ("horizontal", "vertical"): - logger.error(f"Plane parameter {kick_plane} is not a valid value") - raise ValueError("Plane parameter should be one of 'horizontal' or 'vertical'") - - with timeit( - lambda spanned: logger.info( - f"Tracked all amplitudes for {kick_plane} kicks in {spanned:.4f} seconds" - ) - ): - for kick_in_sigma in self.sigmas: - print("") - # Set the wanted kick value (in sigmas) in the given plane, and a small initial - # offset in the other (small offset so that harpy doesn't cry and we get tune). - # Do NOT kick both planes: cross-terms influence the detuning. - plane_letter = "x" if kick_plane == "horizontal" else "y" - replace_dict = ( - { - "%(SIGMAX_VALUE)s": kick_in_sigma, - "%(SIGMAY_VALUE)s": 0, - "%(AMPLX_VALUE)s": 0, - "%(AMPLY_VALUE)s": 0.5, - } - if kick_plane == "horizontal" - else { - "%(SIGMAX_VALUE)s": 0, - "%(SIGMAY_VALUE)s": kick_in_sigma, - "%(AMPLX_VALUE)s": 0.5, - "%(AMPLY_VALUE)s": 0, - } - ) - filename_to_write = Path(f"sext_ac_dipole_tracking_{kick_in_sigma}_sigma_{plane_letter}_kick") - mask_file = create_script_file( - self.template_str, - values_replacing_dict=replace_dict, - filename=Path(str(filename_to_write)), - ) - run_madx_mask(mask_file) - _move_mask_file_after_running(mask_file_path=mask_file, mask_files_dir=self.mask_files_dir) - _rename_madx_outputs( - kick_in_sigma=kick_in_sigma, outputdata_dir=self.outputdata_dir, plane=plane_letter, - ) - _convert_trackone_to_sdds() - _move_trackone_sdds( - kick_in_sigma=kick_in_sigma, - trackfiles_dir=self.trackfiles_planes[kick_plane], - plane=plane_letter, - ) - - def track_free_oscillations_for_plane(self, kick_plane: str = None) -> None: - """ - Run MAD-X simulations with amplitude offset tracking for the given plane, and handle - outputs. - - Args: - kick_plane: the name of the plane on which to apply an offset, either 'horizontal' - or 'vertical'. 
- """ - if kick_plane not in ("horizontal", "vertical"): - logger.error(f"Plane parameter {kick_plane} is not a valid value") - raise ValueError("Plane parameter should be one of 'horizontal' or 'vertical'") - - with timeit( - lambda spanned: logger.info( - f"Tracked all amplitudes for {kick_plane} offsets in {spanned:.4f} seconds" - ) - ): - for kick_in_sigma in self.sigmas: - print("") - plane_letter = "x" if kick_plane == "horizontal" else "y" - action_var_value = kick_in_sigma - amplitudes_dict = ( - {"%(AMPLX_VALUE)s": action_var_value, "%(AMPLY_VALUE)s": 0.5} - if kick_plane == "horizontal" - else {"%(AMPLX_VALUE)s": 0.5, "%(AMPLY_VALUE)s": action_var_value} - ) - filename_to_write = Path( - f"initial_amplitude_tracking_{kick_in_sigma}_sigma_{plane_letter}_kick" - ) - mask_file = create_script_file( - self.template_str, - values_replacing_dict=amplitudes_dict, - filename=Path(str(filename_to_write)), - ) - run_madx_mask(mask_file) - _move_mask_file_after_running(mask_file_path=mask_file, mask_files_dir=self.mask_files_dir) - _rename_madx_outputs( - kick_in_sigma=kick_in_sigma, outputdata_dir=self.outputdata_dir, plane=plane_letter, - ) - _convert_trackone_to_sdds() - _move_trackone_sdds( - kick_in_sigma=kick_in_sigma, - trackfiles_dir=self.trackfiles_planes[kick_plane], - plane=plane_letter, - ) - - -@logger.catch -def main() -> None: - """ - Run the whole process: create a class instance, simulate for horizontal and vertical kicks, - exit. - """ - command_line_args = _parse_arguments() - _set_logger_level(command_line_args.log_level) - - simulations = ACDipoleGrid() - simulations._check_input_sanity() - simulations._create_output_dirs() - - sim_type = command_line_args.simulation_type - try: - if sim_type == "kick": - logger.info(f"Planes to kick then track on are: {simulations.run_planes}") - logger.info(f"Kick values to compute are (in bunch sigmas): {simulations.sigmas}") - for plane in simulations.run_planes: - simulations.track_forced_oscillations_for_plane(kick_plane=plane) - elif sim_type == "amp": - logger.info(f"Planes to offset then track on are: {simulations.run_planes}") - logger.info(f"Registered initial tracking amplitudes (in bunch sigmas): {simulations.sigmas}") - for plane in simulations.run_planes: - simulations.track_free_oscillations_for_plane(kick_plane=plane) - else: - logger.error(f"Simulation type {sim_type} is not a valid value") - raise ValueError("Simulation type should be one of 'kick' or 'amp'") - except KeyboardInterrupt: - logger.info("Manual interruption, ending processes") - _cleanup_madx_residuals() - logger.warning("The 'grid_outputs' folder was left untouched, check for unexpected MADX residuals") - - -# ---------------------- Public Utilities ---------------------- # - - -def run_madx_mask(mask_file: Path) -> None: - """ - Run madx on the provided file. - - Args: - mask_file (Path): Path object with the mask file location. - """ - logger.debug(f"Running madx on script: '{mask_file.absolute()}'") - exit_code, std_out = CommandLine.run(f"madx {mask_file.absolute()}") - if exit_code != 0: # Dump madx log in case of failure so we can see where it went wrong. - logger.warning(f"MAD-X command self-killed with exit code: {exit_code}") - log_dump = Path(f"failed_madx_returnedcode_{exit_code}.log") - with log_dump.open("w") as logfile: - logfile.write(std_out.decode()) # Default 'utf-8' encoding, depends on your system. 
- logger.warning(f"The standard output has been dumped to file 'failed_command_{exit_code}.logfile'") - - -def create_script_file( - template_as_str: str, values_replacing_dict: Dict[str, float], filename: Path, -) -> Path: - """ - Create new script file from template with the appropriate values. - - Args: - template_as_str (str): string content of your template mask file. - values_replacing_dict (Dict[str, float]): keys to find and values to replace them with in - the template. - filename (Path): Path object for the file in which to write the script. - """ - string_mask = _create_script_string(template_as_str, values_replacing_dict) - return _write_script_to_file(string_mask, filename) - - -# ---------------------- Private Utilities ---------------------- # - - -def _convert_trackone_to_sdds() -> None: - """ - Run the omc3 tbt_converter script on trackone output of MAD-X. Will also cleanup the `converter` - and 'stats' files left by tbt_converter afterwards. - """ - if not Path("trackone").is_file(): - logger.error("Tried to call 'tbt_converter' without a 'trackone' file present, aborting") - sys.exit() - - logger.debug(f"Running '{TBT_CONVERTER_SCRIPT}' on 'trackone' file") - CommandLine.run( - f"{OMC_PYTHON.absolute()} {TBT_CONVERTER_SCRIPT.absolute()} " - "--files trackone --outputdir . --tbt_datatype trackone" - ) - logger.debug("Removing trackone file 'trackone'") - Path("trackone").unlink() - - logger.debug("Removing outputs of 'tbt_converter'") - if Path("stats.txt").exists(): - Path("stats.txt").unlink() - for tbt_output_file in list(Path(".").glob("converter_*")): - tbt_output_file.unlink() - - -def _create_script_string(template_as_string: str, values_replacing_dict: Dict[str, float]) -> str: - """ - For each key in the provided dict, will replace it in the template scripts - with the corresponding dict value. - - Args: - template_as_string (str): the string content of your template mask file. - values_replacing_dict (Dict[str, float]): pairs of key, value to find and replace in the - template string. - - Returns: - The new script string. - """ - script_string: str = template_as_string - for key, value in values_replacing_dict.items(): - script_string = script_string.replace(str(key), str(value)) - return script_string - - -def _move_mask_file_after_running(mask_file_path: Path, mask_files_dir: Path) -> None: - """ - Move the mask file after being done running it with MAD-X. - - Args: - mask_file_path (Path): Path object with the file location. - mask_files_dir (Path): Path object with the directory to move mask to' location. - """ - logger.debug(f"Moving mask file '{mask_file_path}' to directory '{mask_files_dir}'") - mask_file_path.rename(f"{mask_files_dir}/{mask_file_path}") - - -def _move_trackone_sdds(kick_in_sigma: Union[str, float], trackfiles_dir: Path, plane: str) -> None: - """ - Call after running omc3's `tbt_converter` on the `trackone` output by MAD-X, will move the - resulting `trackone.sdds` file to the `trackfiles_dir`, with a naming reflecting the ac dipole - kick strength. - - Args: - kick_in_sigma (Union[str, float]): the AC dipole kick value (in sigma) for which you ran - your simulation. - trackfiles_dir (Path): PosixPath to the folder in which to store all sdds trackone files. - plane (str): the plane on which ac dipole provided the kick, should be `x` or `y`. 
- """ - if str(plane) not in ("x", "y"): - logger.error(f"Plane parameter {plane} is not a valid value") - raise ValueError("Plane parameter should be one of 'x' or 'y'") - logger.debug(f"Moving trackone sdds file to directory '{trackfiles_dir}'") - track_sdds_file = Path("trackone.sdds") - if not track_sdds_file.is_file(): - logger.error("Conversion to trackone sdds file must have failed, check the omc3 script") - sys.exit() - track_sdds_file.rename(f"{trackfiles_dir}/trackone_{kick_in_sigma}_sigma_{plane}.sdds") - - -def _parse_arguments() -> argparse.Namespace: - """ - Simple argument parser to make life easier in the command-line. - Returns a NameSpace with arguments as attributes. - """ - parser = argparse.ArgumentParser(description="Running MAD-X AC dipole trackings for you.") - parser.add_argument( - "-s", - "--sigmas", - dest="sigmas", - nargs="+", - default=[1, 2], - type=float, - help="Different amplitude values (in bunch sigma) for the AC dipole kicks." "Defaults to [1, 2].", - ) - parser.add_argument( - "-p", - "--planes", - dest="planes", - nargs="+", - default=["horizontal"], - type=str, - help="Planes for which to kick, possible values are 'horizontal' and 'vertical'," - "Defaults to 'horizontal'.", - ) - parser.add_argument( - "-m", - "--mask", - dest="template", - default="/afs/cern.ch/work/f/fesoubel/public/MADX_scripts/AC_dipole_tracking/ac_kick_tracking_template.mask", - type=str, - help="Location of your MAD-X template mask file to use, defaults to " - "'/afs/cern.ch/work/f/fesoubel/public/MADX_scripts/AC_dipole_tracking/ac_kick_track_template.mask'.", - ) - parser.add_argument( - "-t", - "--type", - dest="simulation_type", - default="kick", - type=str, - help="Type of simulations to run, either 'kick' for AC dipole kick or 'amp' for free " - "oscillations. Defaults to 'kick'.", - ) - parser.add_argument( - "-l", - "--logs", - dest="log_level", - default="info", - type=str, - help="The base console logging level. Can be 'debug', 'info', 'warning' and 'error'." - "Defaults to 'info'.", - ) - return parser.parse_args() - - -def _rename_madx_outputs(kick_in_sigma: Union[str, float], outputdata_dir: Path, plane: str) -> None: - """ - Call after running MAD-X on your mask, will move the 'Outpudata' created by MAD-X to the - proper place. - - Args: - kick_in_sigma (Union[str, float]): the AC dipole kick value (in sigma) for which you ran - your simulation. - outputdata_dir (Path): PosixPath to the folder in which to store all successive - `Outputdata`'s location. - plane (str): the plane on which ac dipole provided the kick, should be `x` or `y`. - """ - if str(plane) not in ("x", "y"): - raise ValueError(f"Plane parameter should be one of 'x', 'y' but {plane} was provided.") - madx_outputs = Path("Outputdata") - logger.debug(f"Moving MAD-X outputs to directory '{outputdata_dir}'") - madx_outputs.rename(f"{outputdata_dir}/Outputdata_{kick_in_sigma}_sigma_{plane}") - - -def _set_logger_level(log_level: str = "info") -> None: - """ - Sets the logger level to the one provided at the commandline. - - Default loguru handler will have DEBUG level and ID 0. - We need to first remove this default handler and add ours with the wanted level. - - Args: - log_level (str): the default logging level to print out. - """ - logger.remove(0) - logger.add(sys.stderr, format=LOGURU_FORMAT, level=log_level.upper()) - - -def _write_script_to_file(script_as_string: str, filename: Union[str, Path]) -> Path: - """ - Create a new file with the provided script, and return the location. 
- - Args: - script_as_string (str): the script string to write to file. - filename (Union[str, Path]): the file name to use. - - Returns: - The `pathlib.Path` object to the created file. - """ - file_path = Path(str(filename) + ".mask") - logger.debug(f"Creating new mask file '{file_path}'") - with file_path.open("w") as script: - script.write(script_as_string) - return file_path - - -def _cleanup_madx_residuals() -> None: - """ - Will look for specific madx artifacts and remove them. Meant to be called in case of - interuption. - """ - expected_residuals: Dict[str, List[str]] = { - "symlinks": ["db5", "slhc", "fidel", "wise", "optics2016", "optics2017", "optics2018", "scripts",], - "directories": ["temp", "Outputdata"], - "files": ["fort.18"], - } - - logger.debug("Cleaning up expected MADX residuals") - for residual_type, residual_values in expected_residuals.items(): - logger.trace(f"Cleaning up MADX residual {residual_type}") - for residual in residual_values: - if Path(residual).is_symlink() or Path(residual).is_file(): - Path(residual).unlink() - elif Path(residual).is_dir(): - shutil.rmtree(Path(residual)) - - logger.debug("Cleaning up potential residual mask files") - for suspect_mask in list(Path(".").glob("*.mask")): - if ( - suspect_mask.stem.startswith("initial_") or suspect_mask.stem.startswith("sext_") - ) and suspect_mask.stem.endswith("_kick"): - suspect_mask.unlink() - - -if __name__ == "__main__": - main() diff --git a/pyhdtoolkit/scripts/triplet_errors/__init__.py b/pyhdtoolkit/scripts/triplet_errors/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pyhdtoolkit/scripts/triplet_errors/algo.py b/pyhdtoolkit/scripts/triplet_errors/algo.py deleted file mode 100644 index 4f339f9b..00000000 --- a/pyhdtoolkit/scripts/triplet_errors/algo.py +++ /dev/null @@ -1,305 +0,0 @@ -""" -Script scripts.triplets_errors.algo ----------------------------------- - -Created on 2019.06.15 -:author: Felix Soubelet (felix.soubelet@cern.ch) - -Command-line utility script, which will launch a series of MAD-X simulations, perform analysis of -the outputs and hand out a plot. - -Arguments should be given as options at launch in the command-line. See README for instructions. -""" - -import argparse -import sys - -from copy import deepcopy -from typing import List - -import cpymad -import numpy as np -import pandas as pd - -from loguru import logger -from rich.progress import track - -from pyhdtoolkit.cpymadtools.generators import LatticeGenerator -from pyhdtoolkit.scripts.triplet_errors.data_classes import BetaBeatValues, StdevValues -from pyhdtoolkit.scripts.triplet_errors.plotting_functions import ( - plot_bbing_max_errorbar, - plot_bbing_with_ips_errorbar, -) -from pyhdtoolkit.utils.contexts import timeit -from pyhdtoolkit.utils.defaults import LOGURU_FORMAT - - -class GridCompute: - """ - Algorithm as a class to run the simulations and analyze the outputs. - - Will prompt error values for confirmation, run MAD-X simulations through a `cpymad.madx.Madx` - object, get beta-beating values from the outputs and return the appropriate structures. 
- """ - - __slots__ = { - "reference_mad": "cpymad Madx object to run the nominal configuration", - "errors_mad": "cpymad Madx object to run errored simulations", - "rms_betabeatings": "BetaBeatValues class to hold rms beta-beatings from simulations", - "standard_deviations": "StdevValues class to hold standard deviations from simulations", - "lost_seeds_tf": "List of field error values leading to loss of closed orbit", - "lost_seeds_miss": "List of misalignment values leading to loss of closed orbit", - "nominal_twiss": "Twiss dataframe from the nominal simulation", - } - - def __init__(self) -> None: - """ - Initializing will take some time since the reference script is being ran, to store the - reference dframe. Unless you go into PTC it should be a matter of seconds. - """ - self.reference_mad = cpymad.madx.Madx(stdout=False) - self.errors_mad = cpymad.madx.Madx(stdout=False) - self.rms_betabeatings = BetaBeatValues() - self.standard_deviations = StdevValues() - self.lost_seeds_tf: List[int] = [] - self.lost_seeds_miss: List[int] = [] - self.nominal_twiss = self._get_nominal_twiss() - - def _get_nominal_twiss(self) -> pd.DataFrame: - """ - Run a MAD-X simulation without errors, and extract the nominal Twiss from the results. - This will be stored in the `nominal_twiss` instance attribute. - - Returns: - Nothing, directly updates the instance's `nominal_twiss` attribute inplace. - """ - logger.info("Running simulation for reference nominal run") - ref_script = LatticeGenerator.generate_tripleterrors_study_reference() - self.reference_mad.input(ref_script) - logger.debug("Extracting reference Twiss dframe from cpymad") - return deepcopy(self.reference_mad.table.twiss.dframe()) - - def run_tf_errors(self, error_values: List[float], n_seeds: int) -> None: - """ - Run simulations for field errors, compute the values from the outputs, and store the final - results in the class's data structures. - - Args: - error_values (List[float]): the different error values to run simulations for - n_seeds (int): number of simulations to run for each error values. - - Returns: - Nothing, directly updates the instance's `rms_betabeatings` and `standard_deviations` - attributes. - """ - with timeit(lambda spanned: logger.info(f"Simulated field errors in: {spanned:.4f} seconds")): - for error in error_values: - logger.debug(f"Running simulation for Relative Field Error: {error}E-4") - temp_data = BetaBeatValues() - - for _ in track(range(n_seeds), description="Simulating Field Errors Seeds", transient=True): - # Getting beta-beatings & appending to temporary BetaBeatValues - tferrors_twiss: pd.DataFrame = self._track_tf_error(error) - betabeatings: pd.DataFrame = _get_betabeatings(self.nominal_twiss, tferrors_twiss) - temp_data.update_tf_from_cpymad(betabeatings) - - # Append computed seeds' RMS for this error value in `rms_betabeatings` attribute. - self.rms_betabeatings.update_tf_from_seeds(temp_data) - - # Getting stdev of all values for the N computed seeds - self.standard_deviations.update_tf(temp_data) - - # Getting the lost seeds if any - self.lost_seeds_tf.append(n_seeds - len(temp_data.tferror_bbx)) - - def _track_tf_error(self, error: float) -> pd.DataFrame: - """ - Run tferror tracking for a given seed, which is randomly assigned at function call. - - Args: - error (float): the error value to input in the madx script. - - Returns: - The twiss dframe from cpymad. 
- """ - seed = str(np.random.randint(1e6, 5e6)) - tferror_script = LatticeGenerator.generate_tripleterrors_study_tferror_job(seed, str(error)) - self.errors_mad.input(tferror_script) - return self.errors_mad.table.twiss.dframe() - - def run_miss_errors(self, error_values: List[float], n_seeds: int) -> None: - """ - Run the simulations for misalignment errors, compute the values from the outputs, and store - the final results in the class's data structures. - - Args: - error_values (List[float]): the different error values to run simulations for. - n_seeds (int): number of simulations to run for each error values. - - Returns: - Nothing, directly updates the instance's `rms_betabeatings` and `standard_deviations` - attributes. - """ - with timeit(lambda spanned: logger.info(f"Simulated misalignment errors in: {spanned:.4f} seconds")): - for error in error_values: - logger.debug(f"Running for Longitudinal Misalignment Error: {float(error)}mm") - temp_data = ( - BetaBeatValues() - ) # this will hold the beta-beats for all seeds with this error value. - - for _ in track(range(n_seeds), description="Simulating Misalignment Seeds", transient=True): - # Getting beta-beatings & appending to temporary BetaBeatValues - mserrors_twiss: pd.DataFrame = self._track_miss_error(error) - betabeatings: pd.DataFrame = _get_betabeatings(self.nominal_twiss, mserrors_twiss) - temp_data.update_miss_from_cpymad(betabeatings) - - # Append computed seeds' RMS for this error value in `rms_betabeatings` attribute. - self.rms_betabeatings.update_miss_from_seeds(temp_data) - - # Getting stdev of all values for the N computed seeds - self.standard_deviations.update_miss(temp_data) - - # Getting the lost seeds if any - self.lost_seeds_miss.append(n_seeds - len(temp_data.misserror_bbx)) - - def _track_miss_error(self, error: float) -> pd.DataFrame: - """ - Run misserror tracking for a given seed, which is randomly assigned at function call. - - Args: - error (float): the error value to input in the madx script. - - Returns: - The twiss dframe from cpymad. - """ - seed = str(np.random.randint(1e6, 5e6)) - mserror_script = LatticeGenerator.generate_tripleterrors_study_mserror_job(seed, str(error)) - self.errors_mad.input(mserror_script) - return self.errors_mad.table.twiss.dframe() - - -def _get_betabeatings(nominal_twiss: pd.DataFrame, errors_twiss: pd.DataFrame) -> pd.DataFrame: - """ - Simple function to get beta-beatings from a `cpymad.madx.Madx`'s Twiss output. - - Args: - nominal_twiss (pd.DataFrame): a twiss.dframe() results from a reference scenario. - errors_twiss (pd.DataFrame): a twiss.dframe() results from the perturbed scenario. - - Returns: - A `pd.DataFrame` with the beta-beat values, in percentage. - """ - betabeat = pd.DataFrame() - betabeat["NAME"] = nominal_twiss.name - betabeat["s"] = nominal_twiss.s - betabeat["BETX"] = 100 * (errors_twiss.betx - nominal_twiss.betx) / nominal_twiss.betx - betabeat["BETY"] = 100 * (errors_twiss.bety - nominal_twiss.bety) / nominal_twiss.bety - return betabeat - - -def _parse_arguments() -> argparse.Namespace: - """ - Simple argument parser to make life easier in the command-line. - Returns a NameSpace with arguments as attributes. 
- """ - parser = argparse.ArgumentParser(description="Running the beta-beating script.") - parser.add_argument( - "-e", - "--errors", - dest="errors", - nargs="+", - default=[1, 3, 5], - type=int, - help="Error values to simulate", - ) - parser.add_argument( - "-s", "--seeds", dest="seeds", default=50, type=int, help="Number of seeds to simulate per error.", - ) - parser.add_argument( - "-p", "--plotbetas", dest="plotbetas", default=False, help="Option for plotting betas at each error.", - ) - parser.add_argument( - "-l", - "--logs", - dest="log_level", - default="info", - type=str, - help="The base console logging level. Can be 'debug', 'info', 'warning' and 'error'." - "Defaults to 'info'.", - ) - return parser.parse_args() - - -def _set_logger_level(log_level: str = "info") -> None: - """ - Sets the logger level to the one provided at the commandline. - - Default loguru handler will have DEBUG level and ID 0. - We need to first remove this default handler and add ours with the wanted level. - - Args: - log_level (str): string, the default logging level to print out. - """ - logger.remove(0) - logger.add(sys.stderr, format=LOGURU_FORMAT, level=log_level.upper()) - - -@logger.catch -def main() -> None: - """ - Run the whole process. - - Will prompt for error grid values for confirmation. Instantiates a GridCompute object and runs - for each type of errors. The results are stored in the class itself, to be accessed for - plotting. - """ - command_line_args = _parse_arguments() - _set_logger_level(command_line_args.log_level) - simulations = GridCompute() - - logger.info(f"Here are the error values that will be ran: {command_line_args.errors}") - - # Running simulations - simulations.run_tf_errors(command_line_args.errors, command_line_args.seeds) - simulations.run_miss_errors(command_line_args.errors, command_line_args.seeds) - - # Getting the results in dataframes and exporting to csv - logger.info("Exporting results to csv") - bbing_df: pd.DataFrame = simulations.rms_betabeatings.to_pandas() - std_df: pd.DataFrame = simulations.standard_deviations.to_pandas() - bbing_df.to_csv("beta_beatings.csv", index=False) - std_df.to_csv("standard_deviations.csv", index=False) - - # Plotting the results - plot_bbing_max_errorbar( - command_line_args.errors, - beta_beatings_df=bbing_df, - stdev_df=std_df, - plane="Horizontal", - figname="miss_vs_tf_max_hor.png", - ) - plot_bbing_max_errorbar( - command_line_args.errors, - beta_beatings_df=bbing_df, - stdev_df=std_df, - plane="Vertical", - figname="miss_vs_tf_max_ver.png", - ) - plot_bbing_with_ips_errorbar( - command_line_args.errors, - beta_beatings_df=bbing_df, - stdev_df=std_df, - plane="Horizontal", - figname="miss_vs_tf_ips_hor.png", - ) - plot_bbing_with_ips_errorbar( - command_line_args.errors, - beta_beatings_df=bbing_df, - stdev_df=std_df, - plane="Vertical", - figname="miss_vs_tf_ips_ver.png", - ) - - -if __name__ == "__main__": - main() diff --git a/pyhdtoolkit/scripts/triplet_errors/data_classes.py b/pyhdtoolkit/scripts/triplet_errors/data_classes.py deleted file mode 100644 index 98f2524c..00000000 --- a/pyhdtoolkit/scripts/triplet_errors/data_classes.py +++ /dev/null @@ -1,264 +0,0 @@ -""" -Module scripts.triplet_errors.data_classes ------------------------------------------- - -Created on 2019.06.15 -:author: Felix Soubelet (felix.soubelet@cern.ch) - -A few classes that will be useful to store values calculated from the results of the GridCompute -Algorithm. 
-""" - -from typing import List - -import numpy as np -import pandas as pd - -from loguru import logger -from pydantic import BaseModel - - -class BetaBeatValues(BaseModel): - """ - Simple class to store and transfer beta-beating values. - - Class attributes are as follows: - "tferror_bbx": "Horizontal beta-beating values from field errors", - "tferror_bby": "Vertical beta-beating values from field errors", - "ip1_tferror_bbx": "Horizontal beta-beating values from field errors at IP1", - "ip1_tferror_bby": "Vertical beta-beating values from field errors at IP1", - "ip5_tferror_bbx": "Horizontal beta-beating values from field errors at IP5", - "ip5_tferror_bby": "Vertical beta-beating values from field errors at IP5", - "max_tferror_bbx": "Maximal horizontal beta-beating values from field errors", - "max_tferror_bby": "Maximal vertical beta-beating values from field errors", - "misserror_bbx": "Horizontal beta-beating values from misalignment errors", - "misserror_bby": "Horizontal beta-beating values from misalignment errors", - "ip1_misserror_bbx": "Horizontal beta-beating values from misalignment errors at IP1", - "ip1_misserror_bby": "Vertical beta-beating values from misalignment errors at IP1", - "ip5_misserror_bbx": "Horizontal beta-beating values from misalignment errors at IP5", - "ip5_misserror_bby": "Vertical beta-beating values from misalignment errors at IP5", - "max_misserror_bbx": "Maximal horizontal beta-beating values from misalignment errors", - "max_misserror_bby": "Maximal vertical beta-beating values from misalignment errors", - """ - - tferror_bbx: List[float] = [] - tferror_bby: List[float] = [] - ip1_tferror_bbx: List[float] = [] - ip1_tferror_bby: List[float] = [] - ip5_tferror_bbx: List[float] = [] - ip5_tferror_bby: List[float] = [] - max_tferror_bbx: List[float] = [] - max_tferror_bby: List[float] = [] - misserror_bbx: List[float] = [] - misserror_bby: List[float] = [] - ip1_misserror_bbx: List[float] = [] - ip1_misserror_bby: List[float] = [] - ip5_misserror_bbx: List[float] = [] - ip5_misserror_bby: List[float] = [] - max_misserror_bbx: List[float] = [] - max_misserror_bby: List[float] = [] - - def describe(self) -> None: - """ - Simple print statement of instance attributes. - """ - for attribute, value in self.dict().items(): - print(f"{attribute:<20} {value}") - - def update_tf_from_cpymad(self, cpymad_betabeatings: pd.DataFrame) -> None: - """ - This is to update a temporary BetaBeatValues after having ran a simulation for a specific - seed. Appends relevant values to the instance's attributes. - - Args: - cpymad_betabeatings (pd.DataFrame): the beta-beatings from the simulation, compared to - the nominal twiss from a reference run. 
- """ - logger.trace("Getting rms and max values for betatron functions of provided run") - self.tferror_bbx.append(_get_rms(cpymad_betabeatings["BETX"])) - self.tferror_bby.append(_get_rms(cpymad_betabeatings["BETY"])) - self.max_tferror_bbx.append(cpymad_betabeatings["BETX"].max()) - self.max_tferror_bby.append(cpymad_betabeatings["BETY"].max()) - - logger.trace("Getting betatron functions at IP1 and IP5") - # cpymad naming: lowercase and appended with :beam_number - self.ip1_tferror_bbx.append(cpymad_betabeatings.BETY[cpymad_betabeatings.NAME == "ip1:1"][0]) - self.ip1_tferror_bby.append(cpymad_betabeatings.BETY[cpymad_betabeatings.NAME == "ip1:1"][0]) - self.ip5_tferror_bbx.append(cpymad_betabeatings.BETX[cpymad_betabeatings.NAME == "ip5:1"][0]) - self.ip5_tferror_bby.append(cpymad_betabeatings.BETY[cpymad_betabeatings.NAME == "ip5:1"][0]) - - def update_tf_from_seeds(self, temp_data) -> None: - """ - Updates the error's beta-beatings values after having ran simulations for all seeds. - Append computed rms values for a group of seeds, to field errors result values. - - Args: - temp_data: a `BetaBeatValues` object with the seeds' results. - """ - self.tferror_bbx.append(_get_rms(temp_data.tferror_bbx)) - self.tferror_bby.append(_get_rms(temp_data.tferror_bby)) - self.max_tferror_bbx.append(_get_rms(temp_data.max_tferror_bbx)) - self.max_tferror_bby.append(_get_rms(temp_data.max_tferror_bby)) - self.ip1_tferror_bbx.append(_get_rms(temp_data.ip1_tferror_bbx)) - self.ip1_tferror_bby.append(_get_rms(temp_data.ip1_tferror_bby)) - self.ip5_tferror_bbx.append(_get_rms(temp_data.ip5_tferror_bbx)) - self.ip5_tferror_bby.append(_get_rms(temp_data.ip5_tferror_bby)) - - def update_miss_from_cpymad(self, cpymad_betabeatings: pd.DataFrame) -> None: - """ - Updates a temporary BetaBeatValues after having ran a simulation for a specific seed. - Appends relevant values to the instance's attributes. - - Args: - cpymad_betabeatings (pd.DataFrame): the beta-beatings from the simulation, compared to - the nominal twiss from a reference run. - """ - logger.trace("Getting rms and max values for betatron functions of provided run") - self.misserror_bbx.append(_get_rms(cpymad_betabeatings["BETX"])) - self.misserror_bby.append(_get_rms(cpymad_betabeatings["BETY"])) - self.max_misserror_bbx.append(cpymad_betabeatings["BETX"].max()) - self.max_misserror_bby.append(cpymad_betabeatings["BETY"].max()) - - logger.trace("Getting betatron functions at IP1 and IP5") - # cpymad naming: lowercase and appended with :beam_number - self.ip1_misserror_bbx.append(cpymad_betabeatings.BETX[cpymad_betabeatings.NAME == "ip1:1"][0]) - self.ip1_misserror_bby.append(cpymad_betabeatings.BETY[cpymad_betabeatings.NAME == "ip1:1"][0]) - self.ip5_misserror_bbx.append(cpymad_betabeatings.BETX[cpymad_betabeatings.NAME == "ip5:1"][0]) - self.ip5_misserror_bby.append(cpymad_betabeatings.BETY[cpymad_betabeatings.NAME == "ip5:1"][0]) - - def update_miss_from_seeds(self, temp_data) -> None: - """ - Append computed rms values for a group of seeds, to misalignment result values. - - Args: - temp_data: a `BetaBeatValues` object with the seeds' results. 
-        """
-        self.misserror_bbx.append(_get_rms(temp_data.misserror_bbx))
-        self.misserror_bby.append(_get_rms(temp_data.misserror_bby))
-        self.max_misserror_bbx.append(_get_rms(temp_data.max_misserror_bbx))
-        self.max_misserror_bby.append(_get_rms(temp_data.max_misserror_bby))
-        self.ip1_misserror_bbx.append(_get_rms(temp_data.ip1_misserror_bbx))
-        self.ip1_misserror_bby.append(_get_rms(temp_data.ip1_misserror_bby))
-        self.ip5_misserror_bbx.append(_get_rms(temp_data.ip5_misserror_bbx))
-        self.ip5_misserror_bby.append(_get_rms(temp_data.ip5_misserror_bby))
-
-    def to_pandas(self, *args, **kwargs) -> pd.DataFrame:
-        """
-        Exports stored values as a pandas DataFrame.
-
-        Returns:
-            A `pandas.DataFrame` object with the instance's attributes as columns.
-        """
-        return pd.DataFrame(self.dict(*args, **kwargs))
-
-
-class StdevValues(BaseModel):
-    """
-    Simple class to store and transfer standard deviation values.
-
-    Class attributes are as follows:
-    "stdev_tf_x": "Horizontal standard deviation values from field errors",
-    "stdev_tf_y": "Vertical standard deviation values from field errors",
-    "ip1_stdev_tf_x": "Horizontal standard deviation values from field errors at IP1",
-    "ip1_stdev_tf_y": "Vertical standard deviation values from field errors at IP1",
-    "ip5_stdev_tf_x": "Horizontal standard deviation values from field errors at IP5",
-    "ip5_stdev_tf_y": "Vertical standard deviation values from field errors at IP5",
-    "max_stdev_tf_x": "Maximal horizontal standard deviation values from field errors",
-    "max_stdev_tf_y": "Maximal vertical standard deviation values from field errors",
-    "stdev_miss_x": "Horizontal standard deviation values from misalignment errors",
-    "stdev_miss_y": "Vertical standard deviation values from misalignment errors",
-    "ip1_stdev_miss_x": "Horizontal standard deviation values from misalignment errors at IP1",
-    "ip1_stdev_miss_y": "Vertical standard deviation values from misalignment errors at IP1",
-    "ip5_stdev_miss_x": "Horizontal standard deviation values from misalignment errors at IP5",
-    "ip5_stdev_miss_y": "Vertical standard deviation values from misalignment errors at IP5",
-    "max_stdev_miss_x": "Maximal horizontal standard deviation values from misalignment errors",
-    "max_stdev_miss_y": "Maximal vertical standard deviation values from misalignment errors",
-    """
-
-    stdev_tf_x: List[float] = []
-    stdev_tf_y: List[float] = []
-    ip1_stdev_tf_x: List[float] = []
-    ip1_stdev_tf_y: List[float] = []
-    ip5_stdev_tf_x: List[float] = []
-    ip5_stdev_tf_y: List[float] = []
-    max_stdev_tf_x: List[float] = []
-    max_stdev_tf_y: List[float] = []
-    stdev_miss_x: List[float] = []
-    stdev_miss_y: List[float] = []
-    ip1_stdev_miss_x: List[float] = []
-    ip1_stdev_miss_y: List[float] = []
-    ip5_stdev_miss_x: List[float] = []
-    ip5_stdev_miss_y: List[float] = []
-    max_stdev_miss_x: List[float] = []
-    max_stdev_miss_y: List[float] = []
-
-    def describe(self) -> None:
-        """
-        Simple print statement of instance attributes.
-        """
-        for attribute, value in self.dict().items():
-            print(f"{attribute:<20} {value}")
-
-    def update_tf(self, temp_data) -> None:
-        """
-        Appends computed stdev values for a group of seeds to the field error result values.
-
-        Args:
-            temp_data: a `BetaBeatValues` object with the seeds' results.
-
-        Returns:
-            Nothing, updates in place.
-        """
-        self.stdev_tf_x.append(np.std(temp_data.tferror_bbx))
-        self.stdev_tf_y.append(np.std(temp_data.tferror_bby))
-        self.max_stdev_tf_x.append(np.std(temp_data.max_tferror_bbx))
-        self.max_stdev_tf_y.append(np.std(temp_data.max_tferror_bby))
-        self.ip1_stdev_tf_x.append(np.std(temp_data.ip1_tferror_bbx))
-        self.ip1_stdev_tf_y.append(np.std(temp_data.ip1_tferror_bby))
-        self.ip5_stdev_tf_x.append(np.std(temp_data.ip5_tferror_bbx))
-        self.ip5_stdev_tf_y.append(np.std(temp_data.ip5_tferror_bby))
-
-    def update_miss(self, temp_data) -> None:
-        """
-        Appends computed stdev values for a group of seeds to the misalignment error result values.
-
-        Args:
-            temp_data: a `BetaBeatValues` object with the seeds' results.
-
-        Returns:
-            Nothing, updates in place.
-        """
-        self.stdev_miss_x.append(np.std(temp_data.misserror_bbx))
-        self.stdev_miss_y.append(np.std(temp_data.misserror_bby))
-        self.max_stdev_miss_x.append(np.std(temp_data.max_misserror_bbx))
-        self.max_stdev_miss_y.append(np.std(temp_data.max_misserror_bby))
-        self.ip1_stdev_miss_x.append(np.std(temp_data.ip1_misserror_bbx))
-        self.ip1_stdev_miss_y.append(np.std(temp_data.ip1_misserror_bby))
-        self.ip5_stdev_miss_x.append(np.std(temp_data.ip5_misserror_bbx))
-        self.ip5_stdev_miss_y.append(np.std(temp_data.ip5_misserror_bby))
-
-    def to_pandas(self, *args, **kwargs) -> pd.DataFrame:
-        """
-        Exports stored values as a pandas DataFrame.
-
-        Returns:
-            A `pandas.DataFrame` object with the instance's attributes as columns.
-        """
-        return pd.DataFrame(self.dict(*args, **kwargs))
-
-
-def _get_rms(values_list: List[float]) -> float:
-    """
-    Get the root mean square of a list of values.
-
-    Args:
-        values_list (List[float]): a distribution of values.
-
-    Returns:
-        The root mean square of said distribution.
-    """
-    try:
-        return np.sqrt(sum(i ** 2 for i in values_list) / len(values_list))
-    except ZeroDivisionError as issue:
-        logger.exception("An empty list was provided, check the simulation logs to understand why.")
-        raise ZeroDivisionError("No values were provided") from issue
diff --git a/pyhdtoolkit/scripts/triplet_errors/plotting_functions.py b/pyhdtoolkit/scripts/triplet_errors/plotting_functions.py
deleted file mode 100644
index b7da80d0..00000000
--- a/pyhdtoolkit/scripts/triplet_errors/plotting_functions.py
+++ /dev/null
@@ -1,305 +0,0 @@
-"""
-Script scripts.triplet_errors.plotting_functions
-------------------------------------------------
-
-Created on 2019.06.15
-:author: Felix Soubelet
-
-A collection of functions that are useful to plot the results from the GridCompute algorithm.
-"""
-
-import os
-import pathlib
-
-from typing import List
-
-import matplotlib
-import matplotlib.pyplot as plt
-import pandas as pd
-
-from loguru import logger
-
-if os.environ.get("DISPLAY", "") == "":
-    logger.warning("No display found, using non-interactive Agg backend")
-    matplotlib.use("Agg")
-
-
-def plot_betas_across_machine(
-    s_values: List[float],
-    betx_values: List[float],
-    bety_values: List[float],
-    error_type: str,
-    error_value: str,
-) -> None:
-    """
-    Plot beta functions across the machine. Save according to simulation scenario.
-    Creates a plot of the horizontal and vertical beta functions across the whole machine. Gives a
-    title generated according to the error type and error value. Saves in dedicated subfolder.
-
-    Args:
-        s_values (List[float]): the values of the s axis.
-        betx_values (List[float]): horizontal betatron function values across the machine.
-        bety_values (List[float]): vertical betatron function values across the machine.
-        error_type (str): which error you have simulated to get those results.
-        error_value (str): the value of the error you used in your simulations.
-    """
-    if error_type == "TFERROR":
-        title = rf"Beta values, hllhc1.3, 15cm optics, relative field error: {error_value} [$10^{{-4}}$]"
-    elif error_type == "MISERROR":
-        title = rf"Beta values, hllhc1.3, 15cm optics, misalignment: {error_value} [mm]"
-    else:
-        logger.warning(f"Invalid error parameter {error_type} provided, aborting plot")
-        raise ValueError("Error parameter should be either `TFERROR` or `MISERROR`.")
-
-    output_dir = pathlib.Path("beta_plots") / f"{error_type}" / f"{error_value}"
-    if not output_dir.is_dir():
-        logger.info(f"Creating directory {output_dir}")
-        output_dir.mkdir(parents=True)
-
-    plt.figure(figsize=(18, 10))
-    plt.title(title, fontsize=21)
-    plt.xlabel("Position along the s axis [m]", fontsize=17)
-    plt.ylabel(r"$\beta$ value [m]", fontsize=17)
-    plt.xticks(fontsize=12)
-    plt.yticks(fontsize=12)
-    plt.xlim(6500, max(s_values))
-    plt.plot(s_values, betx_values, label="BETX")
-    plt.plot(s_values, bety_values, label="BETY")
-    plt.legend(loc="best", fontsize="xx-large")
-    plt.savefig(output_dir / "betas_across_machine.pdf", format="pdf", dpi=300)
-    logger.info(f"Plotted betas for {error_type} {error_value}")
-
-
-def plot_bbing_max_errorbar(
-    errors: List[float], beta_beatings_df: pd.DataFrame, stdev_df: pd.DataFrame, plane: str, figname: str,
-) -> None:
-    """
-    Plot beta-beating values, with error bars, as a function of the error values. Save according
-    to plotted plane. Creates a plot of the horizontal or vertical beta-beatings across the range
-    of simulated error values. Gives a title generated according to the plotted plane and saves
-    under the provided figure name.
-
-    Args:
-        errors (List[float]): the different error values simulated.
-        beta_beatings_df (pd.DataFrame): the resulting beta-beating values.
-        stdev_df (pd.DataFrame): the standard deviations for those values.
-        plane (str): the name of the plane to plot.
-        figname (str): how to name the file when exporting the plot.
-    """
-
-    if plane.lower() == "horizontal":
-        _, axes = plt.subplots(1, 1, figsize=(8, 6))
-        axes.errorbar(
-            errors,
-            beta_beatings_df.tferror_bbx,
-            yerr=stdev_df.stdev_tf_x,
-            color="C0",
-            label="Global Beta-Beating from Field Errors",
-        )
-        axes.errorbar(
-            errors,
-            beta_beatings_df.misserror_bbx,
-            yerr=stdev_df.stdev_miss_x,
-            color="C1",
-            label="Global Beta-Beating from Misalignment Errors",
-        )
-        axes.plot(
-            errors, beta_beatings_df.max_tferror_bbx, "^", color="C0", label="Max Value from Field Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.max_misserror_bbx,
-            "^",
-            color="C1",
-            label="Max Value from Misalignment Errors",
-        )
-        axes.set_xlabel(r"Relative Field Error [$10^{-4}$] or Longitudinal Misalignment [mm]", fontsize=15)
-        axes.set_ylabel(r"$\Delta \beta / \beta$ [%]", fontsize=15)
-        plt.tight_layout()
-        plt.title(f"Beta-Beating Against Triplet Errors, {plane} Plane", fontsize=15)
-        plt.legend(loc="best")
-        plt.savefig(figname, format="pdf", dpi=300)
-
-    elif plane.lower() == "vertical":
-        _, axes = plt.subplots(1, 1, figsize=(8, 6))
-        axes.errorbar(
-            errors,
-            beta_beatings_df.tferror_bby,
-            yerr=stdev_df.stdev_tf_y,
-            color="C0",
-            label="Global Beta-Beating from Field Errors",
-        )
-        axes.errorbar(
-            errors,
-            beta_beatings_df.misserror_bby,
-            yerr=stdev_df.stdev_miss_y,
-            color="C1",
-            label="Global Beta-Beating from Misalignment Errors",
-        )
-        axes.plot(
-            errors, beta_beatings_df.max_tferror_bby, "^", color="C0", label="Max Value from Field Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.max_misserror_bby,
-            "^",
-            color="C1",
-            label="Max Value from Misalignment Errors",
-        )
-        axes.set_xlabel(r"Relative Field Error [$10^{-4}$] or Longitudinal Misalignment [mm]", fontsize=15)
-        axes.set_ylabel(r"$\Delta \beta / \beta$ [%]", fontsize=15)
-        plt.tight_layout()
-        plt.title(f"Beta-Beating Against Triplet Errors, {plane} Plane", fontsize=15)
-        plt.legend(loc="best")
-        plt.savefig(figname, format="pdf", dpi=300)
-
-    else:
-        logger.warning(f"Invalid plane parameter {plane} provided, aborting plot")
-        raise ValueError("Plane parameter should be either `Horizontal` or `Vertical`")
-    logger.info(f"Plotted beta-beatings with error bars for {plane.lower()} plane")
-
-
-def plot_bbing_with_ips_errorbar(
-    errors: List[float], beta_beatings_df: pd.DataFrame, stdev_df: pd.DataFrame, plane: str, figname: str,
-) -> None:
-    """
-    Plot beta-beating values, with error bars, as a function of the error values. Save according
-    to plotted plane. Creates a plot of the horizontal or vertical beta-beatings across the range
-    of simulated error values, with the addition of the beta-beating value at IPs. Gives a title
-    generated according to the plotted plane and saves under the provided figure name.
-
-    Args:
-        errors (List[float]): the different error values simulated.
-        beta_beatings_df (pd.DataFrame): the resulting beta-beating values.
-        stdev_df (pd.DataFrame): the standard deviations for those values.
-        plane (str): the name of the plane to plot.
-        figname (str): how to name the file when exporting the plot.
-    """
-
-    if plane.lower() == "horizontal":
-        _, axes = plt.subplots(1, 1, figsize=(8, 6))
-        axes.errorbar(
-            errors,
-            beta_beatings_df.tferror_bbx,
-            yerr=stdev_df.stdev_tf_x,
-            color="C0",
-            label="Global Beta-Beating from Field Errors",
-        )
-        axes.errorbar(
-            errors,
-            beta_beatings_df.misserror_bbx,
-            yerr=stdev_df.stdev_miss_x,
-            color="C1",
-            label="Global Beta-Beating from Misalignment Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip1_tferror_bbx,
-            "^",
-            color="C0",
-            label="IP1 Beta-Beating Value from Field Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip1_misserror_bbx,
-            "^",
-            color="C1",
-            label="IP1 Beta-Beating Value from Misalignment Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip5_tferror_bbx,
-            "x",
-            color="C0",
-            label="IP5 Beta-Beating Value from Field Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip5_misserror_bbx,
-            "x",
-            color="C1",
-            label="IP5 Beta-Beating Value from Misalignment Errors",
-        )
-        axes.set_xlabel(r"Relative Field Error [$10^{-4}$] or Longitudinal Misalignment [mm]", fontsize=15)
-        axes.set_ylabel(r"$\Delta \beta / \beta$ [%]", fontsize=15)
-        plt.tight_layout()
-        plt.title(f"Beta-Beating Against Triplet Errors, {plane} Plane", fontsize=15)
-        plt.legend(loc="best")
-        plt.savefig(figname, format="pdf", dpi=300)
-
-    elif plane.lower() == "vertical":
-        _, axes = plt.subplots(1, 1, figsize=(8, 6))
-        axes.errorbar(
-            errors,
-            beta_beatings_df.tferror_bby,
-            yerr=stdev_df.stdev_tf_y,
-            color="C0",
-            label="Global Beta-Beating from Field Errors",
-        )
-        axes.errorbar(
-            errors,
-            beta_beatings_df.misserror_bby,
-            yerr=stdev_df.stdev_miss_y,
-            color="C1",
-            label="Global Beta-Beating from Misalignment Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip1_tferror_bby,
-            "^",
-            color="C0",
-            label="IP1 Beta-Beating Value from Field Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip1_misserror_bby,
-            "^",
-            color="C1",
-            label="IP1 Beta-Beating Value from Misalignment Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip5_tferror_bby,
-            "x",
-            color="C0",
-            label="IP5 Beta-Beating Value from Field Errors",
-        )
-        axes.plot(
-            errors,
-            beta_beatings_df.ip5_misserror_bby,
-            "x",
-            color="C1",
-            label="IP5 Beta-Beating Value from Misalignment Errors",
-        )
-        axes.set_xlabel(r"Relative Field Error [$10^{-4}$] or Longitudinal Misalignment [mm]", fontsize=15)
-        axes.set_ylabel(r"$\Delta \beta / \beta$ [%]", fontsize=15)
-        plt.tight_layout()
-        plt.title(f"Beta-Beating Against Triplet Errors, {plane} Plane", fontsize=15)
-        plt.legend(loc="best")
-        plt.savefig(figname, format="pdf", dpi=300)
-
-    else:
-        logger.warning(f"Invalid plane parameter {plane} provided, aborting plot")
-        raise ValueError("Plane parameter should be either `Horizontal` or `Vertical`")
-    logger.info(f"Plotted beta-beatings (including IPs) with error bars for {plane.lower()} plane")
-
-
-def plot_intermediate_beta_histograms(
-    betasx: List[float], betasy: List[float], error_val: float, title: str, outputname: str
-) -> None:
-    """
-    Plot the histogram distribution of betas across seeds.
-
-    Args:
-        betasx (List[float]): horizontal beta values for all seeds for a specific error value.
-        betasy (List[float]): vertical beta values for all seeds for a specific error value.
-        error_val (float): the error value.
-        title (str): the title to give the figure.
-        outputname (str): the name to give the file saving the figure.
-    """
-    plt.hist(betasx, bins=50, label=f"{error_val}, horizontal", alpha=0.6, density=True)
-    plt.hist(betasy, bins=50, label=f"{error_val}, vertical", alpha=0.6, density=True)
-    plt.legend(loc="best")
-    plt.title(title)
-    plt.savefig(outputname, format="pdf", dpi=300)
-    logger.info(f"Plotted intermediate beta histogram, saved as {outputname}")
diff --git a/pyproject.toml b/pyproject.toml
index 290a52f0..ae21ef04 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pyhdtoolkit"
-version = "0.8.0"
+version = "0.8.1"
 description = "An all-in-one toolkit package to ease my Python work in my PhD."
 authors = ["Felix Soubelet "]
 license = "MIT"
@@ -37,10 +37,8 @@ pandas = "^1.0"
 matplotlib = "^3.1"
 scipy = "^1.4"
 tfs-pandas = "^2.0"
-pydantic = "^1.6"
 loguru = "^0"
 numba = ">=0.51,<1.0"
-rich = "^9.0"
 cpymad = "^1.6"
 
 # Defining optional dependencies for extras
@@ -74,7 +72,6 @@ source = ["pyhdtoolkit/"]
 
 [tool.coverage.report]
 ignore_errors = true
-omit = ["pyhdtoolkit/scripts/*"]
 
 # ----- Documentation Configuration ----- #
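Note, not part of the change set above: since this diff removes the whole triplet_errors data model along with its pydantic dependency, the minimal sketch below illustrates, for reference, the per-seed aggregation the deleted BetaBeatValues.update_tf_from_cpymad helper performed on a cpymad beta-beating frame (global rms, maximum, and the value at an IP). The betabeatings frame and its NAME/BETX/BETY columns mirror the deleted code; the numbers and the rms helper name are illustrative assumptions only.

import numpy as np
import pandas as pd


def rms(values) -> float:
    # Root mean square, equivalent to what the deleted _get_rms helper computed
    values = np.asarray(values, dtype=float)
    if values.size == 0:
        raise ZeroDivisionError("No values were provided")
    return float(np.sqrt(np.mean(values ** 2)))


# Illustrative beta-beating results, mimicking the columns used by the deleted module
betabeatings = pd.DataFrame(
    {
        "NAME": ["mq.example:1", "ip1:1", "ip5:1"],
        "BETX": [1.2, 0.8, 1.1],  # horizontal beta-beating [%]
        "BETY": [0.9, 1.0, 1.3],  # vertical beta-beating [%]
    }
)

global_rms_x = rms(betabeatings["BETX"])  # what tferror_bbx stored for each seed
maximum_x = betabeatings["BETX"].max()  # what max_tferror_bbx stored for each seed
ip1_x = betabeatings.loc[betabeatings.NAME == "ip1:1", "BETX"].iloc[0]  # IP1 value
print(global_rms_x, maximum_x, ip1_x)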