diff --git a/pixi.lock b/pixi.lock
index 64d2eac..e843b45 100644
--- a/pixi.lock
+++ b/pixi.lock
@@ -127,7 +127,11 @@ environments:
- pypi: https://files.pythonhosted.org/packages/74/f5/9373290775639cb67a2fce7f629a1c240dce9f12fe927bc32b2736e16dfc/argcomplete-3.6.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/77/39/4d8414260c3d83f22029a39e51553c173611b378d62ca391e5ca68e65cfa/awkward-2.9.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/ca/aa/ab2d6d68c3ee50f6dedbbc91a31cd38f9fede9258d54e7aca29bfca4ebc1/awkward_cpp-52-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5e/0a/3966f239e1d9da93cb755dc0213835ce4e9ed93645192878d0a055ecdc31/boto3-1.42.42-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e6/51/aac7e419521d5519e13087a7198623655648c939822bd7f4bdc9ccbe07f9/botocore-1.42.42-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ef/79/c45f2d53efe6ada1110cf6f9fca095e4ff47a0454444aefdde6ac4789179/cachecontrol-0.14.4-py3-none-any.whl
@@ -136,6 +140,7 @@ environments:
- pypi: https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/19/0f/f6121b90b86b9093c066889274d26a1de3f29969d45c2ed1ecbe2033cb78/cramjam-2.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/db/2b/1239938a2629c29363e07724d7bd4c87a8b566947ecee2afb5f5ac34e1bb/cwl_upgrader-1.2.14-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/63/4b/ccab2a5ca9e0b6553810b85c06387e60fc9443cec3c987e3a062705bd225/cwl_utils-0.40-py3-none-any.whl
@@ -143,9 +148,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/65/ee/a7aba2b112c5ae879d5cfb231c75189a7fd2a5e84b6af7e07dd71fb2bb35/cwltool-3.1.20260108082145-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5a/36/17015b7bae2783f7bbde50a8bafdeb702802c080322204f1bfcae25b9e02/DB12-1.0.4-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/75/c0/63d2ab6ef062e05e795fb49ebcd8a907c1d4f78d9f01c577266b12bd0da2/dirac-9.0.18-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/d0/d0/9e71fdc3394ffc632f35946c572e60fcc2a5452ba0a23c52493f23d60672/dirac-9.1.6-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9f/90/279f55fff9481f9e0424c3c97b24dc10004ec8d8f98ddf5afd07a7b79194/diraccfg-1.0.1-py2.py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/06/d2/500c9ae651fd3821ca70814aa40cb5ab9bab9b479387ccd8dcb4df745d44/diraccommon-9.0.18-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/f8/b2/ad8e7e63fdf5add3ceb7a0805d700e9fd7cb7d5743f765a4994b4ec286d7/diraccommon-9.1.6-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ab/7e/5f02b757bb825e5cdc65f6f7a12c209963bec877d61497393bea8f41f9ce/diracx_api-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b0/28/87e78ff0d6041f40431d88b8aa3b645be7476a420d8dcbf7197f5b394c5c/diracx_cli-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f2/61/0c78d9778bffd844863d3173a5fefb506d7131ceebecee523a9e27024aa1/diracx_client-0.0.8-py3-none-any.whl
@@ -154,7 +160,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/dc/80/12235e5b75bb2c586733280854f131b86051e0bbdfb55349ff70d0f72cf9/dogpile_cache-1.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/58/19/0380af745f151a1648657bbcef0fb49ac28bf09083d94498163ffd9b32dc/dominate-2.9.1-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/37/f9/f8497ef8b873a8bb2a750ee2a6c5f0fc22258e1acb6245fd237042a6c279/fabric-3.2.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/d5/1f/5f4a3cd9e4440e9d9bc78ad0a91a1c8d46b4d429d5239ebe6793c9fe5c41/fsspec-2026.3.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/73/90/a2c51050d9254bd9134e6368b3f94f92f0eb2c34ed0ca19ec449ce2fc288/fsspec_xrootd-0.5.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5f/e8/2e6301567e6debaad6abae0e217428471651ce877537b7095b6a8e7d8cd2/fts3-3.14.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b0/57/dea471da24ceac6de8c3dc5d37e4ddde57a5c340d6bac90010898734de34/gitlint_core-0.19.1-py3-none-any.whl
@@ -164,17 +173,25 @@ environments:
- pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/d0/23/49cf8ea1d129637941f06fb78f5f66077bf362762c5f6c01712c4cd0e87f/hyperscan-0.8.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a1/01/9674cc6d478406ae61d910cb16ca8b5699a8a9e6a2019987ebe5a5957d1d/joserfc-1.6.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ba/24/c65fe1aef4e0681cb17ca136eb0f3e20a47d3941a306bc9d636938029ca5/lb_telemetry-0.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/87/a4/afc9dddc6b14fb3d52a900cd9b4c77770128edc4b07e576034bbd0ffd290/LbCondaWrappers-0.5.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/d7/4c/f3b97c7d6008b3a895bbadb2deb44ad3446ae5fe204c72cd540dc222e57d/lbenv-2.4.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/16/4a/b4d7feb029d4e75d4882d8d1d9029938c31a2e73074f87ffcff0f4a8ba9e/lbplatformutils-4.5.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/6d/10/b37ac718c5903758fa9058a5182026a4f3b65443196b82c7840389ea0dbd/lbplatformutils-4.6.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/02/64/c86924898062e8217ed914a29458cfde9e4a9b80e4d4cbcca141983ba339/lbprodrun-1.12.4-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/29/ce/ed422816fb30ffa3bc11597b30d5deca06b4a1388707a04215da73c65b53/levenshtein-0.27.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl
+ - pypi: git+https://gitlab.cern.ch/jlisalab/LHCbDIRAC.git?rev=modules-to-cwl-migration#bc5f8f1804107bc8807edcc98a97c346ef61c0e8
+ - pypi: https://files.pythonhosted.org/packages/7f/37/8ea3555769b6048b5e4ec162cc90fd32e761c0e381ffb3baf888cb0d8a71/lhcbdiracx_api-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/68/15/bfb0c717b8f23c16907f3e73e8f56010ccd72e9900a108209665b0d9ed4b/lhcbdiracx_cli-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/2d/88/67602dfa2d7ab5d0518af82db34ab4d70dc2c3029b3ca788299a3be4a96d/lhcbdiracx_client-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/34/8f/6105afdd8f4e1f3b198d09e4b2622622923d9fa9e077aed852c0bb035a3a/lhcbdiracx_core-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b3/68/aa714515d65090fcbcc9a1f3debd5a644b14aad11e59238f42f00bd4b298/logzero-1.7.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl
@@ -183,6 +200,9 @@ environments:
- pypi: https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/76/3e/c0b690253f0b82d86e99949af13533363acfb5432ecb5d53dd5b3bce9c34/orjson-3.11.9-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/15/88/3cdd54fa279341afa10acf8d2b503556b1375245dccc9315659f795dd2e9/pandas-3.0.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/a9/90/a744336f5af32c433bd09af7854599682a383b37cfd78f7de263de6ad6cb/paramiko-4.0.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
@@ -195,12 +215,16 @@ environments:
- pypi: https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/7e/32/a7125fb28c4261a627f999d5fb4afff25b523800faed2c30979949d6facd/pydot-4.0.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/3e/d0/f301f83ac8dbe53442c5a43f6a39016f94f754d7a9815a875b65e218a307/pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/38/ee/a61bb562bdf6f0bc6c51cdcf80ab5503cbb4b2f5053fa4b054cc0a56e48a/python_gitlab-8.2.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/c8/85/9535df0b78ba51f478c9ce7eb6d1f85535cc31fe356773b48fd9d3e563ca/rapidfuzz-3.14.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/b9/20/35d2baebacf357b562bd081936b66cd845775442973cb033a377fd639a84/rdflib-7.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/04/80/97b6f357ac458d9ad9872cc3183ca09ef7439ac89e030ea43053ba1294b6/rich_argparse-1.7.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/af/fe/b6045c782f1fd1ae317d2a6ca1884857ce5c20f59befe6ab25a8603c43a7/ruamel_yaml-0.18.17-py3-none-any.whl
@@ -210,8 +234,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/89/d4/24a137517140fc8cc07f7423695b9296c993d6b6cbf2a7867d8f859de77f/schema_salad-8.9.20251102115403-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/70/98/d82f14ac7ffedbd38dfa2383f142b26d18d23ca6cf35a40f4af60df666bd/sh-2.2.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/59/25/68f81c50aeb3f30e01c20da0f56d14c1a779b57d9308c50e19c63dc8413f/signurlarity-0.2.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f9/38/8b6fc7a8153cb49eb3a9a13acfa9eeb6cc476e37888781e593e6f02ac05e/spython-0.3.14-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/05/45/1256fb597bb83b58a01ddb600c59fe6fdf0e5afe333f0456ed75c0f8d7bd/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl
@@ -219,11 +245,14 @@ environments:
- pypi: https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/c4/55/85e2732345dd8b66437cddedac4ee7ef2d9c25bf8792830b095f2ee658f3/uproot-5.7.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/3c/c1/d73f12f8cdb1891334a2ccf7389eed244d3941e74d80dd220badb937f3fb/wcwidth-0.5.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/db/56/073989deb4b5d7d6e7ea424476a4ae4bda02140f2dbeaafb14ba4864dd60/wrapt-2.1.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/1c/1c/ab905d19a1349e847e37e02933316d17adfd1dd70b64d366885ab0bd959d/xattr-1.3.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/ca/6d/b1a49f9712a910acdcb8dc5765e57d60c2be9fe9b001a21b6a98a1d85adb/xenv-0.0.6-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/38/34/98a2f52245f4d47be93b580dae5f9861ef58977d73a79eb47c58f1ad1f3a/xmltodict-1.0.4-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
@@ -342,7 +371,11 @@ environments:
- pypi: https://files.pythonhosted.org/packages/74/f5/9373290775639cb67a2fce7f629a1c240dce9f12fe927bc32b2736e16dfc/argcomplete-3.6.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/77/39/4d8414260c3d83f22029a39e51553c173611b378d62ca391e5ca68e65cfa/awkward-2.9.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/9a/02/0e550c9606ffae81603a9d240369a93cdf1e4bc48e2e314d367825a1c02d/awkward_cpp-52-cp314-cp314-macosx_10_15_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl
+ - pypi: https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5e/0a/3966f239e1d9da93cb755dc0213835ce4e9ed93645192878d0a055ecdc31/boto3-1.42.42-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e6/51/aac7e419521d5519e13087a7198623655648c939822bd7f4bdc9ccbe07f9/botocore-1.42.42-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ef/79/c45f2d53efe6ada1110cf6f9fca095e4ff47a0454444aefdde6ac4789179/cachecontrol-0.14.4-py3-none-any.whl
@@ -351,6 +384,7 @@ environments:
- pypi: https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl
- pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/de/07/a1051cdbbe6d723df16d756b97f09da7c1adb69e29695c58f0392bc12515/cramjam-2.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl
- pypi: https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl
- pypi: https://files.pythonhosted.org/packages/db/2b/1239938a2629c29363e07724d7bd4c87a8b566947ecee2afb5f5ac34e1bb/cwl_upgrader-1.2.14-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/63/4b/ccab2a5ca9e0b6553810b85c06387e60fc9443cec3c987e3a062705bd225/cwl_utils-0.40-py3-none-any.whl
@@ -358,9 +392,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/65/ee/a7aba2b112c5ae879d5cfb231c75189a7fd2a5e84b6af7e07dd71fb2bb35/cwltool-3.1.20260108082145-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5a/36/17015b7bae2783f7bbde50a8bafdeb702802c080322204f1bfcae25b9e02/DB12-1.0.4-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/75/c0/63d2ab6ef062e05e795fb49ebcd8a907c1d4f78d9f01c577266b12bd0da2/dirac-9.0.18-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/d0/d0/9e71fdc3394ffc632f35946c572e60fcc2a5452ba0a23c52493f23d60672/dirac-9.1.6-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9f/90/279f55fff9481f9e0424c3c97b24dc10004ec8d8f98ddf5afd07a7b79194/diraccfg-1.0.1-py2.py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/06/d2/500c9ae651fd3821ca70814aa40cb5ab9bab9b479387ccd8dcb4df745d44/diraccommon-9.0.18-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/f8/b2/ad8e7e63fdf5add3ceb7a0805d700e9fd7cb7d5743f765a4994b4ec286d7/diraccommon-9.1.6-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ab/7e/5f02b757bb825e5cdc65f6f7a12c209963bec877d61497393bea8f41f9ce/diracx_api-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b0/28/87e78ff0d6041f40431d88b8aa3b645be7476a420d8dcbf7197f5b394c5c/diracx_cli-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f2/61/0c78d9778bffd844863d3173a5fefb506d7131ceebecee523a9e27024aa1/diracx_client-0.0.8-py3-none-any.whl
@@ -369,7 +404,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/dc/80/12235e5b75bb2c586733280854f131b86051e0bbdfb55349ff70d0f72cf9/dogpile_cache-1.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/58/19/0380af745f151a1648657bbcef0fb49ac28bf09083d94498163ffd9b32dc/dominate-2.9.1-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/37/f9/f8497ef8b873a8bb2a750ee2a6c5f0fc22258e1acb6245fd237042a6c279/fabric-3.2.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/d5/1f/5f4a3cd9e4440e9d9bc78ad0a91a1c8d46b4d429d5239ebe6793c9fe5c41/fsspec-2026.3.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/73/90/a2c51050d9254bd9134e6368b3f94f92f0eb2c34ed0ca19ec449ce2fc288/fsspec_xrootd-0.5.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5f/e8/2e6301567e6debaad6abae0e217428471651ce877537b7095b6a8e7d8cd2/fts3-3.14.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b0/57/dea471da24ceac6de8c3dc5d37e4ddde57a5c340d6bac90010898734de34/gitlint_core-0.19.1-py3-none-any.whl
@@ -379,17 +417,25 @@ environments:
- pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/22/ed/9c45c468fd6c31df3fe0622394b1853c00b86545d1e297f3fb9fba1232ce/hyperscan-0.8.2-cp314-cp314-macosx_10_15_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a1/01/9674cc6d478406ae61d910cb16ca8b5699a8a9e6a2019987ebe5a5957d1d/joserfc-1.6.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ba/24/c65fe1aef4e0681cb17ca136eb0f3e20a47d3941a306bc9d636938029ca5/lb_telemetry-0.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/87/a4/afc9dddc6b14fb3d52a900cd9b4c77770128edc4b07e576034bbd0ffd290/LbCondaWrappers-0.5.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/d7/4c/f3b97c7d6008b3a895bbadb2deb44ad3446ae5fe204c72cd540dc222e57d/lbenv-2.4.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/16/4a/b4d7feb029d4e75d4882d8d1d9029938c31a2e73074f87ffcff0f4a8ba9e/lbplatformutils-4.5.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/6d/10/b37ac718c5903758fa9058a5182026a4f3b65443196b82c7840389ea0dbd/lbplatformutils-4.6.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/02/64/c86924898062e8217ed914a29458cfde9e4a9b80e4d4cbcca141983ba339/lbprodrun-1.12.4-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/f3/e1/2f705da403f865a5fa3449b155738dc9c53021698fd6926253a9af03180b/levenshtein-0.27.3-cp314-cp314-macosx_10_15_x86_64.whl
+ - pypi: git+https://gitlab.cern.ch/jlisalab/LHCbDIRAC.git?rev=modules-to-cwl-migration#bc5f8f1804107bc8807edcc98a97c346ef61c0e8
+ - pypi: https://files.pythonhosted.org/packages/7f/37/8ea3555769b6048b5e4ec162cc90fd32e761c0e381ffb3baf888cb0d8a71/lhcbdiracx_api-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/68/15/bfb0c717b8f23c16907f3e73e8f56010ccd72e9900a108209665b0d9ed4b/lhcbdiracx_cli-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/2d/88/67602dfa2d7ab5d0518af82db34ab4d70dc2c3029b3ca788299a3be4a96d/lhcbdiracx_client-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/34/8f/6105afdd8f4e1f3b198d09e4b2622622923d9fa9e077aed852c0bb035a3a/lhcbdiracx_core-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b3/68/aa714515d65090fcbcc9a1f3debd5a644b14aad11e59238f42f00bd4b298/logzero-1.7.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl
@@ -398,6 +444,9 @@ environments:
- pypi: https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/8e/eb/5da01e356015aee6ecfa1187ced87aef51364e306f5e695dd52719bf0e78/orjson-3.11.9-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl
+ - pypi: https://files.pythonhosted.org/packages/bb/40/c6ea527147c73b24fc15c891c3fcffe9c019793119c5742b8784a062c7db/pandas-3.0.2-cp314-cp314-macosx_10_15_x86_64.whl
+ - pypi: https://files.pythonhosted.org/packages/a9/90/a744336f5af32c433bd09af7854599682a383b37cfd78f7de263de6ad6cb/paramiko-4.0.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl
@@ -410,12 +459,16 @@ environments:
- pypi: https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/7e/32/a7125fb28c4261a627f999d5fb4afff25b523800faed2c30979949d6facd/pydot-4.0.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl
- pypi: https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/38/ee/a61bb562bdf6f0bc6c51cdcf80ab5503cbb4b2f5053fa4b054cc0a56e48a/python_gitlab-8.2.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/81/41/aa3ffb3355e62e1bf91f6599b3092e866bc88487a07c524004943c7676df/rapidfuzz-3.14.5-cp314-cp314-macosx_10_15_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/b9/20/35d2baebacf357b562bd081936b66cd845775442973cb033a377fd639a84/rdflib-7.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/04/80/97b6f357ac458d9ad9872cc3183ca09ef7439ac89e030ea43053ba1294b6/rich_argparse-1.7.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/af/fe/b6045c782f1fd1ae317d2a6ca1884857ce5c20f59befe6ab25a8603c43a7/ruamel_yaml-0.18.17-py3-none-any.whl
@@ -425,8 +478,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/62/86/3915cb5a603e1b1d798e1ee1ce2a0a390a0f85d35da97e4b6d1c6a45421b/schema_salad-8.9.20251102115403-cp314-cp314-macosx_10_15_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/70/98/d82f14ac7ffedbd38dfa2383f142b26d18d23ca6cf35a40f4af60df666bd/sh-2.2.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/59/25/68f81c50aeb3f30e01c20da0f56d14c1a779b57d9308c50e19c63dc8413f/signurlarity-0.2.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f9/38/8b6fc7a8153cb49eb3a9a13acfa9eeb6cc476e37888781e593e6f02ac05e/spython-0.3.14-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl
@@ -434,11 +489,14 @@ environments:
- pypi: https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/c4/55/85e2732345dd8b66437cddedac4ee7ef2d9c25bf8792830b095f2ee658f3/uproot-5.7.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/3c/c1/d73f12f8cdb1891334a2ccf7389eed244d3941e74d80dd220badb937f3fb/wcwidth-0.5.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/95/a0/1c2396e272f91efe6b16a6a8bce7ad53856c8f9ae4f34ceaa711d63ec9e1/wrapt-2.1.1-cp314-cp314-macosx_10_15_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/9d/0a/03192e78071cfb86e6d8ceae0e5dcec4bacf0fd734755263aabd01532e50/xattr-1.3.0-cp314-cp314-macosx_10_15_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/ca/6d/b1a49f9712a910acdcb8dc5765e57d60c2be9fe9b001a21b6a98a1d85adb/xenv-0.0.6-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/38/34/98a2f52245f4d47be93b580dae5f9861ef58977d73a79eb47c58f1ad1f3a/xmltodict-1.0.4-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl
- pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl
@@ -557,7 +615,11 @@ environments:
- pypi: https://files.pythonhosted.org/packages/74/f5/9373290775639cb67a2fce7f629a1c240dce9f12fe927bc32b2736e16dfc/argcomplete-3.6.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/77/39/4d8414260c3d83f22029a39e51553c173611b378d62ca391e5ca68e65cfa/awkward-2.9.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/fb/b9/0978fa6f21f504b617ccee4843210d7ab8921a10e94e3bbf084498dcfad7/awkward_cpp-52-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl
+ - pypi: https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5e/0a/3966f239e1d9da93cb755dc0213835ce4e9ed93645192878d0a055ecdc31/boto3-1.42.42-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e6/51/aac7e419521d5519e13087a7198623655648c939822bd7f4bdc9ccbe07f9/botocore-1.42.42-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ef/79/c45f2d53efe6ada1110cf6f9fca095e4ff47a0454444aefdde6ac4789179/cachecontrol-0.14.4-py3-none-any.whl
@@ -566,6 +628,7 @@ environments:
- pypi: https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl
- pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/de/07/a1051cdbbe6d723df16d756b97f09da7c1adb69e29695c58f0392bc12515/cramjam-2.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl
- pypi: https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl
- pypi: https://files.pythonhosted.org/packages/db/2b/1239938a2629c29363e07724d7bd4c87a8b566947ecee2afb5f5ac34e1bb/cwl_upgrader-1.2.14-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/63/4b/ccab2a5ca9e0b6553810b85c06387e60fc9443cec3c987e3a062705bd225/cwl_utils-0.40-py3-none-any.whl
@@ -573,9 +636,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/65/ee/a7aba2b112c5ae879d5cfb231c75189a7fd2a5e84b6af7e07dd71fb2bb35/cwltool-3.1.20260108082145-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5a/36/17015b7bae2783f7bbde50a8bafdeb702802c080322204f1bfcae25b9e02/DB12-1.0.4-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/75/c0/63d2ab6ef062e05e795fb49ebcd8a907c1d4f78d9f01c577266b12bd0da2/dirac-9.0.18-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/d0/d0/9e71fdc3394ffc632f35946c572e60fcc2a5452ba0a23c52493f23d60672/dirac-9.1.6-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9f/90/279f55fff9481f9e0424c3c97b24dc10004ec8d8f98ddf5afd07a7b79194/diraccfg-1.0.1-py2.py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/06/d2/500c9ae651fd3821ca70814aa40cb5ab9bab9b479387ccd8dcb4df745d44/diraccommon-9.0.18-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/f8/b2/ad8e7e63fdf5add3ceb7a0805d700e9fd7cb7d5743f765a4994b4ec286d7/diraccommon-9.1.6-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ab/7e/5f02b757bb825e5cdc65f6f7a12c209963bec877d61497393bea8f41f9ce/diracx_api-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b0/28/87e78ff0d6041f40431d88b8aa3b645be7476a420d8dcbf7197f5b394c5c/diracx_cli-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f2/61/0c78d9778bffd844863d3173a5fefb506d7131ceebecee523a9e27024aa1/diracx_client-0.0.8-py3-none-any.whl
@@ -584,7 +648,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/dc/80/12235e5b75bb2c586733280854f131b86051e0bbdfb55349ff70d0f72cf9/dogpile_cache-1.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/58/19/0380af745f151a1648657bbcef0fb49ac28bf09083d94498163ffd9b32dc/dominate-2.9.1-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/37/f9/f8497ef8b873a8bb2a750ee2a6c5f0fc22258e1acb6245fd237042a6c279/fabric-3.2.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl
+ - pypi: https://files.pythonhosted.org/packages/d5/1f/5f4a3cd9e4440e9d9bc78ad0a91a1c8d46b4d429d5239ebe6793c9fe5c41/fsspec-2026.3.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/73/90/a2c51050d9254bd9134e6368b3f94f92f0eb2c34ed0ca19ec449ce2fc288/fsspec_xrootd-0.5.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/5f/e8/2e6301567e6debaad6abae0e217428471651ce877537b7095b6a8e7d8cd2/fts3-3.14.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b0/57/dea471da24ceac6de8c3dc5d37e4ddde57a5c340d6bac90010898734de34/gitlint_core-0.19.1-py3-none-any.whl
@@ -593,17 +660,25 @@ environments:
- pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/d8/da/a8bb48a4fee86b5dad8a358559b70b010cd7effaa70ca5bb4e6e82e13703/hyperscan-0.8.2-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/a1/01/9674cc6d478406ae61d910cb16ca8b5699a8a9e6a2019987ebe5a5957d1d/joserfc-1.6.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ba/24/c65fe1aef4e0681cb17ca136eb0f3e20a47d3941a306bc9d636938029ca5/lb_telemetry-0.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/87/a4/afc9dddc6b14fb3d52a900cd9b4c77770128edc4b07e576034bbd0ffd290/LbCondaWrappers-0.5.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/d7/4c/f3b97c7d6008b3a895bbadb2deb44ad3446ae5fe204c72cd540dc222e57d/lbenv-2.4.0-py3-none-any.whl
- - pypi: https://files.pythonhosted.org/packages/16/4a/b4d7feb029d4e75d4882d8d1d9029938c31a2e73074f87ffcff0f4a8ba9e/lbplatformutils-4.5.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/6d/10/b37ac718c5903758fa9058a5182026a4f3b65443196b82c7840389ea0dbd/lbplatformutils-4.6.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/02/64/c86924898062e8217ed914a29458cfde9e4a9b80e4d4cbcca141983ba339/lbprodrun-1.12.4-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/76/2c/bb6ef359e007fe7b6b3195b68a94f4dd3ecd1885ee337ee8fbd4df55996f/levenshtein-0.27.3-cp314-cp314-macosx_11_0_arm64.whl
+ - pypi: git+https://gitlab.cern.ch/jlisalab/LHCbDIRAC.git?rev=modules-to-cwl-migration#bc5f8f1804107bc8807edcc98a97c346ef61c0e8
+ - pypi: https://files.pythonhosted.org/packages/7f/37/8ea3555769b6048b5e4ec162cc90fd32e761c0e381ffb3baf888cb0d8a71/lhcbdiracx_api-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/68/15/bfb0c717b8f23c16907f3e73e8f56010ccd72e9900a108209665b0d9ed4b/lhcbdiracx_cli-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/2d/88/67602dfa2d7ab5d0518af82db34ab4d70dc2c3029b3ca788299a3be4a96d/lhcbdiracx_client-0.0.8-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/34/8f/6105afdd8f4e1f3b198d09e4b2622622923d9fa9e077aed852c0bb035a3a/lhcbdiracx_core-0.0.8-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b3/68/aa714515d65090fcbcc9a1f3debd5a644b14aad11e59238f42f00bd4b298/logzero-1.7.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl
- pypi: https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl
@@ -612,6 +687,9 @@ environments:
- pypi: https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/8e/eb/5da01e356015aee6ecfa1187ced87aef51364e306f5e695dd52719bf0e78/orjson-3.11.9-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl
+ - pypi: https://files.pythonhosted.org/packages/95/25/bdb9326c3b5455f8d4d3549fce7abcf967259de146fe2cf7a82368141948/pandas-3.0.2-cp314-cp314-macosx_11_0_arm64.whl
+ - pypi: https://files.pythonhosted.org/packages/a9/90/a744336f5af32c433bd09af7854599682a383b37cfd78f7de263de6ad6cb/paramiko-4.0.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl
@@ -624,12 +702,16 @@ environments:
- pypi: https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/7e/32/a7125fb28c4261a627f999d5fb4afff25b523800faed2c30979949d6facd/pydot-4.0.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl
- pypi: https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/38/ee/a61bb562bdf6f0bc6c51cdcf80ab5503cbb4b2f5053fa4b054cc0a56e48a/python_gitlab-8.2.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/2d/e1/c2141f1840a41e07ad2db6f724945f8f8ff3065463899a22939152dd6e09/rapidfuzz-3.14.5-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/b9/20/35d2baebacf357b562bd081936b66cd845775442973cb033a377fd639a84/rdflib-7.5.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/04/80/97b6f357ac458d9ad9872cc3183ca09ef7439ac89e030ea43053ba1294b6/rich_argparse-1.7.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/af/fe/b6045c782f1fd1ae317d2a6ca1884857ce5c20f59befe6ab25a8603c43a7/ruamel_yaml-0.18.17-py3-none-any.whl
@@ -639,8 +721,10 @@ environments:
- pypi: https://files.pythonhosted.org/packages/71/3f/212e32937253312e102e152c954a5495df0379255719ce28e0288194748d/schema_salad-8.9.20251102115403-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/70/98/d82f14ac7ffedbd38dfa2383f142b26d18d23ca6cf35a40f4af60df666bd/sh-2.2.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/59/25/68f81c50aeb3f30e01c20da0f56d14c1a779b57d9308c50e19c63dc8413f/signurlarity-0.2.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/f9/38/8b6fc7a8153cb49eb3a9a13acfa9eeb6cc476e37888781e593e6f02ac05e/spython-0.3.14-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/e9/f8/5ecdfc73383ec496de038ed1614de9e740a82db9ad67e6e4514ebc0708a3/sqlalchemy-2.0.46-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl
@@ -648,11 +732,14 @@ environments:
- pypi: https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/c4/55/85e2732345dd8b66437cddedac4ee7ef2d9c25bf8792830b095f2ee658f3/uproot-5.7.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/3c/c1/d73f12f8cdb1891334a2ccf7389eed244d3941e74d80dd220badb937f3fb/wcwidth-0.5.3-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/b0/9a/d2faba7e61072a7507b5722db63562fdb22f5a24e237d460d18755627f15/wrapt-2.1.1-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/3d/36/9ab4f0b5c3d10df3aceaecf7e395cabe7fb7c7c004b2dc3f3cff0ef70fc3/xattr-1.3.0-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/ca/6d/b1a49f9712a910acdcb8dc5765e57d60c2be9fe9b001a21b6a98a1d85adb/xenv-0.0.6-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/38/34/98a2f52245f4d47be93b580dae5f9861ef58977d73a79eb47c58f1ad1f3a/xmltodict-1.0.4-py3-none-any.whl
+ - pypi: https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl
- pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl
- pypi: https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl
@@ -859,6 +946,39 @@ packages:
requires_dist:
- cryptography
requires_python: '>=3.9'
+- pypi: https://files.pythonhosted.org/packages/77/39/4d8414260c3d83f22029a39e51553c173611b378d62ca391e5ca68e65cfa/awkward-2.9.0-py3-none-any.whl
+ name: awkward
+ version: 2.9.0
+ sha256: 4859e371c606ca7fe737546f302de08110d53ed986cdd1254fb059dd48912db6
+ requires_dist:
+ - awkward-cpp==52
+ - fsspec>=2022.11.0
+ - importlib-metadata>=4.13.0 ; python_full_version < '3.12'
+ - numpy>=1.21.3
+ - packaging
+ - typing-extensions>=4.1.0 ; python_full_version < '3.11'
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/9a/02/0e550c9606ffae81603a9d240369a93cdf1e4bc48e2e314d367825a1c02d/awkward_cpp-52-cp314-cp314-macosx_10_15_x86_64.whl
+ name: awkward-cpp
+ version: '52'
+ sha256: d792c969c5261d8141c0b817a6a541849355b0fafe49e6e63a542a501cc0b73a
+ requires_dist:
+ - numpy>=1.21.3
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/ca/aa/ab2d6d68c3ee50f6dedbbc91a31cd38f9fede9258d54e7aca29bfca4ebc1/awkward_cpp-52-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
+ name: awkward-cpp
+ version: '52'
+ sha256: bbfd5745b59684a044c91394d7c1c5a82bac204ed9ef6125f37ffe35aa719e2b
+ requires_dist:
+ - numpy>=1.21.3
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/fb/b9/0978fa6f21f504b617ccee4843210d7ab8921a10e94e3bbf084498dcfad7/awkward_cpp-52-cp314-cp314-macosx_11_0_arm64.whl
+ name: awkward-cpp
+ version: '52'
+ sha256: 626e75125267c7ce51fdb891fa628e7cf3ea9c37df19126e25dd9587917f94ab
+ requires_dist:
+ - numpy>=1.21.3
+ requires_python: '>=3.10'
- pypi: https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl
name: azure-core
version: 1.38.0
@@ -882,6 +1002,35 @@ packages:
purls: []
size: 10186
timestamp: 1753456386827
+- pypi: https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl
+ name: bcrypt
+ version: 5.0.0
+ sha256: f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822
+ requires_dist:
+ - pytest>=3.2.1,!=3.3.0 ; extra == 'tests'
+ - mypy ; extra == 'typecheck'
+ requires_python: '>=3.8'
+- pypi: https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl
+ name: bcrypt
+ version: 5.0.0
+ sha256: 0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a
+ requires_dist:
+ - pytest>=3.2.1,!=3.3.0 ; extra == 'tests'
+ - mypy ; extra == 'typecheck'
+ requires_python: '>=3.8'
+- pypi: https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl
+ name: beautifulsoup4
+ version: 4.14.3
+ sha256: 0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb
+ requires_dist:
+ - soupsieve>=1.6.1
+ - typing-extensions>=4.0.0
+ - cchardet ; extra == 'cchardet'
+ - chardet ; extra == 'chardet'
+ - charset-normalizer ; extra == 'charset-normalizer'
+ - html5lib ; extra == 'html5lib'
+ - lxml ; extra == 'lxml'
+ requires_python: '>=3.7.0'
- pypi: https://files.pythonhosted.org/packages/5e/0a/3966f239e1d9da93cb755dc0213835ce4e9ed93645192878d0a055ecdc31/boto3-1.42.42-py3-none-any.whl
name: boto3
version: 1.42.42
@@ -1117,6 +1266,30 @@ packages:
- humanfriendly>=9.1
- capturer>=2.4 ; extra == 'cron'
requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*'
+- pypi: https://files.pythonhosted.org/packages/19/0f/f6121b90b86b9093c066889274d26a1de3f29969d45c2ed1ecbe2033cb78/cramjam-2.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+ name: cramjam
+ version: 2.11.0
+ sha256: 17eb39b1696179fb471eea2de958fa21f40a2cd8bf6b40d428312d5541e19dc4
+ requires_dist:
+ - black==22.3.0 ; extra == 'dev'
+ - numpy ; extra == 'dev'
+ - pytest>=5.30 ; extra == 'dev'
+ - pytest-xdist ; extra == 'dev'
+ - pytest-benchmark ; extra == 'dev'
+ - hypothesis==6.60.0 ; extra == 'dev'
+ requires_python: '>=3.8'
+- pypi: https://files.pythonhosted.org/packages/de/07/a1051cdbbe6d723df16d756b97f09da7c1adb69e29695c58f0392bc12515/cramjam-2.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl
+ name: cramjam
+ version: 2.11.0
+ sha256: 7ba5e38c9fbd06f086f4a5a64a1a5b7b417cd3f8fc07a20e5c03651f72f36100
+ requires_dist:
+ - black==22.3.0 ; extra == 'dev'
+ - numpy ; extra == 'dev'
+ - pytest>=5.30 ; extra == 'dev'
+ - pytest-xdist ; extra == 'dev'
+ - pytest-benchmark ; extra == 'dev'
+ - hypothesis==6.60.0 ; extra == 'dev'
+ requires_python: '>=3.8'
- pypi: https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl
name: cryptography
version: 46.0.4
@@ -1389,10 +1562,23 @@ packages:
version: 5.2.1
sha256: d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a
requires_python: '>=3.8'
-- pypi: https://files.pythonhosted.org/packages/75/c0/63d2ab6ef062e05e795fb49ebcd8a907c1d4f78d9f01c577266b12bd0da2/dirac-9.0.18-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl
+ name: deprecated
+ version: 1.3.1
+ sha256: 597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f
+ requires_dist:
+ - wrapt>=1.10,<3
+ - inspect2 ; python_full_version < '3'
+ - tox ; extra == 'dev'
+ - pytest ; extra == 'dev'
+ - pytest-cov ; extra == 'dev'
+ - bump2version<1 ; extra == 'dev'
+ - setuptools ; python_full_version >= '3.12' and extra == 'dev'
+ requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*'
+- pypi: https://files.pythonhosted.org/packages/d0/d0/9e71fdc3394ffc632f35946c572e60fcc2a5452ba0a23c52493f23d60672/dirac-9.1.6-py3-none-any.whl
name: dirac
- version: 9.0.18
- sha256: 8e32e7486eb49ad88278b2cb3d56a4ec684715de639f6c09c8a4e05b837e268a
+ version: 9.1.6
+ sha256: d818427204216f239df4171ddaa3cc646d7e208a34577bb8c86f2d9d50f6ffb3
requires_dist:
- boto3>=1.35
- botocore>=1.35
@@ -1400,17 +1586,20 @@ packages:
- certifi
- cwltool
- diraccfg
- - diraccommon==9.0.18
+ - diraccommon==9.1.6
- diracx-client>=0.0.1
- diracx-core>=0.0.1
- diracx-cli>=0.0.1
- db12
+ - fabric
- fts3
- gfal2-python
- importlib-metadata>=4.4
- importlib-resources
+ - invoke
- m2crypto>=0.36
- packaging
+ - paramiko
- pexpect
- prompt-toolkit>=3
- psutil
@@ -1456,8 +1645,8 @@ packages:
requires_python: '>=3.11'
- pypi: ./
name: dirac-cwl
- version: 1.2.1.dev14+g6f8f6ff96.d20260303
- sha256: 0b624b7b1adb33bc3b4cb29dbf85bb2db495122acda95aaa775f0aedef77767f
+ version: 1.2.1.dev11+g1da58a2a5.d20260428
+ sha256: 6b2880b8b3e1502d70cfb2bf1823439ef595fe963b873bb23ef101a9cae108f2
requires_dist:
- cwl-utils
- cwlformat
@@ -1468,6 +1657,7 @@ packages:
- diracx-client>=0.0.8
- diracx-cli>=0.0.8
- lbprodrun
+ - lhcbdirac @ git+https://****@gitlab.cern.ch/jlisalab/LHCbDIRAC.git@modules-to-cwl-migration
- pydantic
- pyyaml
- typer
@@ -1488,10 +1678,10 @@ packages:
- pytest-cov ; extra == 'testing'
- pylint>=1.6.5 ; extra == 'testing'
requires_python: '>=3.9'
-- pypi: https://files.pythonhosted.org/packages/06/d2/500c9ae651fd3821ca70814aa40cb5ab9bab9b479387ccd8dcb4df745d44/diraccommon-9.0.18-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/f8/b2/ad8e7e63fdf5add3ceb7a0805d700e9fd7cb7d5743f765a4994b4ec286d7/diraccommon-9.1.6-py3-none-any.whl
name: diraccommon
- version: 9.0.18
- sha256: e32f417cb4805c8c73b940921f6f4d06d2926ac834c02e3a99d5db2ac5c2fe14
+ version: 9.1.6
+ sha256: 53c765edf120eff9764d49e57d4073c0f2d671ed391f52a2ca940abef0810963
requires_dist:
- diraccfg
- pydantic>=2.0.0
@@ -1653,6 +1843,16 @@ packages:
purls: []
size: 143991
timestamp: 1763549744569
+- pypi: https://files.pythonhosted.org/packages/37/f9/f8497ef8b873a8bb2a750ee2a6c5f0fc22258e1acb6245fd237042a6c279/fabric-3.2.3-py3-none-any.whl
+ name: fabric
+ version: 3.2.3
+ sha256: ce61917f4f398018337ce279b357650a3a74baecf3fdd53a5839013944af965e
+ requires_dist:
+ - invoke>=2.0,<3.0
+ - paramiko>=2.4
+ - decorator>=5
+ - deprecated>=1.2
+ - pytest>=7 ; extra == 'pytest'
- conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda
sha256: 8b90dc21f00167a7e58abb5141a140bdb31a7c5734fe1361b5f98f4a4183fd32
md5: 2cfaaccf085c133a477f0a7a8657afe9
@@ -1678,6 +1878,129 @@ packages:
version: 1.8.0
sha256: cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2
requires_python: '>=3.9'
+- pypi: https://files.pythonhosted.org/packages/d5/1f/5f4a3cd9e4440e9d9bc78ad0a91a1c8d46b4d429d5239ebe6793c9fe5c41/fsspec-2026.3.0-py3-none-any.whl
+ name: fsspec
+ version: 2026.3.0
+ sha256: d2ceafaad1b3457968ed14efa28798162f1638dbb5d2a6868a2db002a5ee39a4
+ requires_dist:
+ - adlfs ; extra == 'abfs'
+ - adlfs ; extra == 'adl'
+ - pyarrow>=1 ; extra == 'arrow'
+ - dask ; extra == 'dask'
+ - distributed ; extra == 'dask'
+ - pre-commit ; extra == 'dev'
+ - ruff>=0.5 ; extra == 'dev'
+ - numpydoc ; extra == 'doc'
+ - sphinx ; extra == 'doc'
+ - sphinx-design ; extra == 'doc'
+ - sphinx-rtd-theme ; extra == 'doc'
+ - yarl ; extra == 'doc'
+ - dropbox ; extra == 'dropbox'
+ - dropboxdrivefs ; extra == 'dropbox'
+ - requests ; extra == 'dropbox'
+ - adlfs ; extra == 'full'
+ - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'full'
+ - dask ; extra == 'full'
+ - distributed ; extra == 'full'
+ - dropbox ; extra == 'full'
+ - dropboxdrivefs ; extra == 'full'
+ - fusepy ; extra == 'full'
+ - gcsfs>2024.2.0 ; extra == 'full'
+ - libarchive-c ; extra == 'full'
+ - ocifs ; extra == 'full'
+ - panel ; extra == 'full'
+ - paramiko ; extra == 'full'
+ - pyarrow>=1 ; extra == 'full'
+ - pygit2 ; extra == 'full'
+ - requests ; extra == 'full'
+ - s3fs>2024.2.0 ; extra == 'full'
+ - smbprotocol ; extra == 'full'
+ - tqdm ; extra == 'full'
+ - fusepy ; extra == 'fuse'
+ - gcsfs>2024.2.0 ; extra == 'gcs'
+ - pygit2 ; extra == 'git'
+ - requests ; extra == 'github'
+ - gcsfs ; extra == 'gs'
+ - panel ; extra == 'gui'
+ - pyarrow>=1 ; extra == 'hdfs'
+ - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'http'
+ - libarchive-c ; extra == 'libarchive'
+ - ocifs ; extra == 'oci'
+ - s3fs>2024.2.0 ; extra == 's3'
+ - paramiko ; extra == 'sftp'
+ - smbprotocol ; extra == 'smb'
+ - paramiko ; extra == 'ssh'
+ - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test'
+ - numpy ; extra == 'test'
+ - pytest ; extra == 'test'
+ - pytest-asyncio!=0.22.0 ; extra == 'test'
+ - pytest-benchmark ; extra == 'test'
+ - pytest-cov ; extra == 'test'
+ - pytest-mock ; extra == 'test'
+ - pytest-recording ; extra == 'test'
+ - pytest-rerunfailures ; extra == 'test'
+ - requests ; extra == 'test'
+ - aiobotocore>=2.5.4,<3.0.0 ; extra == 'test-downstream'
+ - dask[dataframe,test] ; extra == 'test-downstream'
+ - moto[server]>4,<5 ; extra == 'test-downstream'
+ - pytest-timeout ; extra == 'test-downstream'
+ - xarray ; extra == 'test-downstream'
+ - adlfs ; extra == 'test-full'
+ - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test-full'
+ - backports-zstd ; python_full_version < '3.14' and extra == 'test-full'
+ - cloudpickle ; extra == 'test-full'
+ - dask ; extra == 'test-full'
+ - distributed ; extra == 'test-full'
+ - dropbox ; extra == 'test-full'
+ - dropboxdrivefs ; extra == 'test-full'
+ - fastparquet ; extra == 'test-full'
+ - fusepy ; extra == 'test-full'
+ - gcsfs ; extra == 'test-full'
+ - jinja2 ; extra == 'test-full'
+ - kerchunk ; extra == 'test-full'
+ - libarchive-c ; extra == 'test-full'
+ - lz4 ; extra == 'test-full'
+ - notebook ; extra == 'test-full'
+ - numpy ; extra == 'test-full'
+ - ocifs ; extra == 'test-full'
+ - pandas<3.0.0 ; extra == 'test-full'
+ - panel ; extra == 'test-full'
+ - paramiko ; extra == 'test-full'
+ - pyarrow ; extra == 'test-full'
+ - pyarrow>=1 ; extra == 'test-full'
+ - pyftpdlib ; extra == 'test-full'
+ - pygit2 ; extra == 'test-full'
+ - pytest ; extra == 'test-full'
+ - pytest-asyncio!=0.22.0 ; extra == 'test-full'
+ - pytest-benchmark ; extra == 'test-full'
+ - pytest-cov ; extra == 'test-full'
+ - pytest-mock ; extra == 'test-full'
+ - pytest-recording ; extra == 'test-full'
+ - pytest-rerunfailures ; extra == 'test-full'
+ - python-snappy ; extra == 'test-full'
+ - requests ; extra == 'test-full'
+ - smbprotocol ; extra == 'test-full'
+ - tqdm ; extra == 'test-full'
+ - urllib3 ; extra == 'test-full'
+ - zarr ; extra == 'test-full'
+ - zstandard ; python_full_version < '3.14' and extra == 'test-full'
+ - tqdm ; extra == 'tqdm'
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/73/90/a2c51050d9254bd9134e6368b3f94f92f0eb2c34ed0ca19ec449ce2fc288/fsspec_xrootd-0.5.2-py3-none-any.whl
+ name: fsspec-xrootd
+ version: 0.5.2
+ sha256: 314763b6f31c01358ffe1fb1dca038085690efbee80ac5f5204f66d0ae9fd417
+ requires_dist:
+ - fsspec
+ - pytest>=6 ; extra == 'dev'
+ - sphinx>=4.0 ; extra == 'docs'
+ - myst-parser>=0.13 ; extra == 'docs'
+ - sphinx-book-theme>=0.1.0 ; extra == 'docs'
+ - sphinx-copybutton ; extra == 'docs'
+ - pytest>=6 ; extra == 'test'
+ - pytest-rerunfailures ; extra == 'test'
+ - pytest-timeout ; extra == 'test'
+ requires_python: '>=3.10'
- pypi: https://files.pythonhosted.org/packages/5f/e8/2e6301567e6debaad6abae0e217428471651ce877537b7095b6a8e7d8cd2/fts3-3.14.2-py3-none-any.whl
name: fts3
version: 3.14.2
@@ -1995,6 +2318,21 @@ packages:
- pyreadline ; python_full_version < '3.8' and sys_platform == 'win32'
- pyreadline3 ; python_full_version >= '3.8' and sys_platform == 'win32'
requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*'
+- pypi: https://files.pythonhosted.org/packages/22/ed/9c45c468fd6c31df3fe0622394b1853c00b86545d1e297f3fb9fba1232ce/hyperscan-0.8.2-cp314-cp314-macosx_10_15_x86_64.whl
+ name: hyperscan
+ version: 0.8.2
+ sha256: 2c579c1ebccc384d904de4a20e7a105df6041dd82adb54cb9acd5bb19b9b07dc
+ requires_python: '>=3.9,<4.0'
+- pypi: https://files.pythonhosted.org/packages/d0/23/49cf8ea1d129637941f06fb78f5f66077bf362762c5f6c01712c4cd0e87f/hyperscan-0.8.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl
+ name: hyperscan
+ version: 0.8.2
+ sha256: 0c0af5d882bd6afb61e2b9a13c0d39fcbcee49c62f392096d6303bd34452813f
+ requires_python: '>=3.9,<4.0'
+- pypi: https://files.pythonhosted.org/packages/d8/da/a8bb48a4fee86b5dad8a358559b70b010cd7effaa70ca5bb4e6e82e13703/hyperscan-0.8.2-cp314-cp314-macosx_11_0_arm64.whl
+ name: hyperscan
+ version: 0.8.2
+ sha256: 4e9f8d1ae2c9596385d906e062b9e0081ae843e3975fd4a656e5fcf6bbc48c13
+ requires_python: '>=3.9,<4.0'
- conda: https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda
sha256: 142a722072fa96cf16ff98eaaf641f54ab84744af81754c292cb81e0881c0329
md5: 186a18e3ba246eccfc7cff00cd19a870
@@ -2107,6 +2445,11 @@ packages:
- pkg:pypi/iniconfig?source=compressed-mapping
size: 13387
timestamp: 1760831448842
+- pypi: https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl
+ name: invoke
+ version: 2.2.1
+ sha256: 2413bc441b376e5cd3f55bb5d364f973ad8bdd7bf87e53c79de3c11bf3feecc8
+ requires_python: '>=3.6'
- pypi: https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl
name: isodate
version: 0.7.2
@@ -2275,10 +2618,10 @@ packages:
- pytest-cov ; extra == 'testing'
- coverage ; extra == 'testing'
requires_python: '>=3.7'
-- pypi: https://files.pythonhosted.org/packages/16/4a/b4d7feb029d4e75d4882d8d1d9029938c31a2e73074f87ffcff0f4a8ba9e/lbplatformutils-4.5.1-py3-none-any.whl
+- pypi: https://files.pythonhosted.org/packages/6d/10/b37ac718c5903758fa9058a5182026a4f3b65443196b82c7840389ea0dbd/lbplatformutils-4.6.1-py3-none-any.whl
name: lbplatformutils
- version: 4.5.1
- sha256: f61f8192bf93da16d50a3e039e9998d75aed8ffc486c226ab838bdaeb1b98679
+ version: 4.6.1
+ sha256: 92e6dd273e77873ba6cbd302c8b29fde71e5dbb7706f05856e362fb44fd9eee8
requires_python: '>=3.7,<4.0'
- pypi: https://files.pythonhosted.org/packages/02/64/c86924898062e8217ed914a29458cfde9e4a9b80e4d4cbcca141983ba339/lbprodrun-1.12.4-py3-none-any.whl
name: lbprodrun
@@ -2310,6 +2653,112 @@ packages:
purls: []
size: 725507
timestamp: 1770267139900
+- pypi: https://files.pythonhosted.org/packages/29/ce/ed422816fb30ffa3bc11597b30d5deca06b4a1388707a04215da73c65b53/levenshtein-0.27.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl
+ name: levenshtein
+ version: 0.27.3
+ sha256: ce3bbbe92172a08b599d79956182c6b7ab6ec8d4adbe7237417a363b968ad87b
+ requires_dist:
+ - rapidfuzz>=3.9.0,<4.0.0
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/76/2c/bb6ef359e007fe7b6b3195b68a94f4dd3ecd1885ee337ee8fbd4df55996f/levenshtein-0.27.3-cp314-cp314-macosx_11_0_arm64.whl
+ name: levenshtein
+ version: 0.27.3
+ sha256: 8e5037c4a6f97a238e24aad6f98a1e984348b7931b1b04b6bd02bd4f8238150d
+ requires_dist:
+ - rapidfuzz>=3.9.0,<4.0.0
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/f3/e1/2f705da403f865a5fa3449b155738dc9c53021698fd6926253a9af03180b/levenshtein-0.27.3-cp314-cp314-macosx_10_15_x86_64.whl
+ name: levenshtein
+ version: 0.27.3
+ sha256: a6728bfae9a86002f0223576675fc7e2a6e7735da47185a1d13d1eaaa73dd4be
+ requires_dist:
+ - rapidfuzz>=3.9.0,<4.0.0
+ requires_python: '>=3.10'
+- pypi: git+https://gitlab.cern.ch/jlisalab/LHCbDIRAC.git?rev=modules-to-cwl-migration#bc5f8f1804107bc8807edcc98a97c346ef61c0e8
+ name: lhcbdirac
+ version: 0.1.dev20470+gbc5f8f180
+ requires_dist:
+ - dirac~=9.1
+ - lbplatformutils>=4.6.1
+ - lbenv>=2.3.0
+ - lbprodrun
+ - lbcondawrappers
+ - requests
+ - pydantic>=2
+ - uproot[xrootd]>=5.3
+ - pyyaml
+ - xmltodict
+ - hyperscan
+ - levenshtein
+ - zstandard
+ - rich
+ - httpx
+ - beautifulsoup4
+ - python-gitlab
+ - pandas
+ - numpy
+ - lhcbdiracx-client
+ - lhcbdiracx-core
+ - lhcbdiracx-cli
+ - signurlarity
+ - oracledb ; extra == 'server'
+ - dirac[server]~=9.1.0 ; extra == 'server'
+ - psutil ; extra == 'server'
+ - stomp-py ; extra == 'server'
+ - suds ; extra == 'server'
+ - mock ; extra == 'testing'
+ - pytest-mock ; extra == 'testing'
+ - pillow ; extra == 'testing'
+ - pytest ; extra == 'testing'
+ requires_python: '>=3.11'
+- pypi: https://files.pythonhosted.org/packages/7f/37/8ea3555769b6048b5e4ec162cc90fd32e761c0e381ffb3baf888cb0d8a71/lhcbdiracx_api-0.0.8-py3-none-any.whl
+ name: lhcbdiracx-api
+ version: 0.0.8
+ sha256: 13337f9b98b0907e372e014ee1012d99c7d1f1d8e3877daf96d73b165ca10aca
+ requires_dist:
+ - lhcbdiracx-core
+ - lhcbdiracx-client
+ - diracx-api==0.0.8
+ - diracx-api[types]==0.0.8 ; extra == 'types'
+ - diracx-api[testing]==0.0.8 ; extra == 'testing'
+ requires_python: '>=3.11'
+- pypi: https://files.pythonhosted.org/packages/68/15/bfb0c717b8f23c16907f3e73e8f56010ccd72e9900a108209665b0d9ed4b/lhcbdiracx_cli-0.0.8-py3-none-any.whl
+ name: lhcbdiracx-cli
+ version: 0.0.8
+ sha256: 85e357eed0578796f78a5502c2235949dcd7fa456e7efaf1a0b2913f926f1b64
+ requires_dist:
+ - lhcbdiracx-core
+ - lhcbdiracx-client
+ - lhcbdiracx-api
+ - diracx-cli==0.0.8
+ - diracx-cli[types]==0.0.8 ; extra == 'types'
+ - diracx-cli[testing]==0.0.8 ; extra == 'testing'
+ requires_python: '>=3.11'
+- pypi: https://files.pythonhosted.org/packages/2d/88/67602dfa2d7ab5d0518af82db34ab4d70dc2c3029b3ca788299a3be4a96d/lhcbdiracx_client-0.0.8-py3-none-any.whl
+ name: lhcbdiracx-client
+ version: 0.0.8
+ sha256: 72bea573c481d011d816cbf02e39cee610b4b7aa8b57a2941659036de483907e
+ requires_dist:
+ - lhcbdiracx-core
+ - diracx-client==0.0.8
+ - types-requests ; extra == 'types'
+ - diracx-api[types]==0.0.8 ; extra == 'types'
+ - diracx-client[testing]==0.0.8 ; extra == 'testing'
+ - diracx-testing==0.0.8 ; extra == 'testing'
+ requires_python: '>=3.11'
+- pypi: https://files.pythonhosted.org/packages/34/8f/6105afdd8f4e1f3b198d09e4b2622622923d9fa9e077aed852c0bb035a3a/lhcbdiracx_core-0.0.8-py3-none-any.whl
+ name: lhcbdiracx-core
+ version: 0.0.8
+ sha256: a23f9b343efddb80cb53c3e06c409c65221ff29a339d4aebe336f930d04c7942
+ requires_dist:
+ - diracx-core==0.0.8
+ - lhcbdiracx-testing ; extra == 'testing'
+ - diracx-testing ; extra == 'testing'
+ - diracx-core[types]==0.0.8 ; extra == 'testing'
+ - diracx-core[testing]==0.0.8 ; extra == 'types'
+ - types-cachetools ; extra == 'types'
+ - types-pyyaml ; extra == 'types'
+ requires_python: '>=3.11'
- conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda
build_number: 5
sha256: 18c72545080b86739352482ba14ba2c4815e19e26a7417ca21a95b76ec8da24c
@@ -4030,6 +4479,16 @@ packages:
purls: []
size: 3104268
timestamp: 1769556384749
+- pypi: https://files.pythonhosted.org/packages/76/3e/c0b690253f0b82d86e99949af13533363acfb5432ecb5d53dd5b3bce9c34/orjson-3.11.9-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+ name: orjson
+ version: 3.11.9
+ sha256: aaea64f3f467d22e70eeed68bdccb3bc4f83f650446c4a03c59f2cba28a108db
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/8e/eb/5da01e356015aee6ecfa1187ced87aef51364e306f5e695dd52719bf0e78/orjson-3.11.9-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl
+ name: orjson
+ version: 3.11.9
+ sha256: b6ef1979adc4bc243523f1a2ba91418030a8e29b0a99cbe7e0e2d6807d4dce6e
+ requires_python: '>=3.10'
- conda: https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda
sha256: c1fc0f953048f743385d31c468b4a678b3ad20caffdeaa94bed85ba63049fd58
md5: b76541e68fea4d511b1ac46a28dcd2c6
@@ -4042,6 +4501,289 @@ packages:
- pkg:pypi/packaging?source=compressed-mapping
size: 72010
timestamp: 1769093650580
+- pypi: https://files.pythonhosted.org/packages/15/88/3cdd54fa279341afa10acf8d2b503556b1375245dccc9315659f795dd2e9/pandas-3.0.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl
+ name: pandas
+ version: 3.0.2
+ sha256: deeca1b5a931fdf0c2212c8a659ade6d3b1edc21f0914ce71ef24456ca7a6535
+ requires_dist:
+ - numpy>=1.26.0 ; python_full_version < '3.14'
+ - numpy>=2.3.3 ; python_full_version >= '3.14'
+ - python-dateutil>=2.8.2
+ - tzdata ; sys_platform == 'win32'
+ - tzdata ; sys_platform == 'emscripten'
+ - hypothesis>=6.116.0 ; extra == 'test'
+ - pytest>=8.3.4 ; extra == 'test'
+ - pytest-xdist>=3.6.1 ; extra == 'test'
+ - pyarrow>=13.0.0 ; extra == 'pyarrow'
+ - bottleneck>=1.4.2 ; extra == 'performance'
+ - numba>=0.60.0 ; extra == 'performance'
+ - numexpr>=2.10.2 ; extra == 'performance'
+ - scipy>=1.14.1 ; extra == 'computation'
+ - xarray>=2024.10.0 ; extra == 'computation'
+ - fsspec>=2024.10.0 ; extra == 'fss'
+ - s3fs>=2024.10.0 ; extra == 'aws'
+ - gcsfs>=2024.10.0 ; extra == 'gcp'
+ - odfpy>=1.4.1 ; extra == 'excel'
+ - openpyxl>=3.1.5 ; extra == 'excel'
+ - python-calamine>=0.3.0 ; extra == 'excel'
+ - pyxlsb>=1.0.10 ; extra == 'excel'
+ - xlrd>=2.0.1 ; extra == 'excel'
+ - xlsxwriter>=3.2.0 ; extra == 'excel'
+ - pyarrow>=13.0.0 ; extra == 'parquet'
+ - pyarrow>=13.0.0 ; extra == 'feather'
+ - pyiceberg>=0.8.1 ; extra == 'iceberg'
+ - tables>=3.10.1 ; extra == 'hdf5'
+ - pyreadstat>=1.2.8 ; extra == 'spss'
+ - sqlalchemy>=2.0.36 ; extra == 'postgresql'
+ - psycopg2>=2.9.10 ; extra == 'postgresql'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'postgresql'
+ - sqlalchemy>=2.0.36 ; extra == 'mysql'
+ - pymysql>=1.1.1 ; extra == 'mysql'
+ - sqlalchemy>=2.0.36 ; extra == 'sql-other'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'sql-other'
+ - adbc-driver-sqlite>=1.2.0 ; extra == 'sql-other'
+ - beautifulsoup4>=4.12.3 ; extra == 'html'
+ - html5lib>=1.1 ; extra == 'html'
+ - lxml>=5.3.0 ; extra == 'html'
+ - lxml>=5.3.0 ; extra == 'xml'
+ - matplotlib>=3.9.3 ; extra == 'plot'
+ - jinja2>=3.1.5 ; extra == 'output-formatting'
+ - tabulate>=0.9.0 ; extra == 'output-formatting'
+ - pyqt5>=5.15.9 ; extra == 'clipboard'
+ - qtpy>=2.4.2 ; extra == 'clipboard'
+ - zstandard>=0.23.0 ; extra == 'compression'
+ - pytz>=2024.2 ; extra == 'timezone'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'all'
+ - adbc-driver-sqlite>=1.2.0 ; extra == 'all'
+ - beautifulsoup4>=4.12.3 ; extra == 'all'
+ - bottleneck>=1.4.2 ; extra == 'all'
+ - fastparquet>=2024.11.0 ; extra == 'all'
+ - fsspec>=2024.10.0 ; extra == 'all'
+ - gcsfs>=2024.10.0 ; extra == 'all'
+ - html5lib>=1.1 ; extra == 'all'
+ - hypothesis>=6.116.0 ; extra == 'all'
+ - jinja2>=3.1.5 ; extra == 'all'
+ - lxml>=5.3.0 ; extra == 'all'
+ - matplotlib>=3.9.3 ; extra == 'all'
+ - numba>=0.60.0 ; extra == 'all'
+ - numexpr>=2.10.2 ; extra == 'all'
+ - odfpy>=1.4.1 ; extra == 'all'
+ - openpyxl>=3.1.5 ; extra == 'all'
+ - psycopg2>=2.9.10 ; extra == 'all'
+ - pyarrow>=13.0.0 ; extra == 'all'
+ - pyiceberg>=0.8.1 ; extra == 'all'
+ - pymysql>=1.1.1 ; extra == 'all'
+ - pyqt5>=5.15.9 ; extra == 'all'
+ - pyreadstat>=1.2.8 ; extra == 'all'
+ - pytest>=8.3.4 ; extra == 'all'
+ - pytest-xdist>=3.6.1 ; extra == 'all'
+ - python-calamine>=0.3.0 ; extra == 'all'
+ - pytz>=2024.2 ; extra == 'all'
+ - pyxlsb>=1.0.10 ; extra == 'all'
+ - qtpy>=2.4.2 ; extra == 'all'
+ - scipy>=1.14.1 ; extra == 'all'
+ - s3fs>=2024.10.0 ; extra == 'all'
+ - sqlalchemy>=2.0.36 ; extra == 'all'
+ - tables>=3.10.1 ; extra == 'all'
+ - tabulate>=0.9.0 ; extra == 'all'
+ - xarray>=2024.10.0 ; extra == 'all'
+ - xlrd>=2.0.1 ; extra == 'all'
+ - xlsxwriter>=3.2.0 ; extra == 'all'
+ - zstandard>=0.23.0 ; extra == 'all'
+ requires_python: '>=3.11'
+- pypi: https://files.pythonhosted.org/packages/95/25/bdb9326c3b5455f8d4d3549fce7abcf967259de146fe2cf7a82368141948/pandas-3.0.2-cp314-cp314-macosx_11_0_arm64.whl
+ name: pandas
+ version: 3.0.2
+ sha256: 0555c5882688a39317179ab4a0ed41d3ebc8812ab14c69364bbee8fb7a3f6288
+ requires_dist:
+ - numpy>=1.26.0 ; python_full_version < '3.14'
+ - numpy>=2.3.3 ; python_full_version >= '3.14'
+ - python-dateutil>=2.8.2
+ - tzdata ; sys_platform == 'win32'
+ - tzdata ; sys_platform == 'emscripten'
+ - hypothesis>=6.116.0 ; extra == 'test'
+ - pytest>=8.3.4 ; extra == 'test'
+ - pytest-xdist>=3.6.1 ; extra == 'test'
+ - pyarrow>=13.0.0 ; extra == 'pyarrow'
+ - bottleneck>=1.4.2 ; extra == 'performance'
+ - numba>=0.60.0 ; extra == 'performance'
+ - numexpr>=2.10.2 ; extra == 'performance'
+ - scipy>=1.14.1 ; extra == 'computation'
+ - xarray>=2024.10.0 ; extra == 'computation'
+ - fsspec>=2024.10.0 ; extra == 'fss'
+ - s3fs>=2024.10.0 ; extra == 'aws'
+ - gcsfs>=2024.10.0 ; extra == 'gcp'
+ - odfpy>=1.4.1 ; extra == 'excel'
+ - openpyxl>=3.1.5 ; extra == 'excel'
+ - python-calamine>=0.3.0 ; extra == 'excel'
+ - pyxlsb>=1.0.10 ; extra == 'excel'
+ - xlrd>=2.0.1 ; extra == 'excel'
+ - xlsxwriter>=3.2.0 ; extra == 'excel'
+ - pyarrow>=13.0.0 ; extra == 'parquet'
+ - pyarrow>=13.0.0 ; extra == 'feather'
+ - pyiceberg>=0.8.1 ; extra == 'iceberg'
+ - tables>=3.10.1 ; extra == 'hdf5'
+ - pyreadstat>=1.2.8 ; extra == 'spss'
+ - sqlalchemy>=2.0.36 ; extra == 'postgresql'
+ - psycopg2>=2.9.10 ; extra == 'postgresql'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'postgresql'
+ - sqlalchemy>=2.0.36 ; extra == 'mysql'
+ - pymysql>=1.1.1 ; extra == 'mysql'
+ - sqlalchemy>=2.0.36 ; extra == 'sql-other'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'sql-other'
+ - adbc-driver-sqlite>=1.2.0 ; extra == 'sql-other'
+ - beautifulsoup4>=4.12.3 ; extra == 'html'
+ - html5lib>=1.1 ; extra == 'html'
+ - lxml>=5.3.0 ; extra == 'html'
+ - lxml>=5.3.0 ; extra == 'xml'
+ - matplotlib>=3.9.3 ; extra == 'plot'
+ - jinja2>=3.1.5 ; extra == 'output-formatting'
+ - tabulate>=0.9.0 ; extra == 'output-formatting'
+ - pyqt5>=5.15.9 ; extra == 'clipboard'
+ - qtpy>=2.4.2 ; extra == 'clipboard'
+ - zstandard>=0.23.0 ; extra == 'compression'
+ - pytz>=2024.2 ; extra == 'timezone'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'all'
+ - adbc-driver-sqlite>=1.2.0 ; extra == 'all'
+ - beautifulsoup4>=4.12.3 ; extra == 'all'
+ - bottleneck>=1.4.2 ; extra == 'all'
+ - fastparquet>=2024.11.0 ; extra == 'all'
+ - fsspec>=2024.10.0 ; extra == 'all'
+ - gcsfs>=2024.10.0 ; extra == 'all'
+ - html5lib>=1.1 ; extra == 'all'
+ - hypothesis>=6.116.0 ; extra == 'all'
+ - jinja2>=3.1.5 ; extra == 'all'
+ - lxml>=5.3.0 ; extra == 'all'
+ - matplotlib>=3.9.3 ; extra == 'all'
+ - numba>=0.60.0 ; extra == 'all'
+ - numexpr>=2.10.2 ; extra == 'all'
+ - odfpy>=1.4.1 ; extra == 'all'
+ - openpyxl>=3.1.5 ; extra == 'all'
+ - psycopg2>=2.9.10 ; extra == 'all'
+ - pyarrow>=13.0.0 ; extra == 'all'
+ - pyiceberg>=0.8.1 ; extra == 'all'
+ - pymysql>=1.1.1 ; extra == 'all'
+ - pyqt5>=5.15.9 ; extra == 'all'
+ - pyreadstat>=1.2.8 ; extra == 'all'
+ - pytest>=8.3.4 ; extra == 'all'
+ - pytest-xdist>=3.6.1 ; extra == 'all'
+ - python-calamine>=0.3.0 ; extra == 'all'
+ - pytz>=2024.2 ; extra == 'all'
+ - pyxlsb>=1.0.10 ; extra == 'all'
+ - qtpy>=2.4.2 ; extra == 'all'
+ - scipy>=1.14.1 ; extra == 'all'
+ - s3fs>=2024.10.0 ; extra == 'all'
+ - sqlalchemy>=2.0.36 ; extra == 'all'
+ - tables>=3.10.1 ; extra == 'all'
+ - tabulate>=0.9.0 ; extra == 'all'
+ - xarray>=2024.10.0 ; extra == 'all'
+ - xlrd>=2.0.1 ; extra == 'all'
+ - xlsxwriter>=3.2.0 ; extra == 'all'
+ - zstandard>=0.23.0 ; extra == 'all'
+ requires_python: '>=3.11'
+- pypi: https://files.pythonhosted.org/packages/bb/40/c6ea527147c73b24fc15c891c3fcffe9c019793119c5742b8784a062c7db/pandas-3.0.2-cp314-cp314-macosx_10_15_x86_64.whl
+ name: pandas
+ version: 3.0.2
+ sha256: db0dbfd2a6cdf3770aa60464d50333d8f3d9165b2f2671bcc299b72de5a6677b
+ requires_dist:
+ - numpy>=1.26.0 ; python_full_version < '3.14'
+ - numpy>=2.3.3 ; python_full_version >= '3.14'
+ - python-dateutil>=2.8.2
+ - tzdata ; sys_platform == 'win32'
+ - tzdata ; sys_platform == 'emscripten'
+ - hypothesis>=6.116.0 ; extra == 'test'
+ - pytest>=8.3.4 ; extra == 'test'
+ - pytest-xdist>=3.6.1 ; extra == 'test'
+ - pyarrow>=13.0.0 ; extra == 'pyarrow'
+ - bottleneck>=1.4.2 ; extra == 'performance'
+ - numba>=0.60.0 ; extra == 'performance'
+ - numexpr>=2.10.2 ; extra == 'performance'
+ - scipy>=1.14.1 ; extra == 'computation'
+ - xarray>=2024.10.0 ; extra == 'computation'
+ - fsspec>=2024.10.0 ; extra == 'fss'
+ - s3fs>=2024.10.0 ; extra == 'aws'
+ - gcsfs>=2024.10.0 ; extra == 'gcp'
+ - odfpy>=1.4.1 ; extra == 'excel'
+ - openpyxl>=3.1.5 ; extra == 'excel'
+ - python-calamine>=0.3.0 ; extra == 'excel'
+ - pyxlsb>=1.0.10 ; extra == 'excel'
+ - xlrd>=2.0.1 ; extra == 'excel'
+ - xlsxwriter>=3.2.0 ; extra == 'excel'
+ - pyarrow>=13.0.0 ; extra == 'parquet'
+ - pyarrow>=13.0.0 ; extra == 'feather'
+ - pyiceberg>=0.8.1 ; extra == 'iceberg'
+ - tables>=3.10.1 ; extra == 'hdf5'
+ - pyreadstat>=1.2.8 ; extra == 'spss'
+ - sqlalchemy>=2.0.36 ; extra == 'postgresql'
+ - psycopg2>=2.9.10 ; extra == 'postgresql'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'postgresql'
+ - sqlalchemy>=2.0.36 ; extra == 'mysql'
+ - pymysql>=1.1.1 ; extra == 'mysql'
+ - sqlalchemy>=2.0.36 ; extra == 'sql-other'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'sql-other'
+ - adbc-driver-sqlite>=1.2.0 ; extra == 'sql-other'
+ - beautifulsoup4>=4.12.3 ; extra == 'html'
+ - html5lib>=1.1 ; extra == 'html'
+ - lxml>=5.3.0 ; extra == 'html'
+ - lxml>=5.3.0 ; extra == 'xml'
+ - matplotlib>=3.9.3 ; extra == 'plot'
+ - jinja2>=3.1.5 ; extra == 'output-formatting'
+ - tabulate>=0.9.0 ; extra == 'output-formatting'
+ - pyqt5>=5.15.9 ; extra == 'clipboard'
+ - qtpy>=2.4.2 ; extra == 'clipboard'
+ - zstandard>=0.23.0 ; extra == 'compression'
+ - pytz>=2024.2 ; extra == 'timezone'
+ - adbc-driver-postgresql>=1.2.0 ; extra == 'all'
+ - adbc-driver-sqlite>=1.2.0 ; extra == 'all'
+ - beautifulsoup4>=4.12.3 ; extra == 'all'
+ - bottleneck>=1.4.2 ; extra == 'all'
+ - fastparquet>=2024.11.0 ; extra == 'all'
+ - fsspec>=2024.10.0 ; extra == 'all'
+ - gcsfs>=2024.10.0 ; extra == 'all'
+ - html5lib>=1.1 ; extra == 'all'
+ - hypothesis>=6.116.0 ; extra == 'all'
+ - jinja2>=3.1.5 ; extra == 'all'
+ - lxml>=5.3.0 ; extra == 'all'
+ - matplotlib>=3.9.3 ; extra == 'all'
+ - numba>=0.60.0 ; extra == 'all'
+ - numexpr>=2.10.2 ; extra == 'all'
+ - odfpy>=1.4.1 ; extra == 'all'
+ - openpyxl>=3.1.5 ; extra == 'all'
+ - psycopg2>=2.9.10 ; extra == 'all'
+ - pyarrow>=13.0.0 ; extra == 'all'
+ - pyiceberg>=0.8.1 ; extra == 'all'
+ - pymysql>=1.1.1 ; extra == 'all'
+ - pyqt5>=5.15.9 ; extra == 'all'
+ - pyreadstat>=1.2.8 ; extra == 'all'
+ - pytest>=8.3.4 ; extra == 'all'
+ - pytest-xdist>=3.6.1 ; extra == 'all'
+ - python-calamine>=0.3.0 ; extra == 'all'
+ - pytz>=2024.2 ; extra == 'all'
+ - pyxlsb>=1.0.10 ; extra == 'all'
+ - qtpy>=2.4.2 ; extra == 'all'
+ - scipy>=1.14.1 ; extra == 'all'
+ - s3fs>=2024.10.0 ; extra == 'all'
+ - sqlalchemy>=2.0.36 ; extra == 'all'
+ - tables>=3.10.1 ; extra == 'all'
+ - tabulate>=0.9.0 ; extra == 'all'
+ - xarray>=2024.10.0 ; extra == 'all'
+ - xlrd>=2.0.1 ; extra == 'all'
+ - xlsxwriter>=3.2.0 ; extra == 'all'
+ - zstandard>=0.23.0 ; extra == 'all'
+ requires_python: '>=3.11'
+- pypi: https://files.pythonhosted.org/packages/a9/90/a744336f5af32c433bd09af7854599682a383b37cfd78f7de263de6ad6cb/paramiko-4.0.0-py3-none-any.whl
+ name: paramiko
+ version: 4.0.0
+ sha256: 0e20e00ac666503bf0b4eda3b6d833465a2b7aff2e2b3d79a8bba5ef144ee3b9
+ requires_dist:
+ - bcrypt>=3.2
+ - cryptography>=3.3
+ - invoke>=2.0
+ - pynacl>=1.5
+ - pyasn1>=0.1.7 ; extra == 'gssapi'
+ - gssapi>=1.4.1 ; sys_platform != 'win32' and extra == 'gssapi'
+ - pywin32>=2.1.8 ; sys_platform == 'win32' and extra == 'gssapi'
+ requires_python: '>=3.9'
- conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-1.0.4-pyhd8ed1ab_0.conda
sha256: 29ea20d0faf20374fcd61c25f6d32fb8e9a2c786a7f1473a0c3ead359470fbe1
md5: 2908273ac396d2cd210a8127f5f1c0d6
@@ -4397,6 +5139,34 @@ packages:
- coverage[toml]==7.10.7 ; extra == 'tests'
- pytest>=8.4.2,<9.0.0 ; extra == 'tests'
requires_python: '>=3.9'
+- pypi: https://files.pythonhosted.org/packages/3e/d0/f301f83ac8dbe53442c5a43f6a39016f94f754d7a9815a875b65e218a307/pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl
+ name: pynacl
+ version: 1.6.2
+ sha256: 8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c
+ requires_dist:
+ - cffi>=1.4.1 ; python_full_version < '3.9' and platform_python_implementation != 'PyPy'
+ - cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy'
+ - pytest>=7.4.0 ; extra == 'tests'
+ - pytest-cov>=2.10.1 ; extra == 'tests'
+ - pytest-xdist>=3.5.0 ; extra == 'tests'
+ - hypothesis>=3.27.0 ; extra == 'tests'
+ - sphinx<7 ; extra == 'docs'
+ - sphinx-rtd-theme ; extra == 'docs'
+ requires_python: '>=3.8'
+- pypi: https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl
+ name: pynacl
+ version: 1.6.2
+ sha256: c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465
+ requires_dist:
+ - cffi>=1.4.1 ; python_full_version < '3.9' and platform_python_implementation != 'PyPy'
+ - cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy'
+ - pytest>=7.4.0 ; extra == 'tests'
+ - pytest-cov>=2.10.1 ; extra == 'tests'
+ - pytest-xdist>=3.5.0 ; extra == 'tests'
+ - hypothesis>=3.27.0 ; extra == 'tests'
+ - sphinx<7 ; extra == 'docs'
+ - sphinx-rtd-theme ; extra == 'docs'
+ requires_python: '>=3.8'
- pypi: https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl
name: pyparsing
version: 3.3.2
@@ -4598,6 +5368,17 @@ packages:
- pkg:pypi/gfal2-python?source=hash-mapping
size: 185917
timestamp: 1769083275804
+- pypi: https://files.pythonhosted.org/packages/38/ee/a61bb562bdf6f0bc6c51cdcf80ab5503cbb4b2f5053fa4b054cc0a56e48a/python_gitlab-8.2.0-py3-none-any.whl
+ name: python-gitlab
+ version: 8.2.0
+ sha256: 884618d4d60beadb21bb0c5f0cca46e70c6e501784f136bf0b6f85f5bc15ce62
+ requires_dist:
+ - requests>=2.32.0
+ - requests-toolbelt>=1.0.0
+ - argcomplete>=1.10.0,<3 ; extra == 'autocompletion'
+ - pyyaml>=6.0.1 ; extra == 'yaml'
+ - gql[httpx]>=3.5.0,<5 ; extra == 'graphql'
+ requires_python: '>=3.10.0'
- conda: https://conda.anaconda.org/conda-forge/linux-64/python-librt-0.7.8-py314h0f05182_0.conda
sha256: 7c4615367e1d8bee1e98abcfccd742fb0c382a150f21cb592a66af69063eae43
md5: 1cdbb8798d700d90f33998d41baed1ec
@@ -4698,6 +5479,27 @@ packages:
- pkg:pypi/pyyaml?source=compressed-mapping
size: 189475
timestamp: 1770223788648
+- pypi: https://files.pythonhosted.org/packages/2d/e1/c2141f1840a41e07ad2db6f724945f8f8ff3065463899a22939152dd6e09/rapidfuzz-3.14.5-cp314-cp314-macosx_11_0_arm64.whl
+ name: rapidfuzz
+ version: 3.14.5
+ sha256: 0298d357e2bc59d572da4db0bc631009b6f8f6c9bc8c11e99a12b833f16b6575
+ requires_dist:
+ - numpy ; extra == 'all'
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/81/41/aa3ffb3355e62e1bf91f6599b3092e866bc88487a07c524004943c7676df/rapidfuzz-3.14.5-cp314-cp314-macosx_10_15_x86_64.whl
+ name: rapidfuzz
+ version: 3.14.5
+ sha256: 1a31cc6d7d03e7318a0974c038959c59e19c752b81115f2e9138b3331cd64d45
+ requires_dist:
+ - numpy ; extra == 'all'
+ requires_python: '>=3.10'
+- pypi: https://files.pythonhosted.org/packages/c8/85/9535df0b78ba51f478c9ce7eb6d1f85535cc31fe356773b48fd9d3e563ca/rapidfuzz-3.14.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
+ name: rapidfuzz
+ version: 3.14.5
+ sha256: 4900143d82071bdda533b00300c40b14b963ff826b3642cc463b6dd0f036585e
+ requires_dist:
+ - numpy ; extra == 'all'
+ requires_python: '>=3.10'
- pypi: https://files.pythonhosted.org/packages/b9/20/35d2baebacf357b562bd081936b66cd845775442973cb033a377fd639a84/rdflib-7.5.0-py3-none-any.whl
name: rdflib
version: 7.5.0
@@ -4773,6 +5575,13 @@ packages:
- pysocks>=1.5.6,!=1.5.7 ; extra == 'socks'
- chardet>=3.0.2,<6 ; extra == 'use-chardet-on-py3'
requires_python: '>=3.9'
+- pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl
+ name: requests-toolbelt
+ version: 1.0.0
+ sha256: cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06
+ requires_dist:
+ - requests>=2.0.1,<3.0.0
+ requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*'
- pypi: https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl
name: rich
version: 14.3.2
@@ -5060,6 +5869,25 @@ packages:
version: 1.5.4
sha256: 7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686
requires_python: '>=3.7'
+- pypi: https://files.pythonhosted.org/packages/59/25/68f81c50aeb3f30e01c20da0f56d14c1a779b57d9308c50e19c63dc8413f/signurlarity-0.2.2-py3-none-any.whl
+ name: signurlarity
+ version: 0.2.2
+ sha256: 0c8089ecac04ce105e525b60749d134b171410beefc962e167e0e64141d6e7a7
+ requires_dist:
+ - cryptography>=41.0.0
+ - httpx>=0.24.0
+ - orjson
+ - aiobotocore>=2.15 ; extra == 'testing'
+ - botocore>=1.35 ; extra == 'testing'
+ - httpx ; extra == 'testing'
+ - moto[server] ; extra == 'testing'
+ - pytest ; extra == 'testing'
+ - pytest-asyncio ; extra == 'testing'
+ - pytest-benchmark ; extra == 'testing'
+ - pytest-xdist ; extra == 'testing'
+ - rich ; extra == 'testing'
+ - ty ; extra == 'testing'
+ requires_python: '>=3.11'
- pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl
name: six
version: 1.17.0
@@ -5070,6 +5898,11 @@ packages:
version: 5.0.2
sha256: b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e
requires_python: '>=3.7'
+- pypi: https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl
+ name: soupsieve
+ version: 2.8.3
+ sha256: ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95
+ requires_python: '>=3.9'
- pypi: https://files.pythonhosted.org/packages/f9/38/8b6fc7a8153cb49eb3a9a13acfa9eeb6cc476e37888781e593e6f02ac05e/spython-0.3.14-py3-none-any.whl
name: spython
version: 0.3.14
@@ -5380,6 +6213,26 @@ packages:
- pkg:pypi/ukkonen?source=hash-mapping
size: 14884
timestamp: 1769439056290
+- pypi: https://files.pythonhosted.org/packages/c4/55/85e2732345dd8b66437cddedac4ee7ef2d9c25bf8792830b095f2ee658f3/uproot-5.7.3-py3-none-any.whl
+ name: uproot
+ version: 5.7.3
+ sha256: aeb096ab2ef10f96c3914fcf981352b69e350fac58a4658586f7eb9e3326b957
+ requires_dist:
+ - awkward>=2.8.2
+ - cramjam>=2.5.0
+ - fsspec!=2026.2.0
+ - numpy
+ - packaging
+ - typing-extensions>=4.1.0 ; python_full_version < '3.11'
+ - xxhash
+ - kvikio-cu12 ; extra == 'gds-cu12'
+ - nvidia-nvcomp-cu12 ; extra == 'gds-cu12'
+ - kvikio-cu13 ; extra == 'gds-cu13'
+ - nvidia-nvcomp-cu13 ; extra == 'gds-cu13'
+ - aiohttp ; extra == 'http'
+ - s3fs ; extra == 's3'
+ - fsspec-xrootd>=0.5.0 ; extra == 'xrootd'
+ requires_python: '>=3.10'
- pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl
name: urllib3
version: 2.6.3
@@ -5483,6 +6336,14 @@ packages:
requires_dist:
- coverage ; extra == 'testing'
requires_python: '>=3.7'
+- pypi: https://files.pythonhosted.org/packages/38/34/98a2f52245f4d47be93b580dae5f9861ef58977d73a79eb47c58f1ad1f3a/xmltodict-1.0.4-py3-none-any.whl
+ name: xmltodict
+ version: 1.0.4
+ sha256: a4a00d300b0e1c59fc2bfccb53d7b2e88c32f200df138a0dd2229f842497026a
+ requires_dist:
+ - pytest ; extra == 'test'
+ - pytest-cov ; extra == 'test'
+ requires_python: '>=3.9'
- conda: https://conda.anaconda.org/conda-forge/linux-64/xrootd-5.9.1-py314h75aeccf_0.conda
sha256: 2351cace7322d68dd834c276f4cb19bc35a68d90642dd7083b4924bb26a66228
md5: d9b7e0eeecec187f4344983ba341c2d7
@@ -5576,6 +6437,21 @@ packages:
- pkg:pypi/xrootd?source=hash-mapping
size: 3347452
timestamp: 1769448002819
+- pypi: https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl
+ name: xxhash
+ version: 3.6.0
+ sha256: a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405
+ requires_python: '>=3.7'
+- pypi: https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl
+ name: xxhash
+ version: 3.6.0
+ sha256: a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e
+ requires_python: '>=3.7'
+- pypi: https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl
+ name: xxhash
+ version: 3.6.0
+ sha256: 0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b
+ requires_python: '>=3.7'
- conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda
sha256: 6d9ea2f731e284e9316d95fa61869fe7bbba33df7929f82693c121022810f4ad
md5: a77f85f77be52ff59391544bfe73390a
diff --git a/pyproject.toml b/pyproject.toml
index 2837f9f..dbc0eb4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,6 +31,7 @@ dependencies = [
"diracx-client>=0.0.8",
"diracx-cli>=0.0.8",
"lbprodrun",
+ "LHCbDIRAC @ git+https://git@gitlab.cern.ch/jlisalab/LHCbDIRAC.git@modules-to-cwl-migration", # Temporary fork dependency
"pydantic",
"pyyaml",
"typer",
@@ -78,7 +79,7 @@ allow_redefinition = true
enable_error_code = ["import", "attr-defined"]
[[tool.mypy.overrides]]
-module = ["requests", "yaml"]
+module = ["requests", "yaml", "DIRAC.*", "LHCbDIRAC.*", "DIRACCommon.*"]
ignore_missing_imports = true
[tool.pytest.ini_options]
diff --git a/src/dirac_cwl/commands/__init__.py b/src/dirac_cwl/commands/__init__.py
index 01e8b17..f2446d6 100644
--- a/src/dirac_cwl/commands/__init__.py
+++ b/src/dirac_cwl/commands/__init__.py
@@ -1,5 +1,20 @@
"""Command classes for workflow pre/post-processing operations."""
+from .analyze_xml_summary import AnalyseXmlSummary
+from .bookkeeping_report import BookeepingReport
from .core import PostProcessCommand, PreProcessCommand
+from .failover_request import FailoverRequest
+from .upload_log_file import UploadLogFile
+from .upload_output_data import UploadOutputData
+from .workflow_accounting import WorkflowAccounting
-__all__ = ["PreProcessCommand", "PostProcessCommand"]
+__all__ = [
+ "AnalyseXmlSummary",
+ "PreProcessCommand",
+ "PostProcessCommand",
+ "UploadLogFile",
+ "BookeepingReport",
+ "FailoverRequest",
+ "UploadOutputData",
+ "WorkflowAccounting",
+]
diff --git a/src/dirac_cwl/commands/analyze_xml_summary.py b/src/dirac_cwl/commands/analyze_xml_summary.py
new file mode 100644
index 0000000..ecb1543
--- /dev/null
+++ b/src/dirac_cwl/commands/analyze_xml_summary.py
@@ -0,0 +1,84 @@
+"""LHCb command for checking the XMLSummary output to ensure that the execution was done correctly."""
+
+import os
+
+from DIRAC.TransformationSystem.Client.FileReport import FileReport
+from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
+from LHCbDIRAC.Core.Utilities.XMLSummaries import XMLSummary
+from LHCbDIRAC.Workflow.Modules.AnalyseXMLSummary import _areInputsOK, _isXMLSummaryOK
+from LHCbDIRAC.Workflow.Modules.BookkeepingReport import _generate_xml_object
+
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
+from .core import PostProcessCommand
+from .utils import prepare_lhcb_workflow_commons, save_workflow_commons
+
+
+class AnalyseXmlSummary(PostProcessCommand):
+ """Performs a series of checks on the XMLSummary output to make sure the execution was done correctly."""
+
+ def execute(self, job_path, **kwargs):
+ """Execute the command.
+
+ :param job_path: Path to the job working directory.
+ :param kwargs: Additional keyword arguments.
+ """
+ failed = False
+ try:
+ workflow_commons_path = kwargs.get("workflow_commons_path", os.path.join(job_path, "workflow_commons.json"))
+
+ workflow_commons = prepare_lhcb_workflow_commons(
+ workflow_commons_path,
+ extra_mandatory_values=[
+ "bk_step_id",
+ ],
+ extra_default_values={
+ "bookkeeping_LFNs": [],
+ "size": {},
+ "md5": {},
+ "guid": {},
+ "sim_description": "NoSimConditions",
+ },
+ )
+
+ if not workflow_commons["step_status"]["OK"]:
+ return
+
+ if "xml_summary_path" in workflow_commons:
+ xf_o = XMLSummary(workflow_commons["xml_summary_path"])
+ else:
+ xf_o = _generate_xml_object(
+ workflow_commons["cleaned_application_name"],
+ workflow_commons["production_id"],
+ workflow_commons["prod_job_id"],
+ workflow_commons["command_number"],
+ workflow_commons["command_id"],
+ )
+
+ file_report = FileReport()
+ job_report = JobReport(workflow_commons["job_id"])
+
+ file_report.statusDict = workflow_commons["file_report_files_dict"]
+
+ jobOk = _isXMLSummaryOK(xf_o)
+
+ if jobOk:
+ jobOk = _areInputsOK(
+ xf_o,
+ workflow_commons["inputs"],
+ workflow_commons["number_of_events"],
+ workflow_commons["production_id"],
+ file_report,
+ )
+ if not jobOk:
+ job_report.setApplicationStatus("XMLSummary reports error")
+ raise WorkflowProcessingException("XMLSummary reports error")
+
+ job_report.setApplicationStatus(f"{workflow_commons['application_name']} Step OK")
+
+ except:
+ failed = True
+ raise
+
+ finally:
+ save_workflow_commons(workflow_commons, workflow_commons_path, failed=failed)
diff --git a/src/dirac_cwl/commands/bookkeeping_report.py b/src/dirac_cwl/commands/bookkeeping_report.py
new file mode 100644
index 0000000..1573ad7
--- /dev/null
+++ b/src/dirac_cwl/commands/bookkeeping_report.py
@@ -0,0 +1,168 @@
+"""LHCb command for bookkeeping report file generation based on the XMLSummary and the XML catalog."""
+
+import os
+
+from DIRAC.Workflow.Utilities.Utils import getStepCPUTimes
+from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient
+from LHCbDIRAC.Core.Utilities.ProductionData import constructProductionLFNs
+from LHCbDIRAC.Core.Utilities.XMLSummaries import XMLSummary
+from LHCbDIRAC.Workflow.Modules.BookkeepingReport import (
+ _generate_xml_object,
+ _generateInputFiles,
+ _generateOutputFiles,
+ _prepare_job_info,
+ _process_time,
+)
+from LHCbDIRAC.Workflow.Modules.ModulesUtilities import getNumberOfProcessorsToUse
+
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
+from .core import PostProcessCommand
+from .utils import prepare_lhcb_workflow_commons, save_workflow_commons
+
+
+class BookeepingReport(PostProcessCommand):
+ """Generates a bookkeeping report file based on the XMLSummary and the pool XML catalog."""
+
+ def execute(self, job_path, **kwargs):
+ """Execute the command.
+
+ :param job_path: Path to the job working directory.
+ :param kwargs: Additional keyword arguments.
+ """
+ failed = False
+ try:
+ # Obtain Workflow Commons
+ workflow_commons_path = kwargs.get("workflow_commons_path", os.path.join(job_path, "workflow_commons.json"))
+
+ workflow_commons = prepare_lhcb_workflow_commons(
+ workflow_commons_path,
+ extra_mandatory_values=[
+ "bk_step_id",
+ ],
+ extra_default_values={
+ "bookkeeping_LFNs": [],
+ "size": {},
+ "md5": {},
+ "guid": {},
+ "sim_description": "NoSimConditions",
+ },
+ )
+
+ if not workflow_commons["step_status"]["OK"]:
+ return
+
+ # Setup variables
+ start_time = workflow_commons.get("start_time", None)
+
+ cpu_times = {}
+ if start_time:
+ cpu_times["StartTime"] = start_time
+ if "start_stats" in workflow_commons:
+ cpu_times["StartStats"] = workflow_commons["start_stats"]
+
+ exectime, cputime = getStepCPUTimes(cpu_times)
+
+ number_of_processors = getNumberOfProcessorsToUse(
+ workflow_commons["job_id"], workflow_commons["max_number_of_processors"]
+ )
+
+ bk_client = BookkeepingClient()
+
+ parameters = {
+ "PRODUCTION_ID": workflow_commons["production_id"],
+ "JOB_ID": workflow_commons["prod_job_id"],
+ "configVersion": workflow_commons["config_version"],
+ "outputList": workflow_commons["outputs"],
+ "configName": workflow_commons["config_name"],
+ "outputDataFileMask": workflow_commons["output_data_file_mask"],
+ }
+
+ if "bookkeeping_LFNs" in workflow_commons and "production_output_data" in workflow_commons:
+ bk_lfns = workflow_commons["bookkeeping_LFNs"]
+
+ if not isinstance(bk_lfns, list):
+ bk_lfns = [i.strip() for i in bk_lfns.split(";")]
+
+ else:
+ result = constructProductionLFNs(parameters, bk_client)
+ if not result["OK"]:
+ raise WorkflowProcessingException("Could not create production LFNs")
+
+ bk_lfns = result["Value"]["BookkeepingLFNs"]
+
+ ldate, ltime, ldatestart, ltimestart = _process_time(start_time)
+
+ # Obtain XMLSummary
+ if "xml_summary_path" in workflow_commons:
+ xf_o = XMLSummary(workflow_commons["xml_summary_path"])
+ else:
+ xf_o = _generate_xml_object(
+ workflow_commons["cleaned_application_name"],
+ workflow_commons["production_id"],
+ workflow_commons["prod_job_id"],
+ workflow_commons["command_number"],
+ workflow_commons["command_id"],
+ )
+
+ info_dict = {
+ "exectime": exectime,
+ "cputime": cputime,
+ "numberOfProcessors": number_of_processors,
+ "production_id": workflow_commons["production_id"],
+ "jobID": workflow_commons["job_id"],
+ "siteName": workflow_commons["site_name"],
+ "jobType": workflow_commons["job_type"],
+ "applicationName": workflow_commons["application_name"],
+ "applicationVersion": workflow_commons["application_version"],
+ "numberOfEvents": workflow_commons["number_of_events"],
+ }
+
+ # Generate job_info object
+ job_info = _prepare_job_info(
+ info_dict,
+ ldatestart,
+ ltimestart,
+ ldate,
+ ltime,
+ xf_o,
+ workflow_commons["inputs"],
+ workflow_commons["command_id"],
+ workflow_commons["bk_step_id"],
+ bk_client,
+ workflow_commons["config_name"],
+ workflow_commons["config_version"],
+ )
+
+ # Add input files to job_info
+ _generateInputFiles(job_info, bk_lfns, workflow_commons["inputs"])
+
+ # Add output files to job_info
+ _generateOutputFiles(
+ job_info,
+ bk_lfns,
+ workflow_commons["event_type"],
+ workflow_commons["application_name"],
+ xf_o,
+ workflow_commons["outputs"],
+ workflow_commons["inputs"],
+ )
+
+ # Generate SimulationConditions
+ if workflow_commons["application_name"] == "Gauss":
+ job_info.simulation_condition = workflow_commons["sim_description"]
+
+ # Convert job_info object to XML
+ doc = job_info.to_xml()
+
+ # Write to file
+ bfilename = f"bookkeeping_{workflow_commons['command_id']}.xml"
+ with open(bfilename, "wb") as bfile:
+ bfile.write(doc)
+
+ except:
+ failed = True
+ raise
+
+ finally:
+ save_workflow_commons(workflow_commons, workflow_commons_path, failed=failed)
diff --git a/src/dirac_cwl/commands/failover_request.py b/src/dirac_cwl/commands/failover_request.py
new file mode 100644
index 0000000..a56119f
--- /dev/null
+++ b/src/dirac_cwl/commands/failover_request.py
@@ -0,0 +1,114 @@
+"""LHCb command for committing the status of the files in the file report.
+
+The status will be "Processed" if everything ended properly or "Unused" if it did not.
+"""
+
+import json
+import os
+
+from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
+from DIRAC.RequestManagementSystem.Client.Request import Request
+from DIRAC.RequestManagementSystem.private.RequestValidator import RequestValidator
+from DIRAC.TransformationSystem.Client.FileReport import FileReport
+from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
+from LHCbDIRAC.Workflow.Modules.FailoverRequest import _prepareRequest
+
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
+from .core import PostProcessCommand
+from .utils import prepare_lhcb_workflow_commons, save_workflow_commons
+
+
+class FailoverRequest(PostProcessCommand):
+ """Commits the status of the files in the file report.
+
+ The status will be "Processed" if everything ended properly or "Unused" if it did not.
+ """
+
+ def execute(self, job_path, **kwargs):
+ """Execute the command.
+
+ :param job_path: Path to the job working directory.
+ :param kwargs: Additional keyword arguments.
+ """
+ failed = False
+ try:
+ workflow_commons_path = kwargs.get("workflow_commons_path", os.path.join(job_path, "workflow_commons.json"))
+
+ workflow_commons = prepare_lhcb_workflow_commons(
+ workflow_commons_path,
+ extra_mandatory_values=[],
+ extra_default_values={"accounting_registers": None},
+ )
+
+ request = Request(workflow_commons["request_dict"])
+ file_report = FileReport()
+ file_report.statusDict = workflow_commons["file_report_files_dict"]
+
+ job_report = JobReport(workflow_commons["job_id"])
+
+ _prepareRequest(request, workflow_commons["job_id"])
+
+ filesInFileReport = file_report.getFiles()
+
+ for lfn in workflow_commons["inputs"]:
+ if lfn not in filesInFileReport:
+ status = "Processed" if workflow_commons["step_status"]["OK"] else "Unused"
+ file_report.setFileStatus(int(workflow_commons["production_id"]), lfn, status)
+
+ file_report.commit()
+
+ if workflow_commons["step_status"]["OK"]:
+ if file_report.getFiles():
+ result = file_report.generateForwardDISET()
+ if result["OK"] and result["Value"]:
+ request.addOperation(result["Value"])
+
+ job_report.setApplicationStatus("Job Finished Successfully", True)
+
+ self.generateFailoverFile(job_report, request, workflow_commons)
+
+ except:
+ failed = True
+ raise
+
+ finally:
+ save_workflow_commons(workflow_commons, workflow_commons_path, request=request, failed=failed)
+
+ def generateFailoverFile(self, job_report, request, workflow_commons):
+ """Create a request.json file."""
+ result = job_report.generateForwardDISET()
+
+ if result["OK"]:
+ if result["Value"]:
+ request.addOperation(result["Value"])
+
+ if len(request):
+ # Try to optimize the request
+ try:
+ request.optimize()
+ except: # noqa: E722
+ pass
+
+ # Validate request
+ result = RequestValidator().validate(request)
+ if not result["OK"]:
+ raise WorkflowProcessingException(
+ "Failed to generate FailoverFile. Invalid request object", result["Message"]
+ )
+
+ # Get the request as a Json
+ result = request.toJSON()
+ if not result["OK"]:
+ raise WorkflowProcessingException(result["Message"])
+
+ # Write it
+ fname = f"{workflow_commons['production_id']}_{workflow_commons['prod_job_id']}_request.json"
+ with open(fname, "w", encoding="utf-8") as f:
+ json.dump(result["Value"], f)
+
+ if workflow_commons["accounting_registers"]:
+ dsc = DataStoreClient()
+ for register in workflow_commons["accounting_registers"]:
+ dsc.addRegister(register)
+ dsc.commit()
diff --git a/src/dirac_cwl/commands/upload_log_file.py b/src/dirac_cwl/commands/upload_log_file.py
new file mode 100644
index 0000000..19a5a7e
--- /dev/null
+++ b/src/dirac_cwl/commands/upload_log_file.py
@@ -0,0 +1,149 @@
+"""Post-processing command for uploading logging information to a Storage Element."""
+
+import os
+import shlex
+
+from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
+from DIRAC.Core.Utilities.ReturnValues import returnSingleResult
+from DIRAC.Core.Utilities.Subprocess import systemCall
+from DIRAC.DataManagementSystem.Client.FailoverTransfer import FailoverTransfer
+from DIRAC.RequestManagementSystem.Client.Request import Request
+from DIRAC.Resources.Storage.StorageElement import StorageElement
+from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
+from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient
+from LHCbDIRAC.Core.Utilities.ProductionData import getLogPath
+from LHCbDIRAC.Workflow.Modules.FailoverRequest import _prepareRequest
+from LHCbDIRAC.Workflow.Modules.UploadLogFile import (
+ _createLogUploadRequest,
+ _determineRelevantFiles,
+ _get_log_url,
+ _populateLogDirectory,
+ _setLogFilePermissions,
+ _uploadLogToFailoverSE,
+ _zip_files,
+)
+
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
+from .core import PostProcessCommand
+from .utils import prepare_lhcb_workflow_commons, save_workflow_commons
+
+
+class UploadLogFile(PostProcessCommand):
+ """Post-processing command for log file uploading."""
+
+ def execute(self, job_path, **kwargs):
+ """Execute the log uploading process.
+
+ :param job_path: Path to the job working directory.
+ :param kwargs: Additional keyword arguments.
+ """
+ # Obtain workflow information
+ failed = False
+ workflow_commons = {}
+ request = None
+ try:
+ workflow_commons_path = kwargs.get("workflow_commons_path", os.path.join(job_path, "workflow_commons.json"))
+
+ workflow_commons = prepare_lhcb_workflow_commons(
+ workflow_commons_path,
+ extra_mandatory_values=[],
+ extra_default_values={"log_target_path": None, "log_file_path": ""},
+ )
+ request = Request(workflow_commons["request_dict"])
+
+ if not workflow_commons["step_status"]["OK"]:
+ return
+
+ log_lfn_path = workflow_commons["log_target_path"]
+ if not log_lfn_path:
+ parameters = {
+ "PRODUCTION_ID": workflow_commons["production_id"],
+ "JOB_ID": workflow_commons["job_id"],
+ "configName": workflow_commons["config_name"],
+ "configVersion": workflow_commons["config_version"],
+ }
+ result = getLogPath(parameters, BookkeepingClient())
+ if not result["OK"]:
+ raise WorkflowProcessingException("Could not create LogFilePath", result["Message"])
+ log_lfn_path = result["Value"]["LogTargetPath"][0]
+
+ if not isinstance(log_lfn_path, str):
+ log_lfn_path = log_lfn_path[0]
+
+ workflow_commons["log_lfn_path"] = log_lfn_path
+
+ ops = Operations()
+ log_se = ops.getValue("LogStorage/LogSE", "LogSE")
+ log_extensions = ops.getValue("LogFiles/Extensions", [])
+
+ _prepareRequest(request, workflow_commons["job_id"])
+ failover_transfer = FailoverTransfer(request)
+ job_report = JobReport(workflow_commons["job_id"])
+
+ res = systemCall(0, shlex.split("ls -al"))
+
+ workflow_commons["log_dir"] = os.path.realpath(
+ f"./job/log/{workflow_commons['production_id']}/{workflow_commons['prod_job_id']}"
+ )
+
+ ##########################################
+ # First determine the files which should be saved
+ res = _determineRelevantFiles(log_extensions)
+ if not res["OK"]:
+ return
+ selectedFiles = res["Value"]
+
+ #########################################
+ # Create a temporary directory containing these files
+ res = _populateLogDirectory(selectedFiles, workflow_commons["log_dir"])
+ if not res["OK"]:
+ job_report.setApplicationStatus("Failed To Populate Log Dir")
+ return
+
+ #########################################
+ # Make sure all the files in the log directory have the correct permissions
+ result = _setLogFilePermissions(workflow_commons["log_dir"])
+
+ # zip all files
+ result = _zip_files(workflow_commons["prod_job_id"], selectedFiles)
+ if not result["OK"]:
+ job_report.setApplicationStatus("Failed to create zip of log files")
+ return
+
+ zip_file_name = result["Value"]
+
+ # Instantiate the failover transfer client with the global request object
+ if not failover_transfer:
+ failover_transfer = FailoverTransfer(request)
+
+ # logFilePath is something like /lhcb/MC/2016/LOG/00095376/0000/
+ # the zipFileName should have the same name, e.g. 00000381.zip
+ zipPath = os.path.join(workflow_commons["log_file_path"], zip_file_name)
+ logHttpsURL = _get_log_url(log_se, zipPath)
+
+ res = returnSingleResult(StorageElement(log_se).putFile({zipPath: zip_file_name}))
+ if not res["OK"]:
+ result = _uploadLogToFailoverSE(
+ failover_transfer, zip_file_name, log_lfn_path, workflow_commons["site_name"]
+ )
+
+ if not result["OK"]:
+ job_report.setApplicationStatus("Failed To Upload Logs")
+ else:
+ uploadedSE = result["Value"]["uploadedSE"]
+ request = failover_transfer.request
+ _createLogUploadRequest(request, log_se, log_lfn_path, uploadedSE)
+
+ # While it's the zip file that is uploaded, we set in job parameters its directory,
+ # as the .zip is deflated automatically
+ job_report.setJobParameter(
+                "Log URL", f'<a href="{logHttpsURL}">Log file directory</a>'
+ )
+
+ except Exception as e:
+ failed = True
+ raise WorkflowProcessingException(e) from e
+
+ finally:
+ save_workflow_commons(workflow_commons, workflow_commons_path, request, failed=failed)
diff --git a/src/dirac_cwl/commands/upload_output_data.py b/src/dirac_cwl/commands/upload_output_data.py
new file mode 100644
index 0000000..03617bf
--- /dev/null
+++ b/src/dirac_cwl/commands/upload_output_data.py
@@ -0,0 +1,190 @@
+"""LHCb command for registering the outputs generated to the corresponding SE or the FailoverSE in case of failure."""
+
+import os
+import random
+
+from DIRAC.DataManagementSystem.Client.DataManager import DataManager
+from DIRAC.DataManagementSystem.Client.FailoverTransfer import FailoverTransfer
+from DIRAC.RequestManagementSystem.Client.Request import Request
+from DIRAC.TransformationSystem.Client.FileReport import FileReport
+from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
+from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient
+from LHCbDIRAC.Core.Utilities.ProductionData import constructProductionLFNs
+from LHCbDIRAC.Core.Utilities.ResolveSE import getDestinationSEList
+from LHCbDIRAC.DataManagementSystem.Client.ConsistencyChecks import getFileDescendents
+from LHCbDIRAC.Workflow.Modules.UploadOutputData import (
+ _createMetaDict,
+ _getBKFiles,
+ _getCleanRequest,
+ _getFileMetada,
+ _registerLFNs,
+ _resolveSEs,
+ _sendBKReport,
+)
+
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
+from .core import PostProcessCommand
+from .utils import prepare_lhcb_workflow_commons, save_workflow_commons
+
+
+class UploadOutputData(PostProcessCommand):
+ """Registers every output generated to the corresponding SE and Catalog or to the FailoverSE in case of failure."""
+
+ def execute(self, job_path, **kwargs):
+ """Execute the command.
+
+ :param job_path: Path to the job working directory.
+ :param kwargs: Additional keyword arguments.
+ """
+ fail = False
+ try:
+ workflow_commons_path = kwargs.get("workflow_commons_path", os.path.join(job_path, "workflow_commons.json"))
+
+ workflow_commons = prepare_lhcb_workflow_commons(
+ workflow_commons_path,
+ extra_mandatory_values=["output_data_step", "output_SEs"],
+ extra_default_values={
+ "file_descendants": None,
+ "prod_output_LFNs": None,
+ "run_number": "Unknown",
+ "output_mode": "Any",
+ },
+ )
+ request = Request(workflow_commons["request_dict"])
+
+ if not workflow_commons["step_status"]["OK"]:
+ return
+
+ bk_client = BookkeepingClient()
+ data_manager = DataManager()
+
+ failover_se_list = getDestinationSEList("Tier1-Failover", workflow_commons["site_name"], outputmode="Any")
+ random.shuffle(failover_se_list)
+
+ file_report = FileReport()
+ file_report.statusDict = workflow_commons["file_report_files_dict"]
+
+ job_report = JobReport(workflow_commons["job_id"])
+
+ if not workflow_commons["prod_output_LFNs"]:
+ parameters = {
+ "PRODUCTION_ID": workflow_commons["production_id"],
+ "JOB_ID": workflow_commons["job_id"],
+ "configVersion": workflow_commons["config_version"],
+ "outputList": workflow_commons["outputs"],
+ "configName": workflow_commons["config_name"],
+ "outputDataFileMask": workflow_commons["output_data_file_mask"],
+ }
+ result = constructProductionLFNs(parameters, bk_client)
+
+ if not result["OK"]:
+                    raise WorkflowProcessingException("Unable to construct production LFNs")
+
+ workflow_commons["prod_output_LFNs"] = result["Value"]["ProductionOutputData"]
+
+ file_metadata = _getFileMetada(
+ workflow_commons["outputs"],
+ workflow_commons["prod_output_LFNs"],
+ workflow_commons["output_data_file_mask"],
+ workflow_commons["output_data_step"],
+ workflow_commons["output_SEs"],
+ )
+
+ if not file_metadata:
+ return
+
+ final = _resolveSEs(
+ file_metadata,
+ None,
+ workflow_commons["site_name"],
+ workflow_commons["output_mode"],
+ workflow_commons["run_number"],
+ )
+
+ if workflow_commons["inputs"]:
+ lfns_with_descendants = workflow_commons["file_descendants"]
+
+ if not lfns_with_descendants:
+ lfns_with_descendants = getFileDescendents(
+ workflow_commons["production_id"],
+ workflow_commons["inputs"],
+ dm=data_manager,
+ bkClient=bk_client,
+ )
+
+ if lfns_with_descendants:
+ file_report.setFileStatus(
+ int(workflow_commons["production_id"]), lfns_with_descendants, "Processed"
+ )
+ raise WorkflowProcessingException("Input Data Already Processed")
+
+ bkFiles = _getBKFiles()
+
+ for bkFile in bkFiles:
+ with open(bkFile) as fd:
+ bkXML = fd.read()
+
+ result = _sendBKReport(bk_client, request, bkXML)
+
+ failover_transfer = FailoverTransfer(request)
+
+ perform_bk_registration = []
+
+ failover = {}
+ for file_name, metadata in final.items():
+ targetSE = metadata["resolvedSE"]
+ file_meta_dict = _createMetaDict(metadata)
+ result = failover_transfer.transferAndRegisterFile(
+ fileName=file_name,
+ localPath=metadata["localpath"],
+ lfn=metadata["filedict"]["LFN"],
+ destinationSEList=targetSE,
+ fileMetaDict=file_meta_dict,
+ masterCatalogOnly=True,
+ )
+ if not result["OK"]:
+ failover[file_name] = metadata
+ else:
+ perform_bk_registration.append(metadata)
+
+ cleanUp = False
+ for file_name, metadata in failover.items():
+ random.shuffle(failover_se_list)
+ targetSE = metadata["resolvedSE"][0]
+ metadata["resolvedSE"] = failover_se_list
+
+ file_meta_dict = _createMetaDict(metadata)
+ result = failover_transfer.transferAndRegisterFileFailover(
+ fileName=file_name,
+ localPath=metadata["localpath"],
+ lfn=metadata["filedict"]["LFN"],
+ targetSE=targetSE,
+ failoverSEList=metadata["resolvedSE"],
+ fileMetaDict=file_meta_dict,
+ masterCatalogOnly=True,
+ )
+ if not result["OK"]:
+ cleanUp = True
+ break
+
+ request = failover_transfer.request
+ if cleanUp:
+ request = _getCleanRequest(request, final)
+ raise WorkflowProcessingException("Failed to upload output data")
+
+ if final:
+ report = ", ".join(final)
+ job_report.setJobParameter("UploadedOutputData", report)
+
+ if perform_bk_registration:
+ result = _registerLFNs(request, perform_bk_registration)
+ if not result["OK"]:
+ raise WorkflowProcessingException(result["Message"])
+
+ except:
+ fail = True
+ raise
+
+ finally:
+ save_workflow_commons(workflow_commons, workflow_commons_path, request, failed=fail)
diff --git a/src/dirac_cwl/commands/utils.py b/src/dirac_cwl/commands/utils.py
new file mode 100644
index 0000000..f910bd5
--- /dev/null
+++ b/src/dirac_cwl/commands/utils.py
@@ -0,0 +1,127 @@
+"""Shared helpers for loading, validating, and persisting workflow_commons data."""
+
+import json
+import os
+import shutil
+
+from DIRAC import siteName
+from DIRAC.Core.Utilities.ReturnValues import S_ERROR, S_OK
+
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
+
+def prepare_lhcb_workflow_commons(workflow_commons_path, extra_mandatory_values=[], extra_default_values={}):
+ """Return a dictionary containing the values of a workflow_commons.json file.
+
+ Also performs a series of checks to ensure everything is in order.
+ """
+ if not os.path.exists(workflow_commons_path):
+ raise WorkflowProcessingException(f"{workflow_commons_path} file not found")
+
+ with open(workflow_commons_path, "r", encoding="utf-8") as f:
+ workflow_commons = json.load(f)
+
+ if not workflow_commons:
+ raise WorkflowProcessingException(f"{workflow_commons_path} cannot be empty")
+
+ mandatory_values = [
+ "job_id",
+ "job_type",
+ "production_id",
+ "prod_job_id",
+ "number_of_events",
+ "application_name",
+ "application_version",
+ "inputs",
+ "outputs", # outputList
+ "executable",
+ "command_id", # StepID
+ "command_number",
+ ]
+
+ mandatory_values.extend(extra_mandatory_values)
+ missing_values = []
+
+ for value in mandatory_values:
+ if value not in workflow_commons:
+ missing_values.append(value)
+
+ if missing_values:
+ raise WorkflowProcessingException(
+ f"The following values are missing in workflow_commons.json: {missing_values}"
+ )
+
+ commons_defaults = {
+ "output_data_file_mask": "",
+ "run_metadata": {},
+ "log_target_path": "",
+ "production_output_data": [],
+ "CPUe": 0,
+ "max_number_of_events": "0",
+ "output_data_type": None,
+ "application_log": "",
+ "application_type": None,
+ "options_file": None,
+ "options_line": None,
+ "extra_packages": "",
+ "multi_core": False,
+ "max_number_of_processors": None,
+ "system_config": None,
+ "mcTCK": None,
+ "condDB_tag": None,
+ "DQ_tag": None,
+ "step_status": S_OK(),
+ "config_name": None,
+ "config_version": None,
+ "request_dict": {},
+ "file_report_files_dict": {},
+ "number_of_processors": 1,
+ }
+
+ for k, v in extra_default_values.items():
+ if k not in commons_defaults:
+ commons_defaults[k] = v
+
+ for k, v in commons_defaults.items():
+ if k not in workflow_commons:
+ workflow_commons[k] = v
+
+ cleaned_application_name = workflow_commons["application_name"].replace("/", "")
+ workflow_commons["cleaned_application_name"] = cleaned_application_name
+
+ workflow_commons["site_name"] = siteName()
+
+ return workflow_commons
+
+
+def save_workflow_commons(wf_commons, wf_file_path, request=None, failed=False):
+    """Update the workflow_commons file to accommodate for the new values.
+
+ Ensures that no data is lost during the update by creating a backup.
+ """
+ if not (os.path.exists(wf_file_path) and os.path.isfile(wf_file_path)):
+ raise WorkflowProcessingException(f"Workflow Commons file '{wf_file_path}' not found")
+
+ wf_filename = os.path.basename(wf_file_path)
+ wf_backup = f"{wf_filename}.bak"
+
+ shutil.move(wf_file_path, wf_backup)
+
+ if failed:
+ wf_commons["step_status"] = S_ERROR()
+
+ if request:
+ wf_commons["request_dict"] = json.loads(request.toJSON()["Value"])
+
+ try:
+ with open(wf_file_path, "x", encoding="utf-8") as f:
+ json.dump(wf_commons, f)
+ except Exception:
+ os.unlink(wf_file_path)
+ shutil.copy2(wf_backup, wf_file_path)
+ return False
+
+ finally:
+ os.unlink(wf_backup)
+
+ return True
diff --git a/src/dirac_cwl/commands/workflow_accounting.py b/src/dirac_cwl/commands/workflow_accounting.py
new file mode 100644
index 0000000..0f69946
--- /dev/null
+++ b/src/dirac_cwl/commands/workflow_accounting.py
@@ -0,0 +1,108 @@
+"""LHCb command for preparing and sending accounting information to the DIRAC Accounting system.
+
+Formerly known as StepAccounting.
+"""
+
+import os
+from datetime import datetime
+
+from DIRAC import gConfig
+from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
+from DIRAC.Workflow.Utilities.Utils import getStepCPUTimes
+from LHCbDIRAC.AccountingSystem.Client.Types.JobStep import JobStep
+from LHCbDIRAC.Core.Utilities.XMLSummaries import XMLSummary
+from LHCbDIRAC.Workflow.Modules.BookkeepingReport import _generate_xml_object
+
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
+from .core import PostProcessCommand
+from .utils import prepare_lhcb_workflow_commons, save_workflow_commons
+
+
class WorkflowAccounting(PostProcessCommand):
    """Prepares and sends accounting information to the DIRAC Accounting system."""

    def execute(self, job_path, **kwargs):
        """Execute the command.

        Builds a JobStep accounting record from the workflow commons and the
        step XML summary, validates it, and queues it on a DataStoreClient.
        The (possibly updated) workflow commons are always saved back to disk.

        :param job_path: Path to the job working directory.
        :param kwargs: Additional keyword arguments; ``workflow_commons_path``
            overrides the default ``<job_path>/workflow_commons.json``.
        :raises WorkflowProcessingException: if the record cannot be built or
            its values fail validation.
        """
        failed = False
        # Resolved before the try block so the finally clause can always
        # reference both names, even when preparation itself raises.
        workflow_commons_path = kwargs.get("workflow_commons_path", os.path.join(job_path, "workflow_commons.json"))
        workflow_commons = {}
        try:
            workflow_commons = prepare_lhcb_workflow_commons(
                workflow_commons_path,
                extra_mandatory_values=["bk_step_id", "step_proc_pass", "event_type"],
                extra_default_values={
                    "step_proc_pass": "",
                    "run_number": "Unknown",
                },
            )

            cpu_times = {}
            if "start_time" in workflow_commons:
                cpu_times["StartTime"] = workflow_commons["start_time"]
            if "start_stats" in workflow_commons:
                cpu_times["StartStats"] = workflow_commons["start_stats"]

            exec_time, cpu_time = getStepCPUTimes(cpu_times)

            # Normalise the raw CPU time with the site-provided factor
            # (0.0 when the site does not publish one).
            cpuNormFactor = gConfig.getValue("/LocalSite/CPUNormalizationFactor", 0.0)
            normCPU = cpu_time * cpuNormFactor

            jobStep = JobStep()

            # Prefer the real XML summary when one was produced; otherwise
            # build a minimal substitute from the workflow commons.
            if "xml_summary_path" in workflow_commons:
                xf_o = XMLSummary(workflow_commons["xml_summary_path"])
            else:
                xf_o = _generate_xml_object(
                    workflow_commons["cleaned_application_name"],
                    workflow_commons["production_id"],
                    workflow_commons["prod_job_id"],
                    workflow_commons["command_number"],
                    workflow_commons["command_id"],
                )

            # TODO: datetime.utcnow() is deprecated since Python 3.12; switch
            # to datetime.now(timezone.utc) once JobStep accepts aware times.
            now = datetime.utcnow()
            jobStep.setStartTime(now)
            jobStep.setEndTime(now)

            dataDict = {
                "JobGroup": str(workflow_commons["production_id"]),
                "RunNumber": workflow_commons["run_number"],
                "EventType": workflow_commons["event_type"],
                "ProcessingType": workflow_commons["step_proc_pass"],  # this is the processing pass of the step
                "ProcessingStep": workflow_commons["bk_step_id"],  # the step ID
                "Site": workflow_commons["site_name"],
                "FinalStepState": workflow_commons["step_status"],
                "CPUTime": cpu_time,
                "NormCPUTime": normCPU,
                "ExecTime": exec_time * workflow_commons["number_of_processors"],
                "InputData": sum(xf_o.inputFileStats.values()),
                "OutputData": sum(xf_o.outputFileStats.values()),
                "InputEvents": xf_o.inputEventsTotal,
                "OutputEvents": xf_o.outputEventsTotal,
            }

            jobStep.setValuesFromDict(dataDict)

            res = jobStep.checkValues()
            if not res["OK"]:
                raise WorkflowProcessingException(
                    f"Values for StepAccounting are wrong: {res['Message']}. Here are the given data: {dataDict}"
                )

            dsc = DataStoreClient()
            dsc.addRegister(jobStep)
            # BUGFIX: "dsc.__registersList" is name-mangled *inside this class*
            # to "dsc._WorkflowAccounting__registersList" and always raised
            # AttributeError; the DataStoreClient private attribute must be
            # reached through its own mangled name.
            workflow_commons["accounting_registers"] = dsc._DataStoreClient__registersList

        except Exception as e:
            failed = True
            # Preserve the cause message instead of raising an empty exception.
            raise WorkflowProcessingException(f"WorkflowAccounting failed: {e}") from e

        finally:
            # Persist whatever was computed, flagging failure so the step
            # status is recorded accordingly.
            if workflow_commons:
                save_workflow_commons(workflow_commons, workflow_commons_path, failed=failed)
diff --git a/test/test_commands.py b/test/test_commands.py
new file mode 100644
index 0000000..51d18a6
--- /dev/null
+++ b/test/test_commands.py
@@ -0,0 +1,2713 @@
+"""Tests for the commands.
+
+This module tests the execution of the different commands.
+"""
+
+import json
+import os
+import shutil
+import time
+import xml.etree.ElementTree as ET
+import zipfile
+from pathlib import Path
+from textwrap import dedent
+
+import LHCbDIRAC
+import pytest
+from DIRAC import siteName
+from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
+from DIRAC.DataManagementSystem.Client.FailoverTransfer import FailoverTransfer
+from DIRAC.RequestManagementSystem.Client.File import File
+from DIRAC.RequestManagementSystem.Client.Operation import Operation
+from DIRAC.RequestManagementSystem.Client.Request import Request
+from DIRAC.TransformationSystem.Client.FileReport import FileReport
+from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
+from DIRACCommon.Core.Utilities.ReturnValues import S_ERROR, S_OK
+from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient
+from LHCbDIRAC.Core.Utilities.XMLSummaries import XMLSummary
+from pytest_mock import MockerFixture
+
+from dirac_cwl.commands import (
+ AnalyseXmlSummary,
+ BookeepingReport,
+ FailoverRequest,
+ UploadLogFile,
+ UploadOutputData,
+ WorkflowAccounting,
+)
+from dirac_cwl.core.exceptions import WorkflowProcessingException
+
# Number of processors the mocked getNumberOfProcessorsToUse returns in the
# bookkeeping tests (asserted against the generated XML report).
number_of_processors = 1
# Commands under test run against the current working directory.
job_path = "."
+
+
@pytest.fixture
def wf_commons():
    """Workflow commons dictionary fixture.

    Yields a minimal commons dict shared by the command tests; on teardown it
    removes the workflow_commons.json file a test may have written.
    """
    yield {
        "job_id": 0,
        "job_type": "merge",
        "production_id": "123",
        "prod_job_id": "00000456",
        "event_type": "123456789",
        "number_of_events": "100",
        "config_name": "aConfigName",
        "config_version": "aConfigVersion",
        "application_name": "someApp",
        "application_version": "v1r0",
        "bk_step_id": "123",
        "inputs": [],
        "outputs": [],
        "executable": "",
        "command_id": "1",
        "command_number": 1,
    }

    # Teardown: drop the commons file created via create_workflow_commons().
    Path(os.path.join(job_path, "workflow_commons.json")).unlink(missing_ok=True)
+
+
@pytest.fixture
def xml_summary_file(wf_commons):
    """XMLSummaryFile file path fixture.

    Yields the conventional summary file path derived from the workflow
    commons and deletes the file (if created) on teardown.
    """
    path = os.path.join(
        job_path,
        f"summary{wf_commons['application_name']}_{wf_commons['production_id']}_{wf_commons['prod_job_id']}_{wf_commons['command_id']}.xml",
    )
    yield path
    Path(path).unlink(missing_ok=True)
+
+
@pytest.fixture
def request_file(wf_commons):
    """Request dict file path fixture.

    Yields the per-production request JSON path and deletes the file (if
    created) on teardown.
    """
    path = os.path.join(job_path, f"{wf_commons['production_id']}_{wf_commons['prod_job_id']}_request.json")
    yield path
    Path(path).unlink(missing_ok=True)
+
+
def prepare_XMLSummary_file(xml_summary, content):
    """Prepare an XML summary file and return it wrapped in an XMLSummary object."""
    Path(xml_summary).write_text(content, encoding="utf-8")
    return XMLSummary(xml_summary)
+
+
def get_typed_parameter_value(name, root):
    """Find the value of a specific TypedParameter by its name."""
    matches = (
        node.attrib["Value"]
        for node in root
        if node.tag == "TypedParameter" and node.attrib["Name"] == name
    )
    # None when no TypedParameter with that name exists.
    return next(matches, None)
+
+
def get_output_file_details(output_file):
    """Extract details from an OutputFile element."""
    parameters = {}
    replicas = []

    # Children are either Parameter (name/value pairs) or Replica entries.
    for node in output_file:
        if node.tag == "Parameter":
            parameters[node.attrib["Name"]] = node.attrib["Value"]
        elif node.tag == "Replica":
            replicas.append({"Name": node.attrib["Name"], "Location": node.attrib["Location"]})

    return {
        "Name": output_file.attrib["Name"],
        "TypeName": output_file.attrib["TypeName"],
        "Parameters": parameters,
        "Replicas": replicas,
    }
+
+
def create_workflow_commons(wf_dict, base_path=None):
    """Dump the content of wf_dict to a workflow_commons.json file.

    :param wf_dict: dictionary to serialise.
    :param base_path: directory to write into; defaults to the module-level
        ``job_path`` for backward compatibility.
    :returns: path of the written file.
    """
    target_dir = job_path if base_path is None else base_path
    path = os.path.join(target_dir, "workflow_commons.json")
    with open(path, "w", encoding="utf-8") as f:
        json.dump(wf_dict, f)
    return path
+
+
class TestUploadLogFile:
    """Collection of tests for the UploadLogFile command.

    The six scenarios shared ~20 lines of identical mock wiring and log-dir /
    zip assertions; those are factored into private helpers so each test only
    states what differs (SE result, failover result, zip behaviour).
    """

    # Content written by the prodconf fixtures, reused by the assertions below.
    PRODCONF_JSON_CONTENT = '{"foo": "bar"}'
    PRODCONF_PY_CONTENT = 'foo = "bar"'

    @pytest.fixture
    def uplogfile(self, mocker, wf_commons):
        """Fixture for UploadLogFile module.

        Removes the produced zip and the "unzipped" scratch directory on teardown.
        """
        uplogfile = UploadLogFile()

        yield uplogfile

        Path(f"{wf_commons['prod_job_id']}.zip").unlink(missing_ok=True)
        shutil.rmtree("unzipped", ignore_errors=True)

    @pytest.fixture
    def prodconf_json(self):
        """prodconf.json file fixture."""
        filename = "prodConf_example.json"

        with open(filename, "w") as f:
            f.write(self.PRODCONF_JSON_CONTENT)

        yield filename

        Path(filename).unlink(missing_ok=True)

    @pytest.fixture
    def prodconf_py(self):
        """prodconf.py file fixture."""
        filename = "prodConf_example.py"

        with open(filename, "w") as f:
            f.write(self.PRODCONF_PY_CONTENT)

        yield filename

        Path(filename).unlink(missing_ok=True)

    # Helpers
    def _setup_mocks(self, mocker, wf_commons, se_result, transfer_result, mock_set_job_parameter=False):
        """Patch the command's Request/FailoverTransfer/JobReport and the SE backend.

        :param se_result: return value of the StorageElement execute method.
        :param transfer_result: return value of transferAndRegisterFile.
        :param mock_set_job_parameter: also mock JobReport.setJobParameter.
        :returns: (mocked SE method, failover transfer, job report).
        """
        mockSEMethod = mocker.patch(
            "DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__executeMethod",
            return_value=se_result,
        )
        mock_request = mocker.patch("dirac_cwl.commands.upload_log_file.Request")
        mock_failover = mocker.patch("dirac_cwl.commands.upload_log_file.FailoverTransfer")
        mock_job_report = mocker.patch("dirac_cwl.commands.upload_log_file.JobReport")

        req = Request()
        mock_request.return_value = req

        failover = FailoverTransfer(req)
        mocker.patch.object(failover, "transferAndRegisterFile", return_value=transfer_result)
        mock_failover.return_value = failover

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        if mock_set_job_parameter:
            mocker.patch.object(jr, "setJobParameter")
        mock_job_report.return_value = jr

        return mockSEMethod, failover, jr

    def _execute_and_reload(self, uplogfile, wf_commons):
        """Run the command against job_path and return the updated workflow commons."""
        wf_commons_path = create_workflow_commons(wf_commons)
        uplogfile.execute(job_path)
        with open(wf_commons_path, "r", encoding="utf-8") as f:
            return json.load(f)

    def _check_populated_log_dir(self, updated_wf_commons, prodconf_json, prodconf_py):
        """Assert the log dir exists, holds both prodconf files intact with 0o755 modes."""
        assert updated_wf_commons["log_dir"] != ""
        log_dir = Path(updated_wf_commons["log_dir"])
        assert log_dir.exists()
        assert log_dir.is_dir()
        assert log_dir.joinpath(prodconf_json).exists()
        assert log_dir.joinpath(prodconf_json).read_text() == self.PRODCONF_JSON_CONTENT
        assert log_dir.joinpath(prodconf_py).exists()
        assert log_dir.joinpath(prodconf_py).read_text() == self.PRODCONF_PY_CONTENT

        for file in log_dir.iterdir():
            assert file.stat().st_mode & 0o777 == 0o755

        return log_dir

    def _check_unzipped(self, zip_file, updated_wf_commons, prodconf_json, prodconf_py):
        """Unzip the produced archive and assert both prodconf files survived intact."""
        zipfile.ZipFile(zip_file, "r").extractall("unzipped")
        unzipped = Path("unzipped").joinpath(updated_wf_commons["prod_job_id"])
        assert unzipped.joinpath(prodconf_json).exists()
        assert unzipped.joinpath(prodconf_py).exists()
        assert unzipped.joinpath(prodconf_json).read_text() == self.PRODCONF_JSON_CONTENT
        assert unzipped.joinpath(prodconf_py).read_text() == self.PRODCONF_PY_CONTENT

    # Test Scenarios
    def test_uploadLogFile_success(self, mocker, uplogfile, wf_commons, prodconf_json, prodconf_py):
        """Test successful execution of UploadLogFile module."""
        log_url = "notImportant"
        mockSEMethod, failover, jr = self._setup_mocks(
            mocker,
            wf_commons,
            se_result=S_OK({"Failed": [], "Successful": {log_url: log_url}}),
            transfer_result=S_OK(),
            mock_set_job_parameter=True,
        )

        uplogfile.request = Request()

        # Execute the module
        updated_wf_commons = self._execute_and_reload(uplogfile, wf_commons)

        # Check the log directory
        self._check_populated_log_dir(updated_wf_commons, prodconf_json, prodconf_py)

        # Check the generated zip file
        zip_file = Path(f"{updated_wf_commons['prod_job_id']}.zip")
        assert zip_file.exists()
        self._check_unzipped(zip_file, updated_wf_commons, prodconf_json, prodconf_py)

        # Make sure that StorageElement was called twice (getURL, putFile)
        assert mockSEMethod.call_count == 2

        # Make sure that the request was not created
        assert failover.transferAndRegisterFile.call_count == 0

        # Make sure the application status was not changed
        assert jr.setApplicationStatus.call_count == 0

        # Check the jobReport.setJobParameter arguments
        assert jr.setJobParameter.call_count == 1
        assert jr.setJobParameter.call_args_list
        params = jr.setJobParameter.call_args_list[0][0]
        assert params[0] == "Log URL"
        # NOTE: stray "f" prefix removed from the original literal (no placeholder).
        assert params[1] == "Log file directory"

        shutil.rmtree(updated_wf_commons["log_dir"], ignore_errors=True)

    def test_uploadLogFile_noOutputFile(self, mocker, uplogfile, wf_commons):
        """Test execution of UploadLogFile module when there are no output files.

        * populateLogDirectory should return an error, because there are no "successful" files in log_dir.
        """
        mockSEMethod, failover, jr = self._setup_mocks(
            mocker,
            wf_commons,
            se_result=S_OK({"Failed": [], "Successful": {"notImportant": "notImportant"}}),
            transfer_result=S_OK(),
            mock_set_job_parameter=True,
        )

        # Execute the module
        updated_wf_commons = self._execute_and_reload(uplogfile, wf_commons)

        # Check the log directory: it must exist but stay empty
        assert updated_wf_commons["log_dir"] != ""
        log_dir = Path(updated_wf_commons["log_dir"])
        assert log_dir.exists()
        assert log_dir.is_dir()
        assert not list(log_dir.iterdir())

        # No zip can be produced without log files
        assert not Path(f"{updated_wf_commons['prod_job_id']}.zip").exists()

        # StorageElement was never reached
        assert mockSEMethod.call_count == 0

        # Make sure that the request was not created
        assert failover.transferAndRegisterFile.call_count == 0

        # Make sure the application status was changed
        assert jr.setApplicationStatus.call_count == 1
        assert jr.setJobParameter.call_count == 0

        shutil.rmtree(updated_wf_commons["log_dir"], ignore_errors=True)

    def test_uploadLogFile_zipException(self, mocker, uplogfile, wf_commons, prodconf_json, prodconf_py):
        """Test execution of UploadLogFile module when an exception is raised when zipping files."""
        mocker.patch("LHCbDIRAC.Workflow.Modules.UploadLogFile.zipFiles", side_effect=OSError)
        mockSEMethod, failover, jr = self._setup_mocks(
            mocker,
            wf_commons,
            se_result=S_OK({"Failed": [], "Successful": {"notImportant": "notImportant"}}),
            transfer_result=S_OK(),
        )

        # Execute the module
        updated_wf_commons = self._execute_and_reload(uplogfile, wf_commons)

        # Check the log directory
        self._check_populated_log_dir(updated_wf_commons, prodconf_json, prodconf_py)

        # Zipping failed, so no archive and no upload attempt
        assert not Path(f"{updated_wf_commons['prod_job_id']}.zip").exists()
        assert mockSEMethod.call_count == 0
        assert failover.transferAndRegisterFile.call_count == 0

        # Make sure the application status was changed
        assert jr.setApplicationStatus.call_count == 1

        shutil.rmtree(updated_wf_commons["log_dir"], ignore_errors=True)

    def test_uploadLogFile_zipError(self, mocker, uplogfile, wf_commons, prodconf_json, prodconf_py):
        """Test execution of UploadLogFile module when an error is occurring when zipping files."""
        mocker.patch("LHCbDIRAC.Workflow.Modules.UploadLogFile.zipFiles", return_value=S_ERROR("Error"))
        mockSEMethod, failover, jr = self._setup_mocks(
            mocker,
            wf_commons,
            se_result=S_OK({"Failed": [], "Successful": {"notImportant": "notImportant"}}),
            transfer_result=S_OK(),
        )

        # Execute the module
        updated_wf_commons = self._execute_and_reload(uplogfile, wf_commons)

        # Check the log directory
        self._check_populated_log_dir(updated_wf_commons, prodconf_json, prodconf_py)

        # Zipping failed, so no archive and no upload attempt
        assert not Path(f"{updated_wf_commons['prod_job_id']}.zip").exists()
        assert mockSEMethod.call_count == 0
        assert failover.transferAndRegisterFile.call_count == 0

        # Make sure the application status was changed
        assert jr.setApplicationStatus.call_count == 1

        shutil.rmtree(updated_wf_commons["log_dir"], ignore_errors=True)

    def test_uploadLogFile_SEError(self, mocker, uplogfile, wf_commons, prodconf_json, prodconf_py):
        """Test execution of UploadLogFile module when an error is occurring when calling StorageElement."""
        mocker.patch("LHCbDIRAC.Workflow.Modules.UploadLogFile.getDestinationSEList", return_value=["SE1", "SE2"])
        mockSEMethod, failover, jr = self._setup_mocks(
            mocker,
            wf_commons,
            se_result=S_ERROR("Error"),
            transfer_result=S_OK({"uploadedSE": "SE1"}),
        )

        # Execute the module
        updated_wf_commons = self._execute_and_reload(uplogfile, wf_commons)

        # Check the log directory
        self._check_populated_log_dir(updated_wf_commons, prodconf_json, prodconf_py)

        # Check the generated zip file
        zip_file = Path(f"{updated_wf_commons['prod_job_id']}.zip")
        assert zip_file.exists()
        self._check_unzipped(zip_file, updated_wf_commons, prodconf_json, prodconf_py)

        # Make sure that StorageElement was called twice (getURL, putFile)
        assert mockSEMethod.call_count == 2

        # Make sure that the request was created
        assert failover.transferAndRegisterFile.call_count == 1

        # The failover request carries a LogUpload followed by a RemoveFile
        operations = updated_wf_commons["request_dict"]["Operations"]

        assert len(operations) == 2
        assert operations[0]["Type"] == "LogUpload"
        assert len(operations[0]["Files"]) == 1
        assert operations[0]["Files"][0]["LFN"] == updated_wf_commons["log_lfn_path"]

        assert operations[1]["Type"] == "RemoveFile"
        assert len(operations[1]["Files"]) == 1
        assert operations[1]["Files"][0]["LFN"] == updated_wf_commons["log_lfn_path"]

        # Make sure the application status was not changed
        assert jr.setApplicationStatus.call_count == 0

        shutil.rmtree(updated_wf_commons["log_dir"], ignore_errors=True)

    def test_uploadLogFile_transferError(self, mocker, uplogfile, wf_commons, prodconf_json, prodconf_py):
        """Test execution of UploadLogFile module when calling StorageElement and FailoverTransfer fail."""
        mocker.patch("LHCbDIRAC.Workflow.Modules.UploadLogFile.getDestinationSEList", return_value=["SE1", "SE2"])
        # NOTE: the original test patched JobReport twice; the helper patches it once.
        mockSEMethod, failover, jr = self._setup_mocks(
            mocker,
            wf_commons,
            se_result=S_ERROR("Error"),
            transfer_result=S_ERROR("Error"),
        )

        # Execute the module
        updated_wf_commons = self._execute_and_reload(uplogfile, wf_commons)

        # Check the log directory
        self._check_populated_log_dir(updated_wf_commons, prodconf_json, prodconf_py)

        # Check the generated zip file
        zip_file = Path(f"{updated_wf_commons['prod_job_id']}.zip")
        assert zip_file.exists()
        self._check_unzipped(zip_file, updated_wf_commons, prodconf_json, prodconf_py)

        # Make sure that StorageElement was called twice (getURL, putFile)
        assert mockSEMethod.call_count == 2

        # The failover transfer was attempted but failed
        assert failover.transferAndRegisterFile.call_count == 1

        # No operation could be queued
        assert len(updated_wf_commons["request_dict"]["Operations"]) == 0

        # Make sure the application status was changed
        assert jr.setApplicationStatus.call_count == 1

        shutil.rmtree(updated_wf_commons["log_dir"], ignore_errors=True)
+
+
+class TestBookkeepingReport:
+ """Collection of tests for the BookkeepingReport command."""
+
+ @pytest.fixture
+ def bookkeeping_file(self, wf_commons):
+ """Bookkeeping report file fixture."""
+ path = os.path.join(job_path, f"bookkeeping_{wf_commons['command_id']}.xml")
+ yield path
+ Path(path).unlink(missing_ok=True)
+
+ @pytest.fixture
+ def bk_report(self, mocker):
+ """BookkeepingReport mocked command.
+
+ Cleans created files after execution.
+ """
+ mock_get_n_procs = mocker.patch("dirac_cwl.commands.bookkeeping_report.getNumberOfProcessorsToUse")
+
+ mock_get_n_procs.return_value = number_of_processors
+
+ yield BookeepingReport()
+
+ Path("00209455_00001537_1").unlink(missing_ok=True)
+ Path("00209455_00001537_1.sim").unlink(missing_ok=True)
+
+ def test_bkreport_prod_mcsimulation_success(self, bk_report, wf_commons, bookkeeping_file, xml_summary_file):
+ """Test successful execution of BookkeepingReport module."""
+ wf_commons["application_name"] = "Gauss"
+ wf_commons["job_type"] = "MCSimulation"
+
+ wf_commons["bookkeeping_LFNs"] = [
+ "/lhcb/LHCb/Collision16/SIM/00209455/0000/00209455_00001537_1.sim",
+ ]
+ wf_commons["production_output_data"] = [
+ "/lhcb/LHCb/Collision16/SIM/00209455/0000/00209455_00001537_1.sim",
+ ]
+
+ wf_commons["start_time"] = time.time() - 1000
+
+ # Input data should be None as we use Gauss (MCSimulation)
+ wf_commons["outputs"] = [
+ {"outputDataName": "00209455_00001537_1.sim", "outputDataType": "sim"},
+ ]
+ Path(wf_commons["outputs"][0]["outputDataName"]).touch()
+
+ # Mock the XMLSummary object
+ xml_content = dedent("""\
+
+
+ True
+ finalize
+
+ 2129228.0
+
+
+
+
+ 1
+ 77
+ 2644
+ 6262
+ 8391
+ 963
+ 18139
+ 45169
+ 52237
+ 79
+
+
+
+ """)
+
+ wf_commons["xml_summary_path"] = xml_summary_file
+ xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute the module
+ bk_report.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ xml_path = bookkeeping_file
+ assert Path(xml_path).exists(), "XML report file not created."
+
+ # Validate the XML file
+ tree = ET.parse(xml_path)
+ root = tree.getroot()
+
+ # Extract fields from the XML and perform further operations
+ assert root.tag == "Job", "Root tag should be Job."
+ assert root.attrib["ConfigName"] == updated_wf_commons["config_name"]
+ assert root.attrib["ConfigVersion"] == updated_wf_commons["config_version"]
+ assert root.attrib["Date"]
+ assert root.attrib["Time"]
+
+ assert get_typed_parameter_value("ProgramName", root) == updated_wf_commons["application_name"]
+ assert get_typed_parameter_value("ProgramVersion", root) == updated_wf_commons["application_version"]
+ assert get_typed_parameter_value("DiracVersion", root) == LHCbDIRAC.__version__
+ assert get_typed_parameter_value("Name", root) == updated_wf_commons["command_id"]
+ assert float(get_typed_parameter_value("ExecTime", root)) > 1000
+ assert get_typed_parameter_value("CPUTIME", root) == "0"
+
+ assert get_typed_parameter_value("FirstEventNumber", root) == "1"
+ assert get_typed_parameter_value("StatisticsRequested", root) == str(updated_wf_commons["number_of_events"])
+ assert get_typed_parameter_value("NumberOfEvents", root) == str(xf_o.outputEventsTotal)
+
+ assert get_typed_parameter_value("Production", root) == updated_wf_commons["production_id"]
+ assert get_typed_parameter_value("DiracJobId", root) == str(updated_wf_commons["job_id"])
+ assert get_typed_parameter_value("Location", root) == siteName()
+ assert get_typed_parameter_value("JobStart", root)
+ assert get_typed_parameter_value("JobEnd", root)
+ assert get_typed_parameter_value("JobType", root) == updated_wf_commons["job_type"]
+
+ assert get_typed_parameter_value("WorkerNode", root)
+ assert get_typed_parameter_value("WNMEMORY", root)
+ assert get_typed_parameter_value("WNCPUPOWER", root)
+ assert get_typed_parameter_value("WNMODEL", root)
+ assert get_typed_parameter_value("WNCACHE", root)
+ assert get_typed_parameter_value("WNCPUHS06", root)
+ assert get_typed_parameter_value("NumberOfProcessors", root) == str(number_of_processors)
+
+ # Input should be empty
+ input_file = root.find("InputFile")
+ assert input_file is None, "InputFile element should not be present."
+
+ # Output should not be empty
+ output_files = root.findall("OutputFile")
+ assert output_files, "No OutputFile elements found."
+
+ first_output_details = get_output_file_details(output_files[0])
+ assert first_output_details["Name"] == updated_wf_commons["production_output_data"][0]
+ assert first_output_details["TypeName"] == "SIM"
+ assert first_output_details["Parameters"]["FileSize"] == "0"
+ assert "CreationDate" in first_output_details["Parameters"]
+ assert "MD5Sum" in first_output_details["Parameters"]
+ assert "Guid" in first_output_details["Parameters"]
+
+ assert len(output_files) == 1
+
+ def test_bkreport_prod_mcsimulation_noinputoutput_success(
+ self, bk_report, wf_commons, bookkeeping_file, xml_summary_file
+ ):
+ """Test successful execution of BookkeepingReport module.
+
+ * No input files because wf_commons["stepInputData is empty
+ * No output files because wf_commons["stepOutputData is empty
+ * No pool xml catalog
+ * Simulation conditions because the application used is Gauss
+ """
+ # Mock the BookkeepingReport module
+ wf_commons["application_name"] = "Gauss"
+ wf_commons["job_type"] = "MCSimulation"
+
+ # This was obtained from a previous module (likely GaudiApplication)
+ wf_commons["bookkeeping_LFNs"] = [
+ "/lhcb/LHCb/Collision16/SIM/00209455/0000/00209455_00001537_1",
+ ]
+ wf_commons["production_output_data"] = [
+ "/lhcb/LHCb/Collision16/SIM/00209455/0000/00209455_00001537_1",
+ ]
+
+ wf_commons["start_time"] = time.time() - 1000
+
+ # Mock the XMLSummary object
+ xml_content = dedent("""\
+
+
+ True
+ finalize
+
+ 2129228.0
+
+
+
+
+ 1
+ 77
+ 2644
+ 6262
+ 8391
+ 963
+ 18139
+ 45169
+ 52237
+ 79
+
+
+
+ """)
+
+ wf_commons["xml_summary_path"] = xml_summary_file
+ xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute the module
+ bk_report.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ # Check if the XML report file is created
+ xml_path = bookkeeping_file
+ assert Path(xml_path).exists(), "XML report file not created."
+
+ # Validate the XML file
+ tree = ET.parse(xml_path)
+ root = tree.getroot()
+
+ # Extract fields from the XML and perform further operations
+ assert root.tag == "Job", "Root tag should be Job."
+ assert root.attrib["ConfigName"] == updated_wf_commons["config_name"]
+ assert root.attrib["ConfigVersion"] == updated_wf_commons["config_version"]
+ assert root.attrib["Date"]
+ assert root.attrib["Time"]
+
+ assert get_typed_parameter_value("ProgramName", root) == updated_wf_commons["application_name"]
+ assert get_typed_parameter_value("ProgramVersion", root) == updated_wf_commons["application_version"]
+ assert get_typed_parameter_value("DiracVersion", root) == LHCbDIRAC.__version__
+ assert get_typed_parameter_value("Name", root) == updated_wf_commons["command_id"]
+ assert float(get_typed_parameter_value("ExecTime", root)) > 1000
+ assert get_typed_parameter_value("CPUTIME", root) == "0"
+
+ assert get_typed_parameter_value("FirstEventNumber", root) == "1"
+ assert get_typed_parameter_value("StatisticsRequested", root) == str(updated_wf_commons["number_of_events"])
+ assert get_typed_parameter_value("NumberOfEvents", root) == str(xf_o.outputEventsTotal)
+
+ assert get_typed_parameter_value("Production", root) == updated_wf_commons["production_id"]
+ assert get_typed_parameter_value("DiracJobId", root) == str(updated_wf_commons["job_id"])
+ assert get_typed_parameter_value("Location", root) == siteName()
+ assert get_typed_parameter_value("JobStart", root)
+ assert get_typed_parameter_value("JobEnd", root)
+ assert get_typed_parameter_value("JobType", root) == updated_wf_commons["job_type"]
+
+ assert get_typed_parameter_value("WorkerNode", root)
+ assert get_typed_parameter_value("WNMEMORY", root)
+ assert get_typed_parameter_value("WNCPUPOWER", root)
+ assert get_typed_parameter_value("WNMODEL", root)
+ assert get_typed_parameter_value("WNCACHE", root)
+ assert get_typed_parameter_value("WNCPUHS06", root)
+ assert get_typed_parameter_value("NumberOfProcessors", root) == str(number_of_processors)
+
+ # Input should be empty
+ input_file = root.find("InputFile")
+ assert input_file is None, "InputFile element should not be present."
+
+ # Output should be empty
+ output_file = root.find("OutputFile")
+ assert output_file is None, "OutputFile element should not be present."
+
+ def test_bk_report_prod_mcreconstruction_success(self, bk_report, wf_commons, bookkeeping_file, xml_summary_file):
+ """Test successful execution of BookkeepingReport module."""
+ wf_commons["application_name"] = "Boole"
+ wf_commons["job_type"] = "MCReconstruction"
+
+ wf_commons["bookkeeping_LFNs"] = [
+ "/lhcb/LHCb/Collision16/SIM/00209455/0000/00209455_00001537_1",
+ ]
+ wf_commons["log_file_path"] = "/lhcb/LHCb/Collision16/LOG/00209455/0000/"
+ wf_commons["production_output_data"] = [
+ "/lhcb/LHCb/Collision16/SIM/00209455/0000/00209455_00001537_1",
+ ]
+
+ wf_commons["start_time"] = time.time() - 1000
+
+ wf_commons["inputs"] = ["/lhcb/MC/2018/SIM/00212581/0000/00212581_00001446_1.sim"]
+ wf_commons["outputs"] = [
+ {"outputDataName": "00209455_00001537_1", "outputDataType": "digi"},
+ ]
+ wf_commons["application_log"] = "application.log"
+ Path(wf_commons["application_log"]).touch()
+ Path(wf_commons["outputs"][0]["outputDataName"]).touch()
+
+ # Mock the XMLSummary object
+ xml_content = dedent("""\
+
+
+ True
+ finalize
+
+ 866104.0
+
+
+ 200
+
+
+
+ """)
+
+ wf_commons["xml_summary_path"] = xml_summary_file
+
+ xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute the module
+ bk_report.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ # Check if the XML report file is created
+ xml_path = bookkeeping_file
+ assert Path(xml_path).exists(), "XML report file not created."
+
+ # Validate the XML file
+ tree = ET.parse(xml_path)
+ root = tree.getroot()
+
+ # Extract fields from the XML and perform further operations
+ assert root.tag == "Job", "Root tag should be Job."
+ assert root.attrib["ConfigName"] == updated_wf_commons["config_name"]
+ assert root.attrib["ConfigVersion"] == updated_wf_commons["config_version"]
+ assert root.attrib["Date"]
+ assert root.attrib["Time"]
+
+ assert get_typed_parameter_value("ProgramName", root) == updated_wf_commons["application_name"]
+ assert get_typed_parameter_value("ProgramVersion", root) == updated_wf_commons["application_version"]
+ assert get_typed_parameter_value("DiracVersion", root) == LHCbDIRAC.__version__
+ assert get_typed_parameter_value("Name", root) == updated_wf_commons["command_id"]
+ assert float(get_typed_parameter_value("ExecTime", root)) > 1000
+ assert get_typed_parameter_value("CPUTIME", root) == "0"
+
+ assert get_typed_parameter_value("FirstEventNumber", root) == "1"
+ assert get_typed_parameter_value("StatisticsRequested", root) == str(updated_wf_commons["number_of_events"])
+ assert get_typed_parameter_value("NumberOfEvents", root) == str(xf_o.inputEventsTotal)
+
+ assert get_typed_parameter_value("Production", root) == updated_wf_commons["production_id"]
+ assert get_typed_parameter_value("DiracJobId", root) == str(updated_wf_commons["job_id"])
+ assert get_typed_parameter_value("Location", root) == siteName()
+ assert get_typed_parameter_value("JobStart", root)
+ assert get_typed_parameter_value("JobEnd", root)
+ assert get_typed_parameter_value("JobType", root) == updated_wf_commons["job_type"]
+
+ assert get_typed_parameter_value("WorkerNode", root)
+ assert get_typed_parameter_value("WNMEMORY", root)
+ assert get_typed_parameter_value("WNCPUPOWER", root)
+ assert get_typed_parameter_value("WNMODEL", root)
+ assert get_typed_parameter_value("WNCACHE", root)
+ assert get_typed_parameter_value("WNCPUHS06", root)
+ assert get_typed_parameter_value("NumberOfProcessors", root) == str(number_of_processors)
+
+ # Input should not be empty
+ input_file = root.find("InputFile")
+ assert input_file is not None, "InputFile element should be present."
+
+ # Output should not be empty
+ output_files = root.findall("OutputFile")
+ assert output_files, "No OutputFile elements found."
+
+ first_output_details = get_output_file_details(output_files[0])
+ assert first_output_details["Name"] == updated_wf_commons["production_output_data"][0]
+ assert first_output_details["TypeName"] == "DIGI"
+ assert first_output_details["Parameters"]["FileSize"] == "0"
+ assert "CreationDate" in first_output_details["Parameters"]
+ assert "MD5Sum" in first_output_details["Parameters"]
+ assert "Guid" in first_output_details["Parameters"]
+
+ assert len(output_files) == 1
+
+ # Boole/MCReconstruction is not a simulation step, so sim conditions should be absent
+ simulation_condition = root.find("SimulationCondition")
+ assert simulation_condition is None, "SimulationCondition element should not be present."
+
+ def test_bkreport_previousError_success(self, mocker, bk_report, wf_commons, bookkeeping_file):
+ """Test previous command failure."""
+ wf_commons["application_name"] = "Gauss"
+ wf_commons["application_version"] = wf_commons["config_version"]
+ wf_commons["job_type"] = "MCSimulation"
+ wf_commons["step_status"] = S_ERROR()
+
+ create_workflow_commons(wf_commons)
+
+ bk_report.execute(job_path)
+
+ assert not os.path.exists(bookkeeping_file)
+
+
+class TestFailoverRequest:
+ """Collection of tests for the FailoverRequest command."""
+
+ @pytest.fixture
+ def failover_request(self, mocker: MockerFixture):
+ """FailoverRequest mocked command.
+
+ Cleans created files after execution.
+ """
+ mocker.patch("dirac_cwl.commands.failover_request.RequestValidator")
+
+ yield FailoverRequest()
+
+ def test_failoverRequest_success(self, mocker: MockerFixture, failover_request, wf_commons, request_file):
+ """Test successful execution of FailoverRequest module."""
+ problematic_files = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000287_1.ew.dst",
+ ]
+
+ mock_file_report = mocker.patch("dirac_cwl.commands.failover_request.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.failover_request.JobReport")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "getFiles", side_effect=[problematic_files, []])
+ mocker.patch.object(fr, "commit", return_value=S_OK("Anything"))
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setApplicationStatus")
+ mock_job_report.return_value = jr
+
+ wf_commons["inputs"] = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ "/lhcb/data/2011/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ ] + problematic_files
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ failover_request.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ # Check the FileReport calls: the problematic file should not appear
+ # The input files should be set to "Processed"
+ assert fr.setFileStatus.call_count == 2
+ args = fr.setFileStatus.call_args_list
+ assert args[0][0][0] == int(updated_wf_commons["production_id"])
+ assert args[0][0][1] == updated_wf_commons["inputs"][0]
+ assert args[0][0][2] == "Processed"
+
+ assert args[1][0][0] == int(updated_wf_commons["production_id"])
+ assert args[1][0][1] == updated_wf_commons["inputs"][1]
+ assert args[1][0][2] == "Processed"
+
+ # Make sure the application is successfully finished
+ assert jr.setApplicationStatus.call_count == 1
+ assert jr.setApplicationStatus.call_args[0][0] == "Job Finished Successfully"
+
+ # Make sure the forward DISET is not generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 0
+
+ # Make sure the request json does not exist
+ assert not Path(request_file).exists()
+
+ def test_failoverRequest_commitFailure1(self, mocker: MockerFixture, failover_request, wf_commons, request_file):
+ """Test execution of FailoverRequest module when the fileReport.commit() fails.
+
+ In this context, the second call to commit() will work, so the request should not be generated.
+ """
+ problematic_files = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000287_1.ew.dst",
+ ]
+ # Both calls to getFiles() will return the problematic files because the commit did not work
+ mock_file_report = mocker.patch("dirac_cwl.commands.failover_request.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.failover_request.JobReport")
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setApplicationStatus")
+ mock_job_report.return_value = jr
+
+ fr = FileReport()
+ mocker.patch.object(fr, "getFiles", side_effect=[problematic_files, problematic_files])
+ mocker.patch.object(fr, "commit", side_effect=[S_ERROR("Error"), S_OK(None)])
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ wf_commons["inputs"] = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ "/lhcb/data/2011/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ ] + problematic_files
+
+ # Execute the module
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ failover_request.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ # Check the FileReport calls: the problematic file should not appear
+ # The input files should be set to "Processed"
+ assert fr.setFileStatus.call_count == 2
+ args = fr.setFileStatus.call_args_list
+ assert args[0][0][0] == int(updated_wf_commons["production_id"])
+ assert args[0][0][1] == updated_wf_commons["inputs"][0]
+ assert args[0][0][2] == "Processed"
+
+ assert args[1][0][0] == int(updated_wf_commons["production_id"])
+ assert args[1][0][1] == updated_wf_commons["inputs"][1]
+ assert args[1][0][2] == "Processed"
+
+ # Make sure the application is successfully finished
+ assert jr.setApplicationStatus.call_count == 1
+ assert jr.setApplicationStatus.call_args[0][0] == "Job Finished Successfully"
+
+ # Make sure the forward DISET is not generated (the second commit succeeded)
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 0
+
+ # Make sure the request json does not exist
+ assert not Path(request_file).exists()
+
+ def test_failoverRequest_commitFailure2(self, mocker: MockerFixture, failover_request, wf_commons, request_file):
+ """Test execution of FailoverRequest module when the fileReport.commit() fails.
+
+ In this context, the second call to commit() will fail, so the request should be generated.
+ """
+ problematic_files = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000287_1.ew.dst",
+ ]
+ # Both calls to getFiles() will return the problematic files because the commit did not work
+ mock_file_report = mocker.patch("dirac_cwl.commands.failover_request.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.failover_request.JobReport")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "getFiles", side_effect=[problematic_files, problematic_files])
+ mocker.patch.object(fr, "commit", side_effect=[S_ERROR("Error"), S_ERROR("Error")])
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setApplicationStatus")
+ mock_job_report.return_value = jr
+
+ wf_commons["inputs"] = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ "/lhcb/data/2011/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ ] + problematic_files
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute the module
+ failover_request.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ # Check the FileReport calls: the problematic file should not appear
+ # The input files should be set to "Processed"
+ assert fr.setFileStatus.call_count == 2
+ args = fr.setFileStatus.call_args_list
+ assert args[0][0][0] == int(updated_wf_commons["production_id"])
+ assert args[0][0][1] == updated_wf_commons["inputs"][0]
+ assert args[0][0][2] == "Processed"
+
+ assert args[1][0][0] == int(updated_wf_commons["production_id"])
+ assert args[1][0][1] == updated_wf_commons["inputs"][1]
+ assert args[1][0][2] == "Processed"
+
+ # Make sure the application is successfully finished
+ assert jr.setApplicationStatus.call_count == 1
+ assert jr.setApplicationStatus.call_args[0][0] == "Job Finished Successfully"
+
+ # Make sure the failover SetFileStatus request is generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+
+ assert len(operations) == 1
+ assert operations[0]["Type"] == "SetFileStatus"
+
+ # Make sure the request json exists
+ assert Path(request_file).exists()
+
+ def test_failoverRequest_previousError_fail(
+ self, mocker: MockerFixture, failover_request, wf_commons, request_file
+ ):
+ """Test FailoverRequest with an intentional failure."""
+ problematic_files = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000287_1.ew.dst",
+ ]
+ mock_file_report = mocker.patch("dirac_cwl.commands.failover_request.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.failover_request.JobReport")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "getFiles", side_effect=[problematic_files, problematic_files])
+ mocker.patch.object(fr, "commit", side_effect=[S_ERROR("Error"), S_ERROR("Error")])
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setApplicationStatus")
+ mock_job_report.return_value = jr
+
+ wf_commons["inputs"] = [
+ "/lhcb/data/2010/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ "/lhcb/data/2011/EW.DST/00008380/0000/00008380_00000281_1.ew.dst",
+ ] + problematic_files
+
+ # Intentional error
+ wf_commons["step_status"] = S_ERROR()
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute the module
+ failover_request.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ # Check the FileReport calls: the problematic file should not appear
+ # The input files should be set to "Unused"
+ assert fr.setFileStatus.call_count == 2
+ args = fr.setFileStatus.call_args_list
+ assert args[0][0][0] == int(updated_wf_commons["production_id"])
+ assert args[0][0][1] == updated_wf_commons["inputs"][0]
+ assert args[0][0][2] == "Unused"
+
+ assert args[1][0][0] == int(updated_wf_commons["production_id"])
+ assert args[1][0][1] == updated_wf_commons["inputs"][1]
+ assert args[1][0][2] == "Unused"
+
+ # Make sure the application is not reported as a success
+ assert jr.setApplicationStatus.call_count == 0
+
+ # Make sure the forward DISET is not generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 0
+
+ # Make sure the request json does not exist
+ assert not Path(request_file).exists()
+
+
+class TestUploadOutputDataFile:
+ """Collection of tests for the UploadOutputData command."""
+
+ OUTPUT_DATA_STEP = "1"
+
+ @pytest.fixture
+ def sim_file(self, wf_commons):
+ """Sim result file fixture."""
+ path = f"{wf_commons['production_id']}_{wf_commons['prod_job_id']}_{self.OUTPUT_DATA_STEP}.sim"
+ with open(path, "w") as f:
+ f.write("Sim file content")
+ yield path
+ Path(path).unlink(missing_ok=True)
+
+ @pytest.fixture
+ def bk_file(self, wf_commons):
+ """Bookkeeping file fixture."""
+ path = os.path.join(job_path, f"bookkeeping_{wf_commons['production_id']}_{wf_commons['prod_job_id']}.xml")
+ with open(path, "w") as f:
+ f.write("Bookkeeping file content")
+ yield path
+ Path(path).unlink(missing_ok=True)
+
+ @pytest.fixture
+ def watchdog_file(self, wf_commons):
+ """Watchdog file fixture."""
+ path = os.path.join(job_path, "DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK")
+ yield path
+ Path(path).unlink(missing_ok=True)
+
+ @pytest.fixture
+ def upload_output(self, mocker, wf_commons):
+ """Fixture for UploadOutputData module."""
+ mocker.patch("dirac_cwl.commands.upload_output_data.getDestinationSEList", return_value=["CERN", "CNAF"])
+ mocker.patch("LHCbDIRAC.Workflow.Modules.UploadOutputData.getDestinationSEList", return_value=["CERN", "CNAF"])
+
+ # Mock FileCatalog
+ mocker.patch("DIRAC.Resources.Catalog.FileCatalog.FileCatalog.__init__", return_value=None)
+ mocker.patch("DIRAC.Resources.Catalog.FileCatalog.FileCatalog.__getattr__", return_value=lambda x: S_OK({}))
+
+ if "ProductionOutputData" in wf_commons:
+ wf_commons.pop("ProductionOutputData")
+
+ yield UploadOutputData()
+
+ # Test Scenarios
+ def test_uploadOutputData_success(self, mocker, upload_output, wf_commons, sim_file, bk_file):
+ """Test successful execution of UploadOutputData module.
+
+ * The output should be uploaded and registered in the bookkeeping system.
+ * The bookkeeping report should be sent and the job parameter updated.
+ """
+ mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
+ mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
+ mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
+ mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setJobParameter")
+ mock_job_report.return_value = jr
+
+ req = Request()
+ mock_request.return_value = req
+
+ failover = FailoverTransfer(req)
+ mocker.patch.object(
+ failover, "transferAndRegisterFile", return_value=S_OK({"uploadedSE": "CERN", "lfn": sim_file})
+ )
+ mocker.patch.object(failover, "transferAndRegisterFileFailover")
+ mock_failover.return_value = failover
+
+ bkClient = BookkeepingClient()
+ mocker.patch.object(bkClient, "sendXMLBookkeepingReport", return_value=S_OK())
+ mock_bk_client.return_value = bkClient
+
+ wf_commons["outputs"] = [
+ {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
+ ]
+ wf_commons["output_SEs"] = {
+ "SIM": "Tier1-Buffer",
+ }
+ wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute module
+ upload_output.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ assert fr.setFileStatus.call_count == 0
+ assert bkClient.sendXMLBookkeepingReport.call_count == 1
+
+ assert failover.transferAndRegisterFile.call_count == 1
+ assert failover.transferAndRegisterFile.call_args[1]["fileName"] == sim_file
+
+ assert failover.transferAndRegisterFileFailover.call_count == 0
+
+ assert jr.setJobParameter.call_count == 1
+ assert jr.setJobParameter.call_args[0][0] == "UploadedOutputData"
+ assert jr.setJobParameter.call_args[0][1] == sim_file
+
+ # Make sure the forward DISET is not generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 0
+
+ def test_uploadOutputData_failedBKRegistration(self, mocker, upload_output, wf_commons, sim_file, bk_file):
+ """Test execution of UploadOutputData module when the BK registration fails.
+
+ * The output should be uploaded but not registered in the bookkeeping system now.
+ """
+ mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
+ mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
+ mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
+ mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setJobParameter")
+ mock_job_report.return_value = jr
+
+ req = Request()
+ mock_request.return_value = req
+
+ failover = FailoverTransfer(req)
+ mocker.patch.object(
+ failover, "transferAndRegisterFile", return_value=S_OK({"uploadedSE": "CERN", "lfn": sim_file})
+ )
+ mocker.patch.object(failover, "transferAndRegisterFileFailover")
+ mock_failover.return_value = failover
+
+ bkClient = BookkeepingClient()
+ mocker.patch.object(bkClient, "sendXMLBookkeepingReport", return_value=S_OK())
+ mock_bk_client.return_value = bkClient
+
+ # BK registration failure
+ mocker.patch(
+ "DIRAC.Resources.Catalog.FileCatalog.FileCatalog.__getattr__",
+ return_value=lambda x: S_OK(
+ {
+ "Failed": {
+ f"/lhcb/{wf_commons['config_name']}/{wf_commons['config_version']}/"
+ f"SIM/00000{wf_commons['production_id']}/0000/{sim_file}": "error"
+ }
+ }
+ ),
+ )
+
+ wf_commons["outputs"] = [
+ {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
+ ]
+ wf_commons["output_SEs"] = {
+ "SIM": "Tier1-Buffer",
+ }
+ wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute module
+ upload_output.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ assert fr.setFileStatus.call_count == 0
+ assert bkClient.sendXMLBookkeepingReport.call_count == 1
+
+ assert failover.transferAndRegisterFile.call_count == 1
+ assert failover.transferAndRegisterFile.call_args[1]["fileName"] == sim_file
+
+ assert failover.transferAndRegisterFileFailover.call_count == 0
+
+ assert jr.setJobParameter.call_count == 1
+ assert jr.setJobParameter.call_args[0][0] == "UploadedOutputData"
+ assert jr.setJobParameter.call_args[0][1] == sim_file
+
+ # Make sure the request is generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 1
+
+ assert operations[0]["Type"] == "RegisterFile"
+ assert operations[0]["Catalog"] == "BookkeepingDB"
+ assert sim_file in operations[0]["Files"][0]["LFN"]
+
+ def test_uploadOutputData_postponeBKRegistration(self, mocker, upload_output, wf_commons, sim_file, bk_file):
+ """Test execution of UploadOutputData module when there is already a RegisterFile operation on the output.
+
+ * The output should be uploaded but not registered in the bookkeeping system now.
+ """
+ mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
+ mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
+ mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
+ mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setJobParameter")
+ mock_job_report.return_value = jr
+
+ # Mock a previous failover request: the BK registration should be postponed and added to the request
+ req = Request()
+ file1 = File()
+ file1.LFN = (
+ f"/lhcb/{wf_commons['config_name']}/{wf_commons['config_version']}"
+ f"/SIM/00000{wf_commons['production_id']}/0000/{sim_file}"
+ )
+ o1 = Operation()
+ o1.Type = "RegisterFile"
+ o1.addFile(file1)
+ req.addOperation(o1)
+ mock_request.return_value = req
+
+ failover = FailoverTransfer(req)
+ mocker.patch.object(
+ failover, "transferAndRegisterFile", return_value=S_OK({"uploadedSE": "CERN", "lfn": sim_file})
+ )
+ mocker.patch.object(failover, "transferAndRegisterFileFailover")
+ mock_failover.return_value = failover
+
+ bkClient = BookkeepingClient()
+ mocker.patch.object(bkClient, "sendXMLBookkeepingReport", return_value=S_OK())
+ mock_bk_client.return_value = bkClient
+
+ wf_commons["outputs"] = [
+ {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
+ ]
+ wf_commons["output_SEs"] = {
+ "SIM": "Tier1-Buffer",
+ }
+ wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute module
+ upload_output.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ assert fr.setFileStatus.call_count == 0
+ assert bkClient.sendXMLBookkeepingReport.call_count == 1
+
+ assert failover.transferAndRegisterFile.call_count == 1
+ assert failover.transferAndRegisterFile.call_args[1]["fileName"] == sim_file
+
+ assert failover.transferAndRegisterFileFailover.call_count == 0
+
+ assert jr.setJobParameter.call_count == 1
+ assert jr.setJobParameter.call_args[0][0] == "UploadedOutputData"
+ assert jr.setJobParameter.call_args[0][1] == sim_file
+
+ # Make sure the request is generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 2
+
+ assert operations[0]["Type"] == "RegisterFile"
+ assert operations[0]["Catalog"] is None
+ assert sim_file in operations[0]["Files"][0]["LFN"]
+
+ assert operations[1]["Type"] == "RegisterFile"
+ assert operations[1]["Catalog"] == "BookkeepingDB"
+ assert sim_file in operations[1]["Files"][0]["LFN"]
+
+ def test_uploadOutputData_errorBKRegistration(self, mocker, upload_output, wf_commons, sim_file, bk_file):
+ """Test execution of UploadOutputData module when an error occurs during the BK registration.
+
+ * The output should be uploaded but not registered in the bookkeeping system at all.
+ """
+ mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
+ mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
+ mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
+ mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setJobParameter")
+ mock_job_report.return_value = jr
+
+ req = Request()
+ mock_request.return_value = req
+
+ failover = FailoverTransfer(req)
+ mocker.patch.object(
+ failover, "transferAndRegisterFile", return_value=S_OK({"uploadedSE": "CERN", "lfn": sim_file})
+ )
+ mocker.patch.object(failover, "transferAndRegisterFileFailover")
+ mock_failover.return_value = failover
+
+ bkClient = BookkeepingClient()
+ mocker.patch.object(bkClient, "sendXMLBookkeepingReport", return_value=S_OK())
+ mock_bk_client.return_value = bkClient
+
+ # BK registration failure
+ mocker.patch(
+ "DIRAC.Resources.Catalog.FileCatalog.FileCatalog.__getattr__",
+ return_value=lambda x: S_ERROR("Error registering file"),
+ )
+
+ wf_commons["outputs"] = [
+ {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
+ ]
+ wf_commons["output_SEs"] = {
+ "SIM": "Tier1-Buffer",
+ }
+ wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP
+
+ # BK registration failure
+ mocker.patch(
+ "DIRAC.Resources.Catalog.FileCatalog.FileCatalog.__getattr__",
+ return_value=lambda x: S_ERROR("Error registering file"),
+ )
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute module
+ with pytest.raises(WorkflowProcessingException, match="Could Not Perform BK Registration"):
+ upload_output.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ assert fr.setFileStatus.call_count == 0
+ assert bkClient.sendXMLBookkeepingReport.call_count == 1
+
+ assert failover.transferAndRegisterFile.call_count == 1
+ assert failover.transferAndRegisterFile.call_args[1]["fileName"] == sim_file
+
+ assert failover.transferAndRegisterFileFailover.call_count == 0
+
+ assert jr.setJobParameter.call_count == 1
+ assert jr.setJobParameter.call_args[0][0] == "UploadedOutputData"
+ assert jr.setJobParameter.call_args[0][1] == sim_file
+
+ # Make sure the request is not generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 0
+
+ def test_uploadOutputData_failUpload1(self, mocker, upload_output, wf_commons, sim_file, bk_file):
+ """Test execution of UploadOutputData module when there is a 1st failure to upload outputs.
+
+ * The output should be uploaded correctly with the second method.
+ """
+ mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
+ mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
+ mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
+ mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setJobParameter")
+ mock_job_report.return_value = jr
+
+ req = Request()
+ mock_request.return_value = req
+
+ failover = FailoverTransfer(req)
+ mocker.patch.object(failover, "transferAndRegisterFile", return_value=S_ERROR("Error uploading file"))
+ mocker.patch.object(failover, "transferAndRegisterFileFailover", return_value=S_OK())
+ mock_failover.return_value = failover
+
+ bkClient = BookkeepingClient()
+ mocker.patch.object(bkClient, "sendXMLBookkeepingReport", return_value=S_OK())
+ mock_bk_client.return_value = bkClient
+
+ wf_commons["outputs"] = [
+ {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
+ ]
+ wf_commons["output_SEs"] = {
+ "SIM": "Tier1-Buffer",
+ }
+ wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute module
+ upload_output.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ assert fr.setFileStatus.call_count == 0
+ assert bkClient.sendXMLBookkeepingReport.call_count == 1
+
+ assert failover.transferAndRegisterFile.call_count == 1
+ assert failover.transferAndRegisterFile.call_args[1]["fileName"] == sim_file
+
+ assert failover.transferAndRegisterFileFailover.call_count == 1
+ assert failover.transferAndRegisterFileFailover.call_args[1]["fileName"] == sim_file
+
+ assert jr.setJobParameter.call_count == 1
+ assert jr.setJobParameter.call_args[0][0] == "UploadedOutputData"
+ assert jr.setJobParameter.call_args[0][1] == sim_file
+
+ # Make sure the request is not generated
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 0
+
+ def test_uploadOutputData_failUpload2(self, mocker, upload_output, wf_commons, sim_file, bk_file):
+ """Test execution of UploadOutputData module when there are 2 failures to upload outputs.
+
+ * A request should be generated to upload outputs later.
+ """
+ mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
+ mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
+ mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
+ mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
+ mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")
+
+ fr = FileReport()
+ mocker.patch.object(fr, "setFileStatus")
+ mock_file_report.return_value = fr
+
+ jr = JobReport(wf_commons["job_id"])
+ mocker.patch.object(jr, "setJobParameter")
+ mock_job_report.return_value = jr
+
+ # Mock a previous failover request:
+ # Add the end of the execution, o1 should be removed
+ req = Request()
+
+ file1 = File()
+ file1.LFN = (
+ f"/lhcb/{wf_commons['config_name']}/{wf_commons['config_version']}"
+ f"/SIM/00000{wf_commons['production_id']}/0000/{sim_file}"
+ )
+ file2 = File()
+ file2.LFN = "/another/file.txt"
+
+ o1 = Operation()
+ o1.Type = "RegisterFile"
+ o1.addFile(file1)
+ o2 = Operation()
+ o2.Type = "RegisterFile"
+ o2.addFile(file2)
+
+ req.addOperation(o1)
+ req.addOperation(o2)
+
+ mock_request.return_value = req
+
+ failover = FailoverTransfer(req)
+ mocker.patch.object(failover, "transferAndRegisterFile", return_value=S_ERROR("Error uploading file"))
+ mocker.patch.object(failover, "transferAndRegisterFileFailover", return_value=S_ERROR("Error uploading file"))
+ mock_failover.return_value = failover
+
+ bkClient = BookkeepingClient()
+ mocker.patch.object(bkClient, "sendXMLBookkeepingReport", return_value=S_OK())
+ mock_bk_client.return_value = bkClient
+
+ wf_commons["outputs"] = [
+ {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
+ ]
+ wf_commons["output_SEs"] = {
+ "SIM": "Tier1-Buffer",
+ }
+ wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP
+
+ wf_commons_path = create_workflow_commons(wf_commons)
+
+ # Execute module
+ with pytest.raises(WorkflowProcessingException, match="Failed to upload output data"):
+ upload_output.execute(job_path)
+
+ with open(wf_commons_path, "r", encoding="utf-8") as f:
+ updated_wf_commons = json.load(f)
+
+ assert fr.setFileStatus.call_count == 0
+ assert bkClient.sendXMLBookkeepingReport.call_count == 1
+
+ assert failover.transferAndRegisterFile.call_count == 1
+ assert failover.transferAndRegisterFile.call_args[1]["fileName"] == sim_file
+
+ assert failover.transferAndRegisterFileFailover.call_count == 1
+ assert failover.transferAndRegisterFileFailover.call_args[1]["fileName"] == sim_file
+
+ assert jr.setJobParameter.call_count == 0
+
+ # Make sure the request is generated
+
+ operations = updated_wf_commons["request_dict"]["Operations"]
+ assert len(operations) == 2
+
+ assert operations[0]["Type"] == "RegisterFile"
+ assert operations[0]["TargetSE"] is None
+ assert operations[0]["SourceSE"] is None
+ assert sim_file not in operations[0]["Files"][0]["LFN"]
+
+ assert operations[1]["Type"] == "RemoveFile"
+ assert operations[1]["TargetSE"] is None
+ assert operations[1]["SourceSE"] is None
+ assert sim_file in operations[1]["Files"][0]["LFN"]
+
def test_uploadOutputData_BKReportError(self, mocker, upload_output, wf_commons, sim_file, bk_file):
    """Test execution of UploadOutputData module when the BK report cannot be sent.

    * The output should be uploaded and registered in the bookkeeping system.
    * The bookkeeping report should be added to a failover request.
    """
    # Patch the module-level collaborators so no real services are contacted.
    mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
    mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
    mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
    mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
    mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")

    fr = FileReport()
    mocker.patch.object(fr, "setFileStatus")
    mock_file_report.return_value = fr

    jr = JobReport(wf_commons["job_id"])
    mocker.patch.object(jr, "setJobParameter")
    mock_job_report.return_value = jr

    req = Request()
    mock_request.return_value = req

    # The direct upload succeeds, so the failover path must never be used.
    failover = FailoverTransfer(req)
    mocker.patch.object(
        failover, "transferAndRegisterFile", return_value=S_OK({"uploadedSE": "CERN", "lfn": sim_file})
    )
    mocker.patch.object(failover, "transferAndRegisterFileFailover", return_value=S_ERROR("Error uploading file"))
    mock_failover.return_value = failover

    bkClient = BookkeepingClient()
    # Mock the sendXMLBookkeepingReport method
    # Returning a failed result WITH an rpcStub is what makes the module
    # forward the BK report via a ForwardDISET operation.
    mocker.patch.object(
        bkClient,
        "sendXMLBookkeepingReport",
        return_value={"OK": False, "rpcStub": "Error", "Message": "Error sending BK report"},
    )
    mock_bk_client.return_value = bkClient

    wf_commons["outputs"] = [
        {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
    ]
    wf_commons["output_SEs"] = {
        "SIM": "Tier1-Buffer",
    }
    wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP

    wf_commons_path = create_workflow_commons(wf_commons)

    # Execute module
    upload_output.execute(job_path)

    with open(wf_commons_path, "r", encoding="utf-8") as f:
        updated_wf_commons = json.load(f)

    assert fr.setFileStatus.call_count == 0
    assert bkClient.sendXMLBookkeepingReport.call_count == 1

    assert failover.transferAndRegisterFile.call_count == 1
    assert failover.transferAndRegisterFile.call_args[1]["fileName"] == sim_file

    assert failover.transferAndRegisterFileFailover.call_count == 0

    assert jr.setJobParameter.call_count == 1
    assert jr.setJobParameter.call_args[0][0] == "UploadedOutputData"
    assert jr.setJobParameter.call_args[0][1] == sim_file

    # A request IS generated here, but only to replay the BK report later.
    operations = updated_wf_commons["request_dict"]["Operations"]
    assert len(operations) == 1

    assert operations[0]["Type"] == "ForwardDISET"
+
def test_uploadOutputData_withDescendents(self, mocker, upload_output, wf_commons, sim_file, bk_file):
    """Test execution of UploadOutputData module when there is already file descendants.

    It means that the input data has already been processed.
    * The output should not be uploaded and registered in the bookkeeping system.
    * The bookkeeping report should not be sent.
    """
    # Patch the module-level collaborators so no real services are contacted.
    mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
    mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
    mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
    mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
    mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")

    # Pretend the inputs already have descendants: the job is a duplicate.
    mocker.patch(
        "dirac_cwl.commands.upload_output_data.getFileDescendents", return_value=S_OK(["/path/to/other/file.txt"])
    )

    fr = FileReport()
    mocker.patch.object(fr, "setFileStatus")
    mock_file_report.return_value = fr

    jr = JobReport(wf_commons["job_id"])
    mocker.patch.object(jr, "setJobParameter")
    mock_job_report.return_value = jr

    req = Request()
    mock_request.return_value = req

    failover = FailoverTransfer(req)
    mocker.patch.object(
        failover, "transferAndRegisterFile", return_value=S_OK({"uploadedSE": "CERN", "lfn": sim_file})
    )
    mocker.patch.object(failover, "transferAndRegisterFileFailover")
    mock_failover.return_value = failover

    bkClient = BookkeepingClient()
    mocker.patch.object(bkClient, "sendXMLBookkeepingReport")
    mock_bk_client.return_value = bkClient

    wf_commons["outputs"] = [
        {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
    ]
    wf_commons["output_SEs"] = {
        "SIM": "Tier1-Buffer",
    }
    # Input data is required so the descendant check has something to inspect.
    wf_commons["inputs"] = ["AnyInputFile1"]
    wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP

    wf_commons_path = create_workflow_commons(wf_commons)

    # Execute module
    with pytest.raises(WorkflowProcessingException):
        upload_output.execute(job_path)

    with open(wf_commons_path, "r", encoding="utf-8") as f:
        updated_wf_commons = json.load(f)

    # The input file is flagged (once) against the production instead of
    # uploading anything.
    assert fr.setFileStatus.call_count == 1
    assert fr.setFileStatus.call_args[0][0] == int(wf_commons["production_id"])
    assert bkClient.sendXMLBookkeepingReport.call_count == 0

    assert failover.transferAndRegisterFile.call_count == 0
    assert failover.transferAndRegisterFileFailover.call_count == 0

    assert jr.setJobParameter.call_count == 0

    # Make sure the request is not generated
    operations = updated_wf_commons["request_dict"]["Operations"]
    assert len(operations) == 0
+
def test_uploadOutputData_noOutput(self, mocker, upload_output, wf_commons, sim_file):
    """Test UploadOutputData when the declared output file does not exist on disk.

    * execute() should raise an OSError before any upload or BK interaction.
    * No failover request should be generated.
    """
    # Patch the module-level collaborators so no real services are contacted.
    mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
    mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
    mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
    mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
    mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")

    fr = FileReport()
    mocker.patch.object(fr, "setFileStatus")
    mock_file_report.return_value = fr

    jr = JobReport(wf_commons["job_id"])
    mocker.patch.object(jr, "setJobParameter")
    mock_job_report.return_value = jr

    req = Request()
    mock_request.return_value = req

    failover = FailoverTransfer(req)
    mocker.patch.object(
        failover, "transferAndRegisterFile", return_value=S_OK({"uploadedSE": "CERN", "lfn": sim_file})
    )
    mocker.patch.object(failover, "transferAndRegisterFileFailover")
    mock_failover.return_value = failover

    bkClient = BookkeepingClient()
    mocker.patch.object(bkClient, "sendXMLBookkeepingReport")
    mock_bk_client.return_value = bkClient

    wf_commons["outputs"] = [
        {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
    ]
    wf_commons["output_SEs"] = {
        "SIM": "Tier1-Buffer",
    }
    wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP

    # Remove the output so execute() cannot find it.
    Path(sim_file).unlink(missing_ok=True)

    wf_commons_path = create_workflow_commons(wf_commons)

    # Execute module
    with pytest.raises(OSError, match="Output data not found"):
        upload_output.execute(job_path)

    with open(wf_commons_path, "r", encoding="utf-8") as f:
        updated_wf_commons = json.load(f)

    assert fr.setFileStatus.call_count == 0
    assert bkClient.sendXMLBookkeepingReport.call_count == 0

    assert failover.transferAndRegisterFile.call_count == 0
    assert failover.transferAndRegisterFileFailover.call_count == 0

    assert jr.setJobParameter.call_count == 0

    # Make sure the request is not generated
    operations = updated_wf_commons["request_dict"]["Operations"]
    assert len(operations) == 0
+
def test_uploadOutputData_previousError_fail(self, mocker, upload_output, wf_commons, sim_file):
    """Test UploadOutputData with an intentional failure."""
    # Patch the module-level collaborators so no real services are contacted.
    mock_file_report = mocker.patch("dirac_cwl.commands.upload_output_data.FileReport")
    mock_job_report = mocker.patch("dirac_cwl.commands.upload_output_data.JobReport")
    mock_request = mocker.patch("dirac_cwl.commands.upload_output_data.Request")
    mock_failover = mocker.patch("dirac_cwl.commands.upload_output_data.FailoverTransfer")
    mock_bk_client = mocker.patch("dirac_cwl.commands.upload_output_data.BookkeepingClient")

    fr = FileReport()
    mocker.patch.object(fr, "setFileStatus")
    mock_file_report.return_value = fr

    jr = JobReport(wf_commons["job_id"])
    mocker.patch.object(jr, "setJobParameter")
    mock_job_report.return_value = jr

    req = Request()
    mock_request.return_value = req

    failover = FailoverTransfer(req)
    mocker.patch.object(failover, "transferAndRegisterFile")
    mocker.patch.object(failover, "transferAndRegisterFileFailover")
    mock_failover.return_value = failover

    bkClient = BookkeepingClient()
    mocker.patch.object(bkClient, "sendXMLBookkeepingReport")
    mock_bk_client.return_value = bkClient

    wf_commons["outputs"] = [
        {"outputDataName": sim_file, "outputDataType": "sim", "outputBKType": "SIM", "stepName": "Gauss_1"}
    ]
    wf_commons["output_SEs"] = {
        "SIM": "Tier1-Buffer",
    }
    wf_commons["output_data_step"] = self.OUTPUT_DATA_STEP

    # A failed previous step makes the module a no-op (no raise, no upload).
    wf_commons["step_status"] = S_ERROR()

    Path(sim_file).unlink(missing_ok=True)

    wf_commons_path = create_workflow_commons(wf_commons)

    upload_output.execute(job_path)

    with open(wf_commons_path, "r", encoding="utf-8") as f:
        updated_wf_commons = json.load(f)

    assert fr.setFileStatus.call_count == 0
    assert bkClient.sendXMLBookkeepingReport.call_count == 0

    assert failover.transferAndRegisterFile.call_count == 0
    assert failover.transferAndRegisterFileFailover.call_count == 0

    assert jr.setJobParameter.call_count == 0

    # Make sure the request is not generated
    operations = updated_wf_commons["request_dict"]["Operations"]
    assert len(operations) == 0
+
+
class TestAnalyseXmlSummary:
    """Collection of tests for the AnalyseXmlSummary command.

    NOTE(review): the ``xml_content`` literals below appear to have lost their
    XML markup (tags such as ``<success>``/``<step>``/``<file>`` are missing)
    — confirm against the original fixtures before relying on them.
    """

    @pytest.fixture
    def axlf(self, mocker):
        """Fixture for AnalyseXmlSummary module."""
        yield AnalyseXmlSummary()

    # Test scenarios
    def test_analyseXMLSummary_basic_success(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test basic success scenario."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["number_of_events"] = -1

        # Sanity-check the parsed summary before running the module.
        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        # No file should have been flagged as problematic.
        assert fr.statusDict == {}

    def test_analyseXMLSummary_previousError_success(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test success scenario with previous error: stepStatus = S_ERROR()."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        # A failed previous step short-circuits the analysis.
        wf_commons["step_status"] = S_ERROR()
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        axlf.execute(job_path)

        # Module is a no-op when the step already failed.
        jr.setApplicationStatus.assert_not_called()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_badInput_success(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test success scenario with part and fail input not part of the input data list."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200
        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_partInput_success(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test success scenario with part input part of the input data list."""
        # Input is 'part' and is part of the input data list but the number of events is not -1
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200



        """)
        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["inputs"] = ["00012478_00000532_1.sim"]
        # number_of_events != -1 makes a 'part' input acceptable.
        wf_commons["number_of_events"] = 1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_notSuccess_fail(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test failure scenario with success=False."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        False
        finalize

        866104.0


        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "False"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        with pytest.raises(WorkflowProcessingException):
            axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_badStep_fail(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test failure scenario with step != finalize."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        execute

        866104.0


        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "execute"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        with pytest.raises(WorkflowProcessingException):
            axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_badOutput_fail(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test failure scenario with output status != full."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        # Output status is not 'full', so _outputsOK() must be False here.
        assert not xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        with pytest.raises(WorkflowProcessingException):
            axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_badInput_fail(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test failure scenario with input status = mult."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        # An input counted multiple times ('mult') must fail the analysis.
        assert xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        with pytest.raises(WorkflowProcessingException):
            axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_badInput2_fail(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test failure scenario with an unknown input status (weoweo)."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        # Unknown statuses are tallied as 'other' and must fail the analysis.
        assert xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        with pytest.raises(WorkflowProcessingException):
            axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        assert fr.statusDict == {}

    def test_analyseXMLSummary_badInput3_fail(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test failure scenario with input status = fail."""
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200
        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["inputs"] = ["00012478_00000532_1.sim"]
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        with pytest.raises(WorkflowProcessingException):
            axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        # The failed input file is reported as Problematic.
        assert fr.statusDict == {"00012478_00000532_1.sim": "Problematic"}

    def test_analyseXMLSummary_badInput4_fail(self, mocker, axlf, wf_commons, xml_summary_file):
        """Test failure scenario with input status = part."""
        # Input is 'part' and is part of the input data list but the number of events is -1 (by default)
        mock_file_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.FileReport")
        mock_job_report = mocker.patch("dirac_cwl.commands.analyze_xml_summary.JobReport")

        fr = FileReport()

        jr = JobReport(wf_commons["job_id"])
        mocker.patch.object(jr, "setApplicationStatus")
        jr.setApplicationStatus.return_value = S_OK()

        mock_file_report.return_value = fr
        mock_job_report.return_value = jr

        xml_content = dedent("""

        True
        finalize

        866104.0


        200
        200



        """)

        xf_o = prepare_XMLSummary_file(xml_summary_file, xml_content)
        wf_commons["xml_summary_path"] = xml_summary_file
        wf_commons["inputs"] = ["00012478_00000532_1.sim"]
        wf_commons["number_of_events"] = -1

        assert xf_o.success == "True"
        assert xf_o.step == "finalize"
        assert xf_o._outputsOK()
        assert not xf_o.inputFileStats["mult"]
        assert not xf_o.inputFileStats["other"]

        create_workflow_commons(wf_commons)
        with pytest.raises(WorkflowProcessingException):
            axlf.execute(job_path)

        jr.setApplicationStatus.assert_called_once()
        # The partially-read input file is reported as Problematic.
        assert fr.statusDict == {"00012478_00000532_1.sim": "Problematic"}
+
+class TestWorkflowAccounting:
+ """Collection of tests for the WorkflowAccounting command."""
+
@pytest.fixture
def accounting(self, mocker):
    """Fixture providing a fresh WorkflowAccounting module instance."""
    yield WorkflowAccounting()
+
+ # Test Scenarios
# Test Scenarios
def test_accounting_success(self, mocker, accounting, wf_commons, xml_summary_file):
    """Test successful execution of WorkflowAccounting module."""
    # Patch the DataStoreClient so no accounting record is really sent.
    mock_data_store = mocker.patch("dirac_cwl.commands.workflow_accounting.DataStoreClient")
    dsc = DataStoreClient()
    mocker.patch.object(dsc, "addRegister")
    mock_data_store.return_value = dsc

    wf_commons["application_name"] = "Gauss"
    # NOTE(review): the XML markup in this literal appears stripped — confirm
    # against the original summary fixture.
    xml_content = dedent("""

    True
    finalize

    866104.0


    200



    """)

    prepare_XMLSummary_file(xml_summary_file, xml_content)

    # All BK metadata required by the accounting record is provided.
    wf_commons["xml_summary_path"] = xml_summary_file
    wf_commons["bk_step_id"] = "12345"
    wf_commons["step_proc_pass"] = "Sim09m"
    wf_commons["event_type"] = "23103003"

    create_workflow_commons(wf_commons)

    accounting.execute(job_path)

    # Make sure the dsc was called
    dsc.addRegister.assert_called_once()
+
def test_accounting_noApplicationName_fail(self, mocker, accounting, wf_commons, xml_summary_file):
    """Test WorkflowAccounting when there is no application name in step commons."""
    # Patch the DataStoreClient so no accounting record is really sent.
    mock_data_store = mocker.patch("dirac_cwl.commands.workflow_accounting.DataStoreClient")
    dsc = DataStoreClient()
    mocker.patch.object(dsc, "addRegister")
    mock_data_store.return_value = dsc

    xml_content = dedent("""

    True
    finalize

    866104.0


    200



    """)

    prepare_XMLSummary_file(xml_summary_file, xml_content)

    # Removing the application name must make execute() fail.
    wf_commons.pop("application_name")
    wf_commons["xml_summary_path"] = xml_summary_file

    create_workflow_commons(wf_commons)

    with pytest.raises(WorkflowProcessingException):
        accounting.execute(job_path)

    assert not dsc.addRegister.called, "No accounting data should be added."
+
def test_accounting_incompleteData(self, mocker, accounting, wf_commons, xml_summary_file):
    """Test WorkflowAccounting failure when required BK metadata is missing.

    ``bk_step_id``, ``step_proc_pass`` and ``event_type`` are not set, so
    execute() must raise and no accounting record may be registered.
    """
    # Patch the DataStoreClient so no accounting record is really sent.
    mock_data_store = mocker.patch("dirac_cwl.commands.workflow_accounting.DataStoreClient")
    dsc = DataStoreClient()
    mocker.patch.object(dsc, "addRegister")
    mock_data_store.return_value = dsc

    xml_content = dedent("""

    True
    finalize

    866104.0


    200



    """)

    prepare_XMLSummary_file(xml_summary_file, xml_content)

    wf_commons["xml_summary_path"] = xml_summary_file
    wf_commons["application_name"] = "Gauss"

    create_workflow_commons(wf_commons)

    with pytest.raises(WorkflowProcessingException):
        accounting.execute(job_path)

    assert not dsc.addRegister.called, "No accounting data should be added."
+
def test_accounting_previousError_fail(self, mocker, accounting, wf_commons, xml_summary_file):
    """Test WorkflowAccounting with an intentional failure."""
    # Patch the DataStoreClient so no accounting record is really sent.
    mock_data_store = mocker.patch("dirac_cwl.commands.workflow_accounting.DataStoreClient")
    dsc = DataStoreClient()
    mocker.patch.object(dsc, "addRegister")
    mock_data_store.return_value = dsc

    xml_content = dedent("""

    True
    finalize

    866104.0


    200



    """)

    prepare_XMLSummary_file(xml_summary_file, xml_content)

    wf_commons["xml_summary_path"] = xml_summary_file
    wf_commons["application_name"] = "Gauss"
    wf_commons["bk_step_id"] = "12345"
    wf_commons["step_proc_pass"] = "Sim09m"
    wf_commons["event_type"] = "23103003"
    # Previous step failed — accounting is still expected to be registered.
    wf_commons["step_status"] = S_ERROR()

    create_workflow_commons(wf_commons)

    accounting.execute(job_path)

    assert dsc.addRegister.called, "Accounting data should be added."