From 1ab36df23f0c8861630fa877e26c4724e2242428 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 6 Mar 2025 23:33:04 +0000 Subject: [PATCH 01/37] Remove CodeCarbon from Python API --- poetry.lock | 708 +++----------------------------- pyproject.toml | 2 +- simvue/api/objects/folder.py | 2 +- simvue/api/request.py | 2 +- simvue/config/parameters.py | 2 - simvue/config/user.py | 8 +- simvue/eco.py | 132 ------ simvue/eco/__init__.py | 14 + simvue/eco/api_client.py | 142 +++++++ simvue/eco/config.py | 34 ++ simvue/eco/emissions_monitor.py | 306 ++++++++++++++ simvue/run.py | 113 ++--- simvue/utilities.py | 5 + 13 files changed, 621 insertions(+), 849 deletions(-) delete mode 100644 simvue/eco.py create mode 100644 simvue/eco/__init__.py create mode 100644 simvue/eco/api_client.py create mode 100644 simvue/eco/config.py create mode 100644 simvue/eco/emissions_monitor.py diff --git a/poetry.lock b/poetry.lock index 952cf7e0..25b1782a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -12,49 +12,6 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] -[[package]] -name = "anyio" -version = "4.8.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, - {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} - -[package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = 
["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] -trio = ["trio (>=0.26.1)"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - [[package]] name = "attrs" version = "25.1.0" @@ -87,87 +44,6 @@ files = [ {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] -[[package]] -name = "cffi" -version = "1.17.1" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "platform_python_implementation != \"PyPy\"" -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = 
"cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = 
"cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, -] - -[package.dependencies] -pycparser = "*" - [[package]] name = "charset-normalizer" version = "3.4.1" @@ -285,36 +161,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "codecarbon" -version = "2.8.3" -description = "" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "codecarbon-2.8.3-py3-none-any.whl", hash = "sha256:d3204852ad0c83d94d0f16b7d922e7f540c1e5f488d911f3e75408fe29f4ef4c"}, - {file = "codecarbon-2.8.3.tar.gz", hash = "sha256:037dd5afa1c5f60154f893ecd1631e0c849786edcfc9ff34a7ef467707891269"}, -] - -[package.dependencies] -arrow = "*" -click = "*" -fief-client = {version = "*", extras = ["cli"]} -pandas = "*" -prometheus-client = "*" -psutil = "*" -py-cpuinfo = "*" -pynvml = "*" -questionary = "*" -rapidfuzz = "*" -requests = "*" -rich = "*" -typer = "*" - -[package.extras] -viz = ["dash", "dash-bootstrap-components (<1.0.0)", "fire"] - [[package]] name = "colorama" version = "0.4.6" @@ -482,60 +328,6 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] -[[package]] -name = "cryptography" -version = "44.0.1" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main"] -files = [ - {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, - {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, - {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, - {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, - {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, - {file = 
"cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, - {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, - {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, - {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, - {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, - {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, - {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] -docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] -pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] -sdist = ["build (>=1.0.0)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] -test-randomorder = ["pytest-randomly"] - [[package]] name = "cycler" version = "0.12.1" @@ -553,6 +345,18 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "decorator" 
+version = "5.2.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, +] + [[package]] name = "deepmerge" version = "2.0" @@ -611,7 +415,7 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["dev"] markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, @@ -636,28 +440,6 @@ files = [ [package.extras] testing = ["hatch", "pre-commit", "pytest", "tox"] -[[package]] -name = "fief-client" -version = "0.20.0" -description = "Fief Client for Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "fief_client-0.20.0-py3-none-any.whl", hash = "sha256:425f40cc7c45c651daec63da402e033c53d91dcaa3f9bf208873fd8692fc16dc"}, - {file = "fief_client-0.20.0.tar.gz", hash = "sha256:dbfb906d03c4a5402ceac5c843aa4708535fb6f5d5c1c4e263ec06fbbbc434d7"}, -] - -[package.dependencies] -httpx = ">=0.21.3,<0.28.0" -jwcrypto = ">=1.4,<2.0.0" -yaspin = {version = "*", optional = true, markers = "extra == \"cli\""} - -[package.extras] -cli = ["yaspin"] -fastapi = ["fastapi", "makefun (>=1.14.0,<2.0.0)"] -flask = ["flask"] - [[package]] name = "fire" version = "0.7.0" @@ -758,6 +540,37 @@ ufo = ["fs (>=2.2.0,<3)"] unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""] woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] +[[package]] +name = "future" +version = "1.0.0" +description = "Clean single-source 
support for Python 3 and 2" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +files = [ + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, +] + +[[package]] +name = "geocoder" +version = "1.38.1" +description = "Geocoder is a simple and consistent geocoding library." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "geocoder-1.38.1-py2.py3-none-any.whl", hash = "sha256:a733e1dfbce3f4e1a526cac03aadcedb8ed1239cf55bd7f3a23c60075121a834"}, + {file = "geocoder-1.38.1.tar.gz", hash = "sha256:c9925374c961577d0aee403b09e6f8ea1971d913f011f00ca70c76beaf7a77e7"}, +] + +[package.dependencies] +click = "*" +future = "*" +ratelim = "*" +requests = "*" +six = "*" + [[package]] name = "gitdb" version = "4.0.12" @@ -792,66 +605,6 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3.8\"", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions ; python_version < \"3.11\""] -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -description = "A minimal low-level HTTP client." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] - -[[package]] -name = "httpx" -version = "0.27.2" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] - [[package]] name = "humanfriendly" version = "10.0" @@ -938,22 +691,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "jwcrypto" -version = "1.5.6" -description = "Implementation of JOSE Web standards" -optional = false -python-versions = ">= 3.8" -groups = ["main"] -files = [ - {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, - {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, -] - -[package.dependencies] -cryptography = ">=3.4" -typing-extensions = 
">=4.5.0" - [[package]] name = "kiwisolver" version = "1.4.8" @@ -1045,31 +782,6 @@ files = [ {file = "kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e"}, ] -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - [[package]] name = "markupsafe" version = "3.0.2" @@ -1200,18 +912,6 @@ python-dateutil = ">=2.7" [package.extras] dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - [[package]] name = "msgpack" version = "1.1.0" @@ -1381,18 
+1081,6 @@ files = [ {file = "numpy-2.2.3.tar.gz", hash = "sha256:dbdc15f0c81611925f382dfa97b3bd0bc2c1ce19d4fe50482cb0ddc12ba30020"}, ] -[[package]] -name = "nvidia-ml-py" -version = "12.570.86" -description = "Python Bindings for the NVIDIA Management Library" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "nvidia_ml_py-12.570.86-py3-none-any.whl", hash = "sha256:58907de35a845abd13dcb227f18298f3b5dd94a72d04c9e594e77711e95c0b51"}, - {file = "nvidia_ml_py-12.570.86.tar.gz", hash = "sha256:0508d4a0c7b6d015cf574530b95a62ed4fc89da3b8b47e1aefe6777db170ec8b"}, -] - [[package]] name = "packaging" version = "24.2" @@ -1619,36 +1307,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "prometheus-client" -version = "0.21.1" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, - {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.50" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.8.0" -groups = ["main"] -files = [ - {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, - {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, -] - -[package.dependencies] -wcwidth = "*" - [[package]] name = "psutil" version = "6.1.1" @@ -1692,31 +1350,6 @@ files = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -[[package]] 
-name = "py-cpuinfo" -version = "9.0.0" -description = "Get CPU info with pure Python" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, - {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "platform_python_implementation != \"PyPy\"" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - [[package]] name = "pydantic" version = "2.10.6" @@ -1851,21 +1484,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pygments" -version = "2.19.1" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - [[package]] name = "pyjwt" version = "2.10.1" @@ -1884,24 +1502,6 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] -[[package]] -name = "pynvml" -version = "12.0.0" -description = "Python utilities for the NVIDIA Management Library" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pynvml-12.0.0-py3-none-any.whl", hash = "sha256:fdff84b62a27dbe98e08e1a647eb77342bef1aebe0878bcd15e99a83fcbecb9e"}, - {file = "pynvml-12.0.0.tar.gz", hash = "sha256:299ce2451a6a17e6822d6faee750103e25b415f06f59abb8db65d30f794166f5"}, -] - -[package.dependencies] -nvidia-ml-py = ">=12.0.0,<13.0.0a0" - -[package.extras] -test = ["pytest (>=3.6)", "pytest-cov", "pytest-runner"] - [[package]] name = "pyparsing" version = "3.2.1" @@ -2077,21 +1677,6 @@ files = [ {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] -[[package]] -name = "questionary" -version = "2.1.0" -description = "Python library to build pretty command line user prompts ⭐️" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec"}, - {file = "questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587"}, -] - -[package.dependencies] -prompt_toolkit = ">=2.0,<4.0" - [[package]] name = "randomname" 
version = "0.2.1" @@ -2107,105 +1692,19 @@ files = [ fire = "*" [[package]] -name = "rapidfuzz" -version = "3.12.1" -description = "rapid fuzzy string matching" +name = "ratelim" +version = "0.1.6" +description = "Makes it easy to respect rate limits." optional = false -python-versions = ">=3.9" +python-versions = "*" groups = ["main"] files = [ - {file = "rapidfuzz-3.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbb7ea2fd786e6d66f225ef6eef1728832314f47e82fee877cb2a793ebda9579"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ae41361de05762c1eaa3955e5355de7c4c6f30d1ef1ea23d29bf738a35809ab"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc3c39e0317e7f68ba01bac056e210dd13c7a0abf823e7b6a5fe7e451ddfc496"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69f2520296f1ae1165b724a3aad28c56fd0ac7dd2e4cff101a5d986e840f02d4"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34dcbf5a7daecebc242f72e2500665f0bde9dd11b779246c6d64d106a7d57c99"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:773ab37fccf6e0513891f8eb4393961ddd1053c6eb7e62eaa876e94668fc6d31"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ecf0e6de84c0bc2c0f48bc03ba23cef2c5f1245db7b26bc860c11c6fd7a097c"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4dc2ebad4adb29d84a661f6a42494df48ad2b72993ff43fad2b9794804f91e45"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8389d98b9f54cb4f8a95f1fa34bf0ceee639e919807bb931ca479c7a5f2930bf"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:165bcdecbfed9978962da1d3ec9c191b2ff9f1ccc2668fbaf0613a975b9aa326"}, - {file = 
"rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:129d536740ab0048c1a06ccff73c683f282a2347c68069affae8dbc423a37c50"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b67e390261ffe98ec86c771b89425a78b60ccb610c3b5874660216fcdbded4b"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-win32.whl", hash = "sha256:a66520180d3426b9dc2f8d312f38e19bc1fc5601f374bae5c916f53fa3534a7d"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:82260b20bc7a76556cecb0c063c87dad19246a570425d38f8107b8404ca3ac97"}, - {file = "rapidfuzz-3.12.1-cp310-cp310-win_arm64.whl", hash = "sha256:3a860d103bbb25c69c2e995fdf4fac8cb9f77fb69ec0a00469d7fd87ff148f46"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6d9afad7b16d01c9e8929b6a205a18163c7e61b6cd9bcf9c81be77d5afc1067a"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb424ae7240f2d2f7d8dda66a61ebf603f74d92f109452c63b0dbf400204a437"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42149e6d13bd6d06437d2a954dae2184dadbbdec0fdb82dafe92860d99f80519"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:760ac95d788f2964b73da01e0bdffbe1bf2ad8273d0437565ce9092ae6ad1fbc"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cf27e8e4bf7bf9d92ef04f3d2b769e91c3f30ba99208c29f5b41e77271a2614"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00ceb8ff3c44ab0d6014106c71709c85dee9feedd6890eff77c814aa3798952b"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b61c558574fbc093d85940c3264c08c2b857b8916f8e8f222e7b86b0bb7d12"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:346a2d8f17224e99f9ef988606c83d809d5917d17ad00207237e0965e54f9730"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d60d1db1b7e470e71ae096b6456e20ec56b52bde6198e2dbbc5e6769fa6797dc"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2477da227e266f9c712f11393182c69a99d3c8007ea27f68c5afc3faf401cc43"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8499c7d963ddea8adb6cffac2861ee39a1053e22ca8a5ee9de1197f8dc0275a5"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12802e5c4d8ae104fb6efeeb436098325ce0dca33b461c46e8df015c84fbef26"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-win32.whl", hash = "sha256:e1061311d07e7cdcffa92c9b50c2ab4192907e70ca01b2e8e1c0b6b4495faa37"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6e4ed63e204daa863a802eec09feea5448617981ba5d150f843ad8e3ae071a4"}, - {file = "rapidfuzz-3.12.1-cp311-cp311-win_arm64.whl", hash = "sha256:920733a28c3af47870835d59ca9879579f66238f10de91d2b4b3f809d1ebfc5b"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f6235b57ae3faa3f85cb3f90c9fee49b21bd671b76e90fc99e8ca2bdf0b5e4a3"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af4585e5812632c357fee5ab781c29f00cd06bea58f8882ff244cc4906ba6c9e"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5942dc4460e5030c5f9e1d4c9383de2f3564a2503fe25e13e89021bcbfea2f44"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b31ab59e1a0df5afc21f3109b6cfd77b34040dbf54f1bad3989f885cfae1e60"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97c885a7a480b21164f57a706418c9bbc9a496ec6da087e554424358cadde445"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2d844c0587d969ce36fbf4b7cbf0860380ffeafc9ac5e17a7cbe8abf528d07bb"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93c95dce8917bf428064c64024de43ffd34ec5949dd4425780c72bd41f9d969"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:834f6113d538af358f39296604a1953e55f8eeffc20cb4caf82250edbb8bf679"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a940aa71a7f37d7f0daac186066bf6668d4d3b7e7ef464cb50bc7ba89eae1f51"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ec9eaf73501c9a7de2c6938cb3050392e2ee0c5ca3921482acf01476b85a7226"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3c5ec360694ac14bfaeb6aea95737cf1a6cf805b5fe8ea7fd28814706c7fa838"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6b5e176524653ac46f1802bdd273a4b44a5f8d0054ed5013a8e8a4b72f254599"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-win32.whl", hash = "sha256:6f463c6f1c42ec90e45d12a6379e18eddd5cdf74138804d8215619b6f4d31cea"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:b894fa2b30cd6498a29e5c470cb01c6ea898540b7e048a0342775a5000531334"}, - {file = "rapidfuzz-3.12.1-cp312-cp312-win_arm64.whl", hash = "sha256:43bb17056c5d1332f517b888c4e57846c4b5f936ed304917eeb5c9ac85d940d4"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:97f824c15bc6933a31d6e3cbfa90188ba0e5043cf2b6dd342c2b90ee8b3fd47c"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a973b3f5cabf931029a3ae4a0f72e3222e53d412ea85fc37ddc49e1774f00fbf"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7880e012228722dec1be02b9ef3898ed023388b8a24d6fa8213d7581932510"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9c78582f50e75e6c2bc38c791ed291cb89cf26a3148c47860c1a04d6e5379c8e"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d7d9e6a04d8344b0198c96394c28874086888d0a2b2f605f30d1b27b9377b7d"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5620001fd4d6644a2f56880388179cc8f3767670f0670160fcb97c3b46c828af"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0666ab4c52e500af7ba5cc17389f5d15c0cdad06412c80312088519fdc25686d"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:27b4d440fa50b50c515a91a01ee17e8ede719dca06eef4c0cccf1a111a4cfad3"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83dccfd5a754f2a0e8555b23dde31f0f7920601bfa807aa76829391ea81e7c67"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b572b634740e047c53743ed27a1bb3b4f93cf4abbac258cd7af377b2c4a9ba5b"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7fa7b81fb52902d5f78dac42b3d6c835a6633b01ddf9b202a3ca8443be4b2d6a"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1d4fbff980cb6baef4ee675963c081f7b5d6580a105d6a4962b20f1f880e1fb"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-win32.whl", hash = "sha256:3fe8da12ea77271097b303fa7624cfaf5afd90261002314e3b0047d36f4afd8d"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:6f7e92fc7d2a7f02e1e01fe4f539324dfab80f27cb70a30dd63a95445566946b"}, - {file = "rapidfuzz-3.12.1-cp313-cp313-win_arm64.whl", hash = "sha256:e31be53d7f4905a6a038296d8b773a79da9ee9f0cd19af9490c5c5a22e37d2e5"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bef5c91d5db776523530073cda5b2a276283258d2f86764be4a008c83caf7acd"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:841e0c2a5fbe8fc8b9b1a56e924c871899932c0ece7fbd970aa1c32bfd12d4bf"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046fc67f3885d94693a2151dd913aaf08b10931639cbb953dfeef3151cb1027c"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4d2d39b2e76c17f92edd6d384dc21fa020871c73251cdfa017149358937a41d"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5857dda85165b986c26a474b22907db6b93932c99397c818bcdec96340a76d5"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c26cd1b9969ea70dbf0dbda3d2b54ab4b2e683d0fd0f17282169a19563efeb1"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf56ea4edd69005786e6c80a9049d95003aeb5798803e7a2906194e7a3cb6472"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fbe7580b5fb2db8ebd53819171ff671124237a55ada3f64d20fc9a149d133960"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:018506a53c3b20dcbda8c93d4484b9eb1764c93d5ea16be103cf6b0d8b11d860"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:325c9c71b737fcd32e2a4e634c430c07dd3d374cfe134eded3fe46e4c6f9bf5d"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:930756639643e3aa02d3136b6fec74e5b9370a24f8796e1065cd8a857a6a6c50"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0acbd27543b158cb915fde03877383816a9e83257832818f1e803bac9b394900"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-win32.whl", hash = "sha256:80ff9283c54d7d29b2d954181e137deee89bec62f4a54675d8b6dbb6b15d3e03"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:fd37e53f0ed239d0cec27b250cec958982a8ba252ce64aa5e6052de3a82fa8db"}, - {file = "rapidfuzz-3.12.1-cp39-cp39-win_arm64.whl", hash = 
"sha256:4a4422e4f73a579755ab60abccb3ff148b5c224b3c7454a13ca217dfbad54da6"}, - {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b7cba636c32a6fc3a402d1cb2c70c6c9f8e6319380aaf15559db09d868a23e56"}, - {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b79286738a43e8df8420c4b30a92712dec6247430b130f8e015c3a78b6d61ac2"}, - {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dc1937198e7ff67e217e60bfa339f05da268d91bb15fec710452d11fe2fdf60"}, - {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b85817a57cf8db32dd5d2d66ccfba656d299b09eaf86234295f89f91be1a0db2"}, - {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04283c6f3e79f13a784f844cd5b1df4f518ad0f70c789aea733d106c26e1b4fb"}, - {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a718f740553aad5f4daef790191511da9c6eae893ee1fc2677627e4b624ae2db"}, - {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cbdf145c7e4ebf2e81c794ed7a582c4acad19e886d5ad6676086369bd6760753"}, - {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0d03ad14a26a477be221fddc002954ae68a9e2402b9d85433f2d0a6af01aa2bb"}, - {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1187aeae9c89e838d2a0a2b954b4052e4897e5f62e5794ef42527bf039d469e"}, - {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd47dfb1bca9673a48b923b3d988b7668ee8efd0562027f58b0f2b7abf27144c"}, - {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187cdb402e223264eebed2fe671e367e636a499a7a9c82090b8d4b75aa416c2a"}, - {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:d6899b41bf6c30282179f77096c1939f1454836440a8ab05b48ebf7026a3b590"}, - {file = "rapidfuzz-3.12.1.tar.gz", hash = "sha256:6a98bbca18b4a37adddf2d8201856441c26e9c981d8895491b5bc857b5f780eb"}, + {file = "ratelim-0.1.6-py2.py3-none-any.whl", hash = "sha256:e1a7dd39e6b552b7cc7f52169cd66cdb826a1a30198e355d7016012987c9ad08"}, + {file = "ratelim-0.1.6.tar.gz", hash = "sha256:826d32177e11f9a12831901c9fda6679fd5bbea3605910820167088f5acbb11d"}, ] -[package.extras] -all = ["numpy"] +[package.dependencies] +decorator = "*" [[package]] name = "requests" @@ -2229,26 +1728,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "rich" -version = "13.9.4" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.8.0" -groups = ["main"] -files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - [[package]] name = "ruff" version = "0.9.7" @@ -2310,18 +1789,6 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv 
(>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - [[package]] name = "six" version = "1.17.0" @@ -2346,18 +1813,6 @@ files = [ {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, ] -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - [[package]] name = "tabulate" version = "0.9.0" @@ -2459,36 +1914,6 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -[[package]] -name = "typer" -version = "0.15.1" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
-optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, - {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, -] - -[package.dependencies] -click = ">=8.0.0" -rich = ">=10.11.0" -shellingham = ">=1.3.0" -typing-extensions = ">=3.7.4.3" - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20241206" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, - {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, -] - [[package]] name = "types-requests" version = "2.32.0.20241016" @@ -2546,37 +1971,10 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "yaspin" -version = "3.0.1" -description = "Yet Another Terminal Spinner" -optional = false -python-versions = ">=3.9,<4.0" -groups = ["main"] -files = [ - {file = "yaspin-3.0.1-py3-none-any.whl", hash = "sha256:c4b5d2ca23ae664b87a5cd53401c5107cef12668a71d9ee5ea5536045f364121"}, - {file = "yaspin-3.0.1.tar.gz", hash = "sha256:9c04aa69cce9be83e1ea3134a6712e749e6c0c9cd02599023713e6befd7bf369"}, -] - 
-[package.dependencies] -termcolor = ">=2.3,<3.0" - [extras] plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "fe341bb564b08a5b01370b04ed1d5564476a880713239f4fb37cb095a01491cc" +content-hash = "73ec4a32990526c7a7a8ea147cb4b49f58805c209b1896555906573393d31b45" diff --git a/pyproject.toml b/pyproject.toml index 6d7ccf54..5a97be50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,6 @@ dependencies = [ "gitpython (>=3.1.44,<4.0.0)", "humanfriendly (>=10.0,<11.0)", "randomname (>=0.2.1,<0.3.0)", - "codecarbon (>=2.8.3,<3.0.0)", "numpy (>=2.0.0,<3.0.0)", "flatdict (>=4.0.1,<5.0.0)", "semver (>=3.0.4,<4.0.0)", @@ -54,6 +53,7 @@ dependencies = [ "tenacity (>=9.0.0,<10.0.0)", "typing-extensions (>=4.12.2,<5.0.0) ; python_version < \"3.11\"", "deepmerge (>=2.0,<3.0)", + "geocoder (>=1.38.1,<2.0.0)", ] [project.urls] diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index b0313e42..0461364a 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -10,8 +10,8 @@ import pathlib import typing import datetime +import json -from codecarbon.output_methods.emissions_data import json import pydantic from simvue.exception import ObjectNotFoundError diff --git a/simvue/api/request.py b/simvue/api/request.py index 8dd6a8bd..20f47917 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -10,9 +10,9 @@ import copy import json as json_module import typing +import logging import http -from codecarbon.external.logger import logging import requests from tenacity import ( retry, diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 9e0b38bc..e0469e03 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -48,8 +48,6 @@ def check_token(cls, v: typing.Any) -> str | None: class OfflineSpecifications(pydantic.BaseModel): cache: pathlib.Path | None = None - country_iso_code: str | None = None - class 
MetricsSpecifications(pydantic.BaseModel): resources_metrics_interval: pydantic.PositiveInt | None = -1 diff --git a/simvue/config/user.py b/simvue/config/user.py index f86bbcac..78878fee 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -33,6 +33,7 @@ from simvue.version import __version__ from simvue.api.request import get as sv_get from simvue.api.url import URL +from simvue.eco.config import EcoConfig logger = logging.getLogger(__name__) @@ -42,7 +43,7 @@ class SimvueConfiguration(pydantic.BaseModel): # Hide values as they contain token and URL - model_config = pydantic.ConfigDict(hide_input_in_errors=True) + model_config = pydantic.ConfigDict(hide_input_in_errors=True, revalidate_instances="always") client: ClientGeneralOptions = ClientGeneralOptions() server: ServerSpecifications = pydantic.Field( ..., description="Specifications for Simvue server" @@ -50,6 +51,7 @@ class SimvueConfiguration(pydantic.BaseModel): run: DefaultRunSpecifications = DefaultRunSpecifications() offline: OfflineSpecifications = OfflineSpecifications() metrics: MetricsSpecifications = MetricsSpecifications() + eco: EcoConfig = EcoConfig() @classmethod def _load_pyproject_configs(cls) -> dict | None: @@ -135,14 +137,14 @@ def write(self, out_directory: pydantic.DirectoryPath) -> None: @pydantic.model_validator(mode="after") @classmethod - def check_valid_server(cls, values: "SimvueConfiguration") -> bool: + def check_valid_server(cls, values: "SimvueConfiguration") -> "SimvueConfiguration": if os.environ.get("SIMVUE_NO_SERVER_CHECK"): return values cls._check_server(values.server.token, values.server.url, values.run.mode) return values - + @classmethod @sv_util.prettify_pydantic def fetch( diff --git a/simvue/eco.py b/simvue/eco.py deleted file mode 100644 index 76e2d694..00000000 --- a/simvue/eco.py +++ /dev/null @@ -1,132 +0,0 @@ -import typing -import logging -import datetime - -from codecarbon import EmissionsTracker, OfflineEmissionsTracker -from codecarbon.output 
import BaseOutput as cc_BaseOutput -from simvue.utilities import simvue_timestamp - -if typing.TYPE_CHECKING: - from simvue import Run - from codecarbon.output_methods.emissions_data import EmissionsData - - -logger = logging.getLogger(__file__) - - -class CodeCarbonOutput(cc_BaseOutput): - def __init__(self, run: "Run") -> None: - self._simvue_run = run - self._metrics_step: int = 0 - - def out( - self, total: "EmissionsData", delta: "EmissionsData", meta_update: bool = True - ) -> None: - # Check if the run has been shutdown, if so do nothing - if ( - self._simvue_run._shutdown_event - and self._simvue_run._shutdown_event.is_set() - ): - logger.debug("Terminating CodeCarbon tracker") - return - - if meta_update: - logger.debug("Logging CodeCarbon metadata") - try: - self._simvue_run.update_metadata( - { - "sustainability": { - "country": total.country_name, - "country_iso_code": total.country_iso_code, - "region": total.region, - "codecarbon_version": total.codecarbon_version, - } - } - ) - except AttributeError as e: - logger.error(f"Failed to update metadata: {e}") - try: - _cc_timestamp = datetime.datetime.strptime( - total.timestamp, "%Y-%m-%dT%H:%M:%S" - ) - except ValueError as e: - logger.error(f"Error parsing timestamp: {e}") - return - - logger.debug("Logging CodeCarbon metrics") - try: - self._simvue_run.log_metrics( - metrics={ - "sustainability.emissions.total": total.emissions, - "sustainability.energy_consumed.total": total.energy_consumed, - "sustainability.emissions.delta": delta.emissions, - "sustainability.energy_consumed.delta": delta.energy_consumed, - }, - step=self._metrics_step, - timestamp=simvue_timestamp(_cc_timestamp), - ) - except ArithmeticError as e: - logger.error(f"Failed to log metrics: {e}") - return - - self._metrics_step += 1 - - def live_out(self, total: "EmissionsData", delta: "EmissionsData") -> None: - self.out(total, delta, meta_update=False) - - -class SimvueEmissionsTracker(EmissionsTracker): - def __init__( - self, 
project_name: str, simvue_run: "Run", metrics_interval: int - ) -> None: - self._simvue_run = simvue_run - logger.setLevel(logging.ERROR) - super().__init__( - project_name=project_name, - measure_power_secs=metrics_interval, - api_call_interval=1, - experiment_id=None, - experiment_name=None, - logging_logger=CodeCarbonOutput(simvue_run), - save_to_logger=True, - allow_multiple_runs=True, - log_level="error", - ) - - def set_measure_interval(self, interval: int) -> None: - """Set the measure interval""" - self._set_from_conf(interval, "measure_power_secs") - - def post_init(self) -> None: - self._set_from_conf(self._simvue_run._id, "experiment_id") - self._set_from_conf(self._simvue_run._name, "experiment_name") - self.start() - - -class OfflineSimvueEmissionsTracker(OfflineEmissionsTracker): - def __init__( - self, project_name: str, simvue_run: "Run", metrics_interval: int - ) -> None: - self._simvue_run = simvue_run - logger.setLevel(logging.ERROR) - super().__init__( - country_iso_code=simvue_run._user_config.offline.country_iso_code, - project_name=project_name, - measure_power_secs=metrics_interval, - api_call_interval=1, - experiment_id=None, - experiment_name=None, - logging_logger=CodeCarbonOutput(simvue_run), - save_to_logger=True, - allow_multiple_runs=True, - log_level="error", - ) - - def set_measure_interval(self, interval: int) -> None: - """Set the measure interval""" - self._set_from_conf(interval, "measure_power_secs") - - def post_init(self) -> None: - self._set_from_conf(self._simvue_run._id, "experiment_id") - self._set_from_conf(self._simvue_run._name, "experiment_name") - self.start() diff --git a/simvue/eco/__init__.py b/simvue/eco/__init__.py new file mode 100644 index 00000000..cfe48ef9 --- /dev/null +++ b/simvue/eco/__init__.py @@ -0,0 +1,14 @@ +""" +Simvue Eco +========== + +Contains functionality for green IT, monitoring emissions etc. +NOTE: The metrics calculated by these methods should be used for relative +comparisons only. 
Any values returned should not be taken as absolute. + +""" +__date__ = "2025-03-06" + +from .emissions_monitor import CO2Monitor as CO2Monitor + +__all__ = ["CO2Monitor"] diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py new file mode 100644 index 00000000..0d2784ed --- /dev/null +++ b/simvue/eco/api_client.py @@ -0,0 +1,142 @@ +""" +CO2 Signal API Client +===================== + +Provides inteface to the CO2 Signal API, +which provides real-time data on the carbon intensity of +electricity generation in different countries. +""" + +__date__ = "2025-02-27" + +import requests +import pydantic +import functools +import http +import logging +import datetime +import geocoder +import geocoder.location +import typing + +from simvue.utilities import check_extra + +class CO2SignalData(pydantic.BaseModel): + datetime: datetime.datetime + carbon_intensity: float + fossil_fuel_percentage: float + + +class CO2SignalResponse(pydantic.BaseModel): + disclaimer: str + country_code: str + status: str + data: CO2SignalData + carbon_intensity_units: str + + @classmethod + def from_json_response(cls, json_response: dict) -> "CO2SignalResponse": + _data: dict[str, typing.Any] = json_response["data"] + _co2_signal_data = CO2SignalData( + datetime=datetime.datetime.fromisoformat(_data["datetime"]), + carbon_intensity=_data["carbonIntensity"], + fossil_fuel_percentage=_data["fossilFuelPercentage"], + ) + return cls( + disclaimer=json_response["_disclaimer"], + country_code=json_response["countryCode"], + status=json_response["status"], + data=_co2_signal_data, + carbon_intensity_units=json_response["units"]["carbonIntensity"], + ) + + +@functools.lru_cache() +def _call_geocoder_query() -> typing.Any: + """Call GeoCoder API for IP location + + Cached so this API is only called once per session as required. + """ + return geocoder.ip("me") + + +class APIClient(pydantic.BaseModel): + """ + CO2 Signal API Client + + Provides an interface to the Electricity Maps API. 
+ """ + + co2_api_endpoint: pydantic.HttpUrl = pydantic.HttpUrl( + "https://api.co2signal.com/v1/latest" + ) + co2_api_token: pydantic.SecretStr | None = None + timeout: pydantic.PositiveInt = 10 + + def __init__(self, *args, **kwargs) -> None: + """Initialise the CO2 Signal API client. + + Parameters + ---------- + co2_api_endpoint : str + endpoint for CO2 signal API + co2_api_token: str + RECOMMENDED. The API token for the CO2 Signal API, default is None. + timeout : int + timeout for API + """ + super().__init__(*args, **kwargs) + self._logger = logging.getLogger("ecoclient.api") + + if not self.co2_api_token: + self._logger.warning( + "⚠️ No API token provided for CO2 Signal, it is recommended " + ) + + self._get_user_location_info() + + def _get_user_location_info(self) -> None: + """Retrieve location information for the current user.""" + self._logger.info("📍 Determining current user location.") + _current_user_loc_data: geocoder.location.BBox = _call_geocoder_query() + self._latitude: float + self._longitude: float + self._latitude, self._longitude = _current_user_loc_data.latlng + self._two_letter_country_code: str = _current_user_loc_data.country # type: ignore + + def get(self) -> CO2SignalResponse: + """Get the current data""" + _params: dict[str, float | str] = { + "lat": self._latitude, + "lon": self._longitude, + "countryCode": self._two_letter_country_code, + } + + if self.co2_api_token: + _params["auth-token"] = self.co2_api_token.get_secret_value() + + self._logger.debug(f"🍃 Retrieving carbon intensity data for: {_params}") + _response = requests.get(f"{self.co2_api_endpoint}", params=_params) + + if not _response.status_code == http.HTTPStatus.OK: + raise RuntimeError( + "Failed to retrieve current CO2 signal data for" + f" country '{self._two_letter_country_code}': {_response.text}" + ) + + return CO2SignalResponse.from_json_response(_response.json()) + + @property + def country_code(self) -> str: + """Returns the country code""" + return 
self._two_letter_country_code + + @property + def latitude(self) -> float: + """Returns current latitude""" + return self._latitude + + @property + def longitude(self) -> float: + """Returns current longitude""" + return self._longitude diff --git a/simvue/eco/config.py b/simvue/eco/config.py new file mode 100644 index 00000000..2193e442 --- /dev/null +++ b/simvue/eco/config.py @@ -0,0 +1,34 @@ +""" +Eco Config +========== + +Configuration file extension for configuring the Simvue Eco sub-module. +""" +__date__ = "2025-03-06" + +import pydantic +import pathlib +import os + +from simvue.config.files import DEFAULT_OFFLINE_DIRECTORY + +class EcoConfig(pydantic.BaseModel): + co2_signal_api_token: pydantic.SecretStr | None = None + cpu_thermal_design_power: pydantic.PositiveInt = 80 + cpu_idle_power: pydantic.PositiveFloat = 10 + local_data_directory: pydantic.DirectoryPath | None = pydantic.Field(None, validate_default=True) + intensity_refresh_rate: pydantic.PositiveInt | str | None = pydantic.Field( + default="1 week", gt=2 * 60 + ) + co2_intensity: float | None = None + + @pydantic.field_validator("local_data_directory", mode="before", check_fields=True) + @classmethod + def check_local_data_env(cls, local_data_directory: pathlib.Path | None) -> pathlib.Path: + if (_data_directory := os.environ.get("SIMVUE_ECO_DATA_DIRECTORY")): + return pathlib.Path(_data_directory) + if not local_data_directory: + local_data_directory = pathlib.Path(DEFAULT_OFFLINE_DIRECTORY) + local_data_directory.mkdir(exist_ok=True, parents=True) + return local_data_directory + \ No newline at end of file diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py new file mode 100644 index 00000000..9370ed8a --- /dev/null +++ b/simvue/eco/emissions_monitor.py @@ -0,0 +1,306 @@ +""" +CO2 Monitor +=========== + +Provides an interface for estimating CO2 usage for processes on the CPU. 
+""" + +__author__ = "Kristian Zarebski" +__version__ = "0.1.0" +__license__ = "MIT" +__date__ = "2025-02-27" + +import datetime +import json +import pydantic +import dataclasses +import threading +import time +import logging +import typing +import psutil +import humanfriendly +import pathlib +import os + +from simvue.eco.api_client import APIClient, CO2SignalResponse + +TIME_FORMAT: str = "%Y_%m_%d_%H_%M_%S" + + +@dataclasses.dataclass +class ProcessData: + process: psutil.Process + cpu_percentage: float = 0.0 + power_usage: float = 0.0 + co2_emission: float = 0.0 + + +class CO2Monitor(pydantic.BaseModel): + """ + CO2 Monitor + + Provides an interface for estimating CO2 usage for processes on the CPU. + """ + + thermal_design_power_per_core: pydantic.PositiveFloat | None + cpu_idle_power: pydantic.PositiveFloat + cpu_interval: float = 1.0 + local_data_directory: pydantic.DirectoryPath + intensity_refresh_rate: int | None | str + co2_intensity: float | None + co2_signal_api_token: str | None + + def now(self) -> str: + """Return data file timestamp for the current time""" + _now: datetime.datetime = datetime.datetime.now(datetime.UTC) + return _now.strftime(TIME_FORMAT) + + @property + def outdated(self) -> bool: + """Checks if the current data is out of date.""" + if not self.intensity_refresh_rate: + return False + + _now: datetime.datetime = datetime.datetime.now() + _last_updated: str = self._local_data["last_updated"] + _latest_time: datetime.datetime = datetime.datetime.strptime( + _last_updated, TIME_FORMAT + ) + return (_now - _latest_time).seconds < self.intensity_refresh_rate + + def _load_local_data(self) -> dict[str, str | dict[str, str | float]] | None: + """Loads locally stored CO2 intensity data""" + self._data_file_path = self.local_data_directory.joinpath( + "ecoclient_co2_intensity.json" + ) + + if not self._data_file_path.exists(): + return None + + with self._data_file_path.open() as in_f: + _data: dict[str, str | dict[str, str | float]] | None 
= json.load(in_f) + + return _data or None + + def __init__(self, *args, **kwargs) -> None: + """Initialise a CO2 Monitor. + + Parameters + ---------- + thermal_design_power_per_core: float | None + the TDP value for the CPU. Default of None uses naive 85W value. + cpu_idle_power: float + the idle power of the CPU, default is naive value of 10W. + cpu_interval: float + the interval within which to measure average CPU percentage, default is 1s. + local_data_directory: pydantic.DirectoryPath + the directory in which to store CO2 intensity data. + intensity_refresh_rate: int | str | None + the rate in seconds at which to call the CO2 signal API. The default is once per day, + note the API is restricted to 30 requests per hour for a given user. Also accepts a + time period as a string, e.g. '1 week' + co2_intensity: float | None + disable using RestAPIs to retrieve CO2 intensity and instead use this value. + Default is None, use remote data. Value is in kgCO2/kWh + co2_signal_api_token: str + RECOMMENDED. The API token for CO2 signal, default is None. + """ + _logger = logging.getLogger("ecoclient.monitor") + if not isinstance(kwargs.get("thermal_design_power_per_core"), float): + kwargs["thermal_design_power_per_core"] = 80.0 + _logger.warning( + "⚠️ No TDP value provided for current CPU, will use arbitrary value of 80W." + ) + super().__init__(*args, **kwargs) + + if self.intensity_refresh_rate and isinstance(self.intensity_refresh_rate, str): + self.intensity_refresh_rate = int(humanfriendly.parse_timespan( + self.intensity_refresh_rate + )) + + if self.intensity_refresh_rate and self.intensity_refresh_rate <= 2 * 60: + raise ValueError( + "Invalid intensity refresh rate, CO2 signal API restricted to 30 calls per hour." 
+ ) + + if self.co2_intensity: + _logger.warning(f"⚠️ Disabling online data retrieval, using {self.co2_intensity} for CO2 intensity.") + + self._data_file_path: pathlib.Path | None = None + + # Load any local data first, if the data is missing or due a refresh this will be None + self._local_data: dict[str, str | dict[str, float | str]] | None = ( + self._load_local_data() or {} + ) + self._measure_time = datetime.datetime.now() + self._logger = _logger + self._client: APIClient = APIClient( + co2_api_token=self.co2_signal_api_token, + timeout=10 + ) + self._processes: dict[str, ProcessData] = {} + + @pydantic.validate_call(config={"arbitrary_types_allowed": True}) + def attach_process( + self, process: psutil.Process | None = None, label: str | None = None + ) -> str: + """ + Attach a process to the CO2 Monitor. + + Parameters + ---------- + process : psutil.Process | None + The process to monitor, if None measures the current running process. Default is None. + label : str | None + The label to assign to the process. Default is process_. + + Returns + ------- + int + The PID of the process. 
+ """ + if process is None: + process = psutil.Process(pid=os.getpid()) + + self._logger.info(f"📎 Attaching process with PID {process.pid}") + + label = label or f"process_{process.pid}" + self._processes[label] = ProcessData(process=process) + + return label + + def estimate_co2_emissions(self) -> None: + """Estimate the CO2 emissions""" + self._logger.info("📐 Measuring CPU usage and power.") + + if not self._local_data: + raise RuntimeError("Expected local data to be initialised.") + + if not self._data_file_path: + raise RuntimeError("Expected local data file to be defined.") + + if ( + not self.co2_intensity and + not self._local_data.setdefault(self._client.country_code, {}) + or self.outdated + ): + self._logger.info("🌍 CO2 emission outdated, calling API.") + _data: CO2SignalResponse = self._client.get() + self._local_data[self._client.country_code] = _data.model_dump(mode="json") + self._local_data["last_updated"] = self.now() + + with self._data_file_path.open("w") as out_f: + json.dump(self._local_data, out_f, indent=2) + + if self.co2_intensity: + _current_co2_intensity = self.co2_intensity + _co2_units = "kgCO2/kWh" + else: + self._current_co2_data = CO2SignalResponse( + **self._local_data[self._client.country_code] + ) + _current_co2_intensity = self._current_co2_data.data.carbon_intensity + _co2_units = self._current_co2_data.carbon_intensity_units + + for label, process in self._processes.items(): + process.cpu_percentage = process.process.cpu_percent( + interval=self.cpu_interval + ) + process.power_usage = min( + self.cpu_idle_power, + (process.cpu_percentage / 100.0) * self.thermal_design_power_per_core, + ) + + # Measured value is in g/kWh, convert to kg/kWs + _carbon_intensity_kgpws: float = ( + _current_co2_intensity / (60 * 60 * 1e3) + ) + + process.co2_emission = ( + process.power_usage * _carbon_intensity_kgpws * self.cpu_interval + ) + + self._logger.debug( + f"📝 For process '{label}', recorded: CPU={process.cpu_percentage}%, " + 
f"Power={process.power_usage}W, CO2={process.co2_emission}{_co2_units}" + ) + + @pydantic.validate_call(config={"arbitrary_types_allowed": True}) + def run( + self, + termination_trigger: threading.Event, + callback: typing.Callable, + measure_interval: pydantic.PositiveFloat = pydantic.Field(default=10.0, gt=2.0), + return_all: bool = False, + ) -> None: + """Run the API client in a thread. + + Parameters + ---------- + termination_trigger : threading.Event + thread event used to terminate monitor + callback : typing.Callable + callback to execute on measured results + measure_interval : float, optional + interval of measurement, note the API is limited at a rate of 30 requests per + hour, therefore any interval less than 2 minutes will use the previously recorded CO2 intensity. + Default is 10 seconds. + return_all : bool, optional + whether to return all processes or just the current. Default is False. + + Returns + ------- + ProcessData | dict[str, ProcessData] + Either the process data for the current process or for all processes. 
+ """ + self._logger.info("🧵 Launching monitor in multi-threaded mode.") + self._logger.info(f"⌚ Will record at interval of {measure_interval}s.") + + def _run( + monitor: "CO2Monitor" = self, + callback: typing.Callable = callback, + return_all: bool = return_all, + ) -> None: + if not return_all and not monitor.last_process: + raise ValueError("No processes attached to monitor.") + + while not termination_trigger.is_set(): + monitor.estimate_co2_emissions() + # Depending on user choice either + # return all process data or just the last + callback( + monitor.process_data + if return_all + else monitor.process_data[monitor.last_process] # type: ignore + ) + time.sleep(measure_interval) + + _thread = threading.Thread(target=_run) + _thread.start() + + @property + def last_process(self) -> str | None: + if not self._processes: + return None + return list(self._processes.keys())[-1] + + @property + def process_data(self) -> dict[str, ProcessData]: + return self._processes + + @property + def current_carbon_intensity(self) -> float: + return self._client.get().data.carbon_intensity + + @property + def total_cpu_percentage(self) -> float: + return sum([process.cpu_percentage for process in self._processes.values()]) + + @property + def total_power_usage(self) -> float: + return sum([process.power_usage for process in self._processes.values()]) + + @property + def total_co2_emission(self) -> float: + return sum([process.co2_emission for process in self._processes.values()]) \ No newline at end of file diff --git a/simvue/run.py b/simvue/run.py index 52861375..6fa9169a 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -43,7 +43,7 @@ from .models import FOLDER_REGEX, NAME_REGEX, MetricKeyString from .system import get_system from .metadata import git_info, environment -from .eco import SimvueEmissionsTracker, OfflineSimvueEmissionsTracker +from .eco import CO2Monitor from .utilities import ( skip_if_failed, validate_timestamp, @@ -167,7 +167,7 @@ def __init__( self._data: 
dict[str, typing.Any] = {} self._step: int = 0 self._active: bool = False - self._user_config = SimvueConfiguration.fetch( + self._user_config: SimvueConfiguration = SimvueConfiguration.fetch( server_url=server_url, server_token=server_token, mode=mode ) @@ -210,26 +210,26 @@ def __init__( else self._user_config.metrics.emission_metrics_interval ) if mode == "offline": - if ( - self._user_config.metrics.enable_emission_metrics - and not self._user_config.offline.country_iso_code - ): - raise ValueError( - "Country ISO code must be provided if tracking emissions metrics in offline mode." - ) - - self._emissions_tracker: OfflineSimvueEmissionsTracker | None = ( - OfflineSimvueEmissionsTracker( - "simvue", self, self._emission_metrics_interval + if not (_co2_intensity := self._user_config.eco.co2_intensity): + self._error( + "Cannot record emission metrics, " + "a CO2 intensity value is required in offline mode." ) - if self._user_config.metrics.enable_emission_metrics - else None + # Create an emissions monitor with no API calls + self._emissions_monitor = CO2Monitor( + intensity_refresh_rate=None, + co2_intensity=_co2_intensity, + local_data_directory=self._user_config.eco.local_data_directory, + co2_signal_api_token=None, + cpu_idle_power=self._user_config.eco.cpu_idle_power, ) else: - self._emissions_tracker: SimvueEmissionsTracker | None = ( - SimvueEmissionsTracker("simvue", self, self._emission_metrics_interval) - if self._user_config.metrics.enable_emission_metrics - else None + self._emissions_monitor = CO2Monitor( + intensity_refresh_rate=self._user_config.eco.intensity_refresh_rate, + local_data_directory=self._user_config.eco.local_data_directory, + co2_signal_api_token=self._user_config.eco.co2_signal_api_token, + cpu_idle_power=self._user_config.eco.cpu_idle_power, + co2_intensity=self._user_config.eco.co2_intensity, ) def __enter__(self) -> Self: @@ -243,10 +243,6 @@ def _handle_exception_throw( ) -> None: _exception_thrown: str | None = 
exc_type.__name__ if exc_type else None _is_running: bool = self._status == "running" - _is_running_online: bool = self._id is not None and _is_running - _is_running_offline: bool = ( - self._user_config.run.mode == "offline" and _is_running - ) _is_terminated: bool = ( _exception_thrown is not None and _exception_thrown == "KeyboardInterrupt" ) @@ -369,6 +365,8 @@ def _heartbeat( last_heartbeat = time.time() last_res_metric_call = time.time() + co2_step = 0 + last_co2_metric_call = time.time() if self._resources_metrics_interval: self._add_metrics_to_dispatch( @@ -393,6 +391,18 @@ def _heartbeat( ) last_res_metric_call = res_time res_step += 1 + if ( + self._emission_metrics_interval and self._emissions_monitor + and (co2_time := time.time()) - last_co2_metric_call + > self._emission_metrics_interval + ): + self._emissions_monitor.estimate_co2_emissions() + self._add_metrics_to_dispatch( + {"sustainability.emissions.total": self._emissions_monitor.total_co2_emission}, + join_on_fail=False, step=co2_step + ) + last_co2_metric_call = co2_time + co2_step += 1 if time.time() - last_heartbeat < self._heartbeat_interval: continue @@ -449,7 +459,6 @@ def _create_dispatch_callback( def _dispatch_callback( buffer: list[typing.Any], category: typing.Literal["events", "metrics"], - run_obj: RunObject = self._sv_obj, ) -> None: if category == "events": _events = Events.new( @@ -468,14 +477,9 @@ def _dispatch_callback( return _dispatch_callback - def _start(self, reconnect: bool = False) -> bool: + def _start(self ) -> bool: """Start a run - Parameters - ---------- - reconnect : bool, optional - whether this is a reconnect to an existing run, by default False - Returns ------- bool @@ -510,6 +514,13 @@ def _start(self, reconnect: bool = False) -> bool: self._get_child_processes() if self._parent_process else None ) + if self._emissions_monitor: + self._emissions_monitor.attach_process(self._parent_process) + ( + self._emissions_monitor.attach_process(process) + for process in 
self._child_processes or [] + ) + self._shutdown_event = threading.Event() self._heartbeat_termination_trigger = threading.Event() self._alert_raised_trigger = threading.Event() @@ -762,10 +773,6 @@ def init( fg="green" if self._term_color else None, ) - if self._emissions_tracker and self._status == "running": - self._emissions_tracker.post_init() - self._emissions_tracker.start() - return True @skip_if_failed("_aborted", "_suppress_errors", None) @@ -1066,28 +1073,30 @@ def config( if enable_emission_metrics: if self._user_config.run.mode == "offline": - if not self._user_config.offline.country_iso_code: + if not (_co2_intensity := self._user_config.eco.co2_intensity): self._error( - "Country ISO code must be provided if tracking emissions metrics in offline mode." - ) - self._emissions_tracker: OfflineSimvueEmissionsTracker = ( - OfflineSimvueEmissionsTracker( - "simvue", self, self._emission_metrics_interval + "Cannot record emission metrics, " + "a CO2 intensity value is required in offline mode." 
) + # Create an emissions monitor with no API calls + self._emissions_monitor = CO2Monitor( + intensity_refresh_rate=None, + co2_intensity=_co2_intensity, + local_data_directory=self._user_config.eco.local_data_directory, + co2_signal_api_token=None, + cpu_idle_power=self._user_config.eco.cpu_idle_power, ) else: - self._emissions_tracker: SimvueEmissionsTracker = ( - SimvueEmissionsTracker( - "simvue", self, self._emission_metrics_interval - ) + self._emissions_monitor = CO2Monitor( + intensity_refresh_rate=self._user_config.eco.intensity_refresh_rate, + local_data_directory=self._user_config.eco.local_data_directory, + co2_signal_api_token=self._user_config.eco.co2_signal_api_token, + cpu_idle_power=self._user_config.eco.cpu_idle_power, + co2_intensity=self._user_config.eco.co2_intensity, ) - # If the main Run API object is initialised the run is active - # hence the tracker should start too - if self._sv_obj: - self._emissions_tracker.start() - elif enable_emission_metrics is False and self._emissions_tracker: - self._error("Cannot disable emissions tracker once it has been started") + elif enable_emission_metrics is False and self._emissions_monitor: + self._error("Cannot disable emissions monitor once it has been started") if resources_metrics_interval: self._resources_metrics_interval = resources_metrics_interval @@ -1562,10 +1571,6 @@ def set_status( def _tidy_run(self) -> None: self._executor.wait_for_completion() - if self._emissions_tracker: - with contextlib.suppress(Exception): - self._emissions_tracker.stop() - if self._heartbeat_thread and self._heartbeat_termination_trigger: self._heartbeat_termination_trigger.set() self._heartbeat_thread.join() diff --git a/simvue/utilities.py b/simvue/utilities.py index a5f40430..2d8dd9b0 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -137,6 +137,11 @@ def wrapper(self, *args, **kwargs) -> typing.Any: raise RuntimeError( f"Plotting features require the '{extra_name}' extension to Simvue" ) + elif 
extra_name == "eco": + if not importlib.util.find_spec("geocoder"): + raise RuntimeError( + f"Eco features require the '{extra_name}' extenstion to Simvue" + ) elif extra_name == "torch": if not importlib.util.find_spec("torch"): raise RuntimeError( From ddabec69bd2e64e1c8f2ca86f40c7a5eceb6f907 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 7 Mar 2025 08:27:50 +0000 Subject: [PATCH 02/37] Fix emission metric sending --- simvue/eco/api_client.py | 5 +-- simvue/eco/config.py | 15 ++++--- simvue/eco/emissions_monitor.py | 65 +++++++++++++++++++----------- simvue/run.py | 38 ++++++++++++----- tests/functional/test_run_class.py | 4 +- 5 files changed, 84 insertions(+), 43 deletions(-) diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py index 0d2784ed..71dadd94 100644 --- a/simvue/eco/api_client.py +++ b/simvue/eco/api_client.py @@ -19,7 +19,6 @@ import geocoder.location import typing -from simvue.utilities import check_extra class CO2SignalData(pydantic.BaseModel): datetime: datetime.datetime @@ -75,7 +74,7 @@ class APIClient(pydantic.BaseModel): def __init__(self, *args, **kwargs) -> None: """Initialise the CO2 Signal API client. - + Parameters ---------- co2_api_endpoint : str @@ -86,7 +85,7 @@ def __init__(self, *args, **kwargs) -> None: timeout for API """ super().__init__(*args, **kwargs) - self._logger = logging.getLogger("ecoclient.api") + self._logger = logging.getLogger(self.__class__.__name__) if not self.co2_api_token: self._logger.warning( diff --git a/simvue/eco/config.py b/simvue/eco/config.py index 2193e442..ecc60fe0 100644 --- a/simvue/eco/config.py +++ b/simvue/eco/config.py @@ -4,6 +4,7 @@ Configuration file extension for configuring the Simvue Eco sub-module. 
""" + __date__ = "2025-03-06" import pydantic @@ -12,23 +13,27 @@ from simvue.config.files import DEFAULT_OFFLINE_DIRECTORY + class EcoConfig(pydantic.BaseModel): co2_signal_api_token: pydantic.SecretStr | None = None cpu_thermal_design_power: pydantic.PositiveInt = 80 cpu_idle_power: pydantic.PositiveFloat = 10 - local_data_directory: pydantic.DirectoryPath | None = pydantic.Field(None, validate_default=True) + local_data_directory: pydantic.DirectoryPath | None = pydantic.Field( + None, validate_default=True + ) intensity_refresh_rate: pydantic.PositiveInt | str | None = pydantic.Field( - default="1 week", gt=2 * 60 + default="1 day", gt=2 * 60 ) co2_intensity: float | None = None @pydantic.field_validator("local_data_directory", mode="before", check_fields=True) @classmethod - def check_local_data_env(cls, local_data_directory: pathlib.Path | None) -> pathlib.Path: - if (_data_directory := os.environ.get("SIMVUE_ECO_DATA_DIRECTORY")): + def check_local_data_env( + cls, local_data_directory: pathlib.Path | None + ) -> pathlib.Path: + if _data_directory := os.environ.get("SIMVUE_ECO_DATA_DIRECTORY"): return pathlib.Path(_data_directory) if not local_data_directory: local_data_directory = pathlib.Path(DEFAULT_OFFLINE_DIRECTORY) local_data_directory.mkdir(exist_ok=True, parents=True) return local_data_directory - \ No newline at end of file diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 9370ed8a..d26cdeae 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -33,7 +33,10 @@ class ProcessData: process: psutil.Process cpu_percentage: float = 0.0 power_usage: float = 0.0 + total_energy: float = 0.0 + energy_delta: float = 0.0 co2_emission: float = 0.0 + co2_delta: float = 0.0 class CO2Monitor(pydantic.BaseModel): @@ -85,7 +88,7 @@ def _load_local_data(self) -> dict[str, str | dict[str, str | float]] | None: def __init__(self, *args, **kwargs) -> None: """Initialise a CO2 Monitor. 
- + Parameters ---------- thermal_design_power_per_core: float | None @@ -106,7 +109,7 @@ def __init__(self, *args, **kwargs) -> None: co2_signal_api_token: str RECOMMENDED. The API token for CO2 signal, default is None. """ - _logger = logging.getLogger("ecoclient.monitor") + _logger = logging.getLogger(self.__class__.__name__) if not isinstance(kwargs.get("thermal_design_power_per_core"), float): kwargs["thermal_design_power_per_core"] = 80.0 _logger.warning( @@ -115,17 +118,19 @@ def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) if self.intensity_refresh_rate and isinstance(self.intensity_refresh_rate, str): - self.intensity_refresh_rate = int(humanfriendly.parse_timespan( - self.intensity_refresh_rate - )) + self.intensity_refresh_rate = int( + humanfriendly.parse_timespan(self.intensity_refresh_rate) + ) if self.intensity_refresh_rate and self.intensity_refresh_rate <= 2 * 60: raise ValueError( "Invalid intensity refresh rate, CO2 signal API restricted to 30 calls per hour." ) - + if self.co2_intensity: - _logger.warning(f"⚠️ Disabling online data retrieval, using {self.co2_intensity} for CO2 intensity.") + _logger.warning( + f"⚠️ Disabling online data retrieval, using {self.co2_intensity} for CO2 intensity." 
+ ) self._data_file_path: pathlib.Path | None = None @@ -136,8 +141,7 @@ def __init__(self, *args, **kwargs) -> None: self._measure_time = datetime.datetime.now() self._logger = _logger self._client: APIClient = APIClient( - co2_api_token=self.co2_signal_api_token, - timeout=10 + co2_api_token=self.co2_signal_api_token, timeout=10 ) self._processes: dict[str, ProcessData] = {} @@ -174,15 +178,15 @@ def estimate_co2_emissions(self) -> None: """Estimate the CO2 emissions""" self._logger.info("📐 Measuring CPU usage and power.") - if not self._local_data: + if self._local_data is None: raise RuntimeError("Expected local data to be initialised.") - + if not self._data_file_path: raise RuntimeError("Expected local data file to be defined.") if ( - not self.co2_intensity and - not self._local_data.setdefault(self._client.country_code, {}) + not self.co2_intensity + and not self._local_data.setdefault(self._client.country_code, {}) or self.outdated ): self._logger.info("🌍 CO2 emission outdated, calling API.") @@ -207,20 +211,25 @@ def estimate_co2_emissions(self) -> None: process.cpu_percentage = process.process.cpu_percent( interval=self.cpu_interval ) + _previous_energy: float = process.e process.power_usage = min( self.cpu_idle_power, (process.cpu_percentage / 100.0) * self.thermal_design_power_per_core, ) + process.total_energy += process.power_usage * self.cpu_interval + process.energy_delta = process.total_energy - _previous_energy # Measured value is in g/kWh, convert to kg/kWs - _carbon_intensity_kgpws: float = ( - _current_co2_intensity / (60 * 60 * 1e3) - ) + _carbon_intensity_kgpws: float = _current_co2_intensity / (60 * 60 * 1e3) + + _previous_emission: float = process.co2_emission - process.co2_emission = ( + process.co2_delta = ( process.power_usage * _carbon_intensity_kgpws * self.cpu_interval ) + process.co2_emission += process.co2_delta + self._logger.debug( f"📝 For process '{label}', recorded: CPU={process.cpu_percentage}%, " 
f"Power={process.power_usage}W, CO2={process.co2_emission}{_co2_units}" @@ -281,9 +290,7 @@ def _run( @property def last_process(self) -> str | None: - if not self._processes: - return None - return list(self._processes.keys())[-1] + return list(self._processes.keys())[-1] if self._processes else None @property def process_data(self) -> dict[str, ProcessData]: @@ -295,12 +302,24 @@ def current_carbon_intensity(self) -> float: @property def total_cpu_percentage(self) -> float: - return sum([process.cpu_percentage for process in self._processes.values()]) + return sum(process.cpu_percentage for process in self._processes.values()) @property def total_power_usage(self) -> float: - return sum([process.power_usage for process in self._processes.values()]) + return sum(process.power_usage for process in self._processes.values()) @property def total_co2_emission(self) -> float: - return sum([process.co2_emission for process in self._processes.values()]) \ No newline at end of file + return sum(process.co2_emission for process in self._processes.values()) + + @property + def total_co2_delta(self) -> float: + return sum(process.co2_delta for process in self._processes.values()) + + @property + def total_energy_delta(self) -> float: + return sum(process.energy_delta for process in self._processes.values()) + + @property + def total_energy(self) -> float: + return sum(process.energy for process in self._processes.values()) diff --git a/simvue/run.py b/simvue/run.py index 6fa9169a..7a149075 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -365,14 +365,29 @@ def _heartbeat( last_heartbeat = time.time() last_res_metric_call = time.time() - co2_step = 0 + co2_step: int = 0 + res_step: int = 0 last_co2_metric_call = time.time() if self._resources_metrics_interval: self._add_metrics_to_dispatch( self._get_sysinfo(interval=1), join_on_fail=False, step=0 ) - res_step = 1 + res_step += 1 + + if self._emission_metrics_interval: + self._emissions_monitor.estimate_co2_emissions() + 
self._add_metrics_to_dispatch( + { + "sustainability.emissions.total": self._emissions_monitor.total_co2_emission, + "sustainability.emissions.delta": self._emissions_monitor.total_co2_delta, + "sustainability.energy_consumed.total": self._emissions_monitor.total_energy, + "sustainability.energy_consumed.delta": self._emissions_monitor.total_energy_delta, + }, + join_on_fail=False, + step=0, + ) + co2_step += 1 while not heartbeat_trigger.is_set(): time.sleep(0.1) @@ -392,14 +407,21 @@ def _heartbeat( last_res_metric_call = res_time res_step += 1 if ( - self._emission_metrics_interval and self._emissions_monitor + self._emission_metrics_interval + and self._emissions_monitor and (co2_time := time.time()) - last_co2_metric_call > self._emission_metrics_interval ): self._emissions_monitor.estimate_co2_emissions() self._add_metrics_to_dispatch( - {"sustainability.emissions.total": self._emissions_monitor.total_co2_emission}, - join_on_fail=False, step=co2_step + { + "sustainability.emissions.total": self._emissions_monitor.total_co2_emission, + "sustainability.emissions.delta": self._emissions_monitor.total_co2_delta, + "sustainability.energy_consumed.total": self._emissions_monitor.total_energy, + "sustainability.energy_consumed.delta": self._emissions_monitor.total_energy_delta, + }, + join_on_fail=False, + step=co2_step, ) last_co2_metric_call = co2_time co2_step += 1 @@ -477,7 +499,7 @@ def _dispatch_callback( return _dispatch_callback - def _start(self ) -> bool: + def _start(self) -> bool: """Start a run Returns @@ -564,10 +586,6 @@ def _error(self, message: str, join_threads: bool = True) -> None: RuntimeError exception throw """ - if self._emissions_tracker: - with contextlib.suppress(Exception): - self._emissions_tracker.stop() - # Stop heartbeat if self._heartbeat_termination_trigger and self._heartbeat_thread: self._heartbeat_termination_trigger.set() diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 
2e9a8b1a..7840b51e 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -51,7 +51,7 @@ def test_check_run_initialised_decorator() -> None: @pytest.mark.codecarbon def test_run_with_emissions() -> None: with sv_run.Run() as run_created: - run_created.init(retention_period="1 min") + run_created.init(folder="/simvue_client_unit_tests", retention_period="1 min", tags=["test_run_with_emissions"]) run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) time.sleep(5) _run = RunObject(identifier=run_created.id) @@ -1022,4 +1022,4 @@ def test_reconnect(mode, monkeypatch: pytest.MonkeyPatch) -> None: assert dict(_reconnected_run.metrics)["test_metric"]["last"] == 1 assert client.get_events(run_id)[0]["message"] == "Testing!" - \ No newline at end of file + From 4978446416afedbe8080f7c787db064fe1a27195 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 7 Mar 2025 08:50:25 +0000 Subject: [PATCH 03/37] Fix wrong variable name --- simvue/eco/emissions_monitor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index d26cdeae..cdc15c2f 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -211,7 +211,7 @@ def estimate_co2_emissions(self) -> None: process.cpu_percentage = process.process.cpu_percent( interval=self.cpu_interval ) - _previous_energy: float = process.e + _previous_energy: float = process.total_energy process.power_usage = min( self.cpu_idle_power, (process.cpu_percentage / 100.0) * self.thermal_design_power_per_core, From 333d3743317452e0204b1962712389ac926f4de7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 17 Mar 2025 11:29:15 +0000 Subject: [PATCH 04/37] Fix co2 monitor default --- simvue/run.py | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 
36e9ca20..0f97539e 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -201,6 +201,7 @@ def __init__( self._heartbeat_thread: threading.Thread | None = None self._heartbeat_interval: int = HEARTBEAT_INTERVAL + self._emissions_monitor: CO2Monitor | None = None self._emission_metrics_interval: int | None = ( HEARTBEAT_INTERVAL if ( @@ -209,28 +210,6 @@ def __init__( ) else self._user_config.metrics.emission_metrics_interval ) - if mode == "offline": - if not (_co2_intensity := self._user_config.eco.co2_intensity): - self._error( - "Cannot record emission metrics, " - "a CO2 intensity value is required in offline mode." - ) - # Create an emissions monitor with no API calls - self._emissions_monitor = CO2Monitor( - intensity_refresh_rate=None, - co2_intensity=_co2_intensity, - local_data_directory=self._user_config.eco.local_data_directory, - co2_signal_api_token=None, - cpu_idle_power=self._user_config.eco.cpu_idle_power, - ) - else: - self._emissions_monitor = CO2Monitor( - intensity_refresh_rate=self._user_config.eco.intensity_refresh_rate, - local_data_directory=self._user_config.eco.local_data_directory, - co2_signal_api_token=self._user_config.eco.co2_signal_api_token, - cpu_idle_power=self._user_config.eco.cpu_idle_power, - co2_intensity=self._user_config.eco.co2_intensity, - ) def __enter__(self) -> Self: return self From 223d4c39a465c906eff6780d7db5e5a952cd6522 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 17 Mar 2025 14:29:27 +0000 Subject: [PATCH 05/37] Remove extra parameter from _start --- simvue/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/run.py b/simvue/run.py index 0f97539e..13f7769d 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -976,7 +976,7 @@ def reconnect(self, run_id: str) -> bool: self._id = run_id self._sv_obj = RunObject(identifier=self._id, _read_only=False) - self._start(reconnect=True) + self._start() return True From bd664803ebb3f7f8715d7abfb06aa07e209bbabf Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 19 Mar 2025 16:38:47 +0000 Subject: [PATCH 06/37] Added path field validation --- simvue/config/parameters.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index e0469e03..d74d2eb4 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -7,6 +7,7 @@ """ import logging +import re import time import pydantic import typing @@ -49,6 +50,16 @@ def check_token(cls, v: typing.Any) -> str | None: class OfflineSpecifications(pydantic.BaseModel): cache: pathlib.Path | None = None + @pydantic.field_validator("cache") + @classmethod + def check_valid_cache_path(cls, cache: pathlib.Path) -> pathlib.Path: + if not re.fullmatch( + r"^(\/|([a-zA-Z]:\\))?([\w\s.-]+[\\/])*[\w\s.-]*$", f"{cache}" + ): + raise AssertionError(f"Value '{cache}' is not a valid cache path.") + return cache + + class MetricsSpecifications(pydantic.BaseModel): resources_metrics_interval: pydantic.PositiveInt | None = -1 emission_metrics_interval: pydantic.PositiveInt | None = None From 8f798103602934fd6f9287999a7a56923a2ff973 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 19 Mar 2025 16:42:33 +0000 Subject: [PATCH 07/37] Refactor of heartbeat code --- simvue/config/user.py | 6 +++-- simvue/eco/api_client.py | 2 +- simvue/run.py | 41 ++++++++++++++---------------- tests/conftest.py | 26 ++++++++++++------- tests/functional/test_client.py | 5 ++-- tests/functional/test_run_class.py | 7 ++++- 6 files changed, 49 insertions(+), 38 deletions(-) diff --git a/simvue/config/user.py b/simvue/config/user.py index 78878fee..36b68142 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -43,7 +43,9 @@ class SimvueConfiguration(pydantic.BaseModel): # Hide values as they contain token and URL - model_config = pydantic.ConfigDict(hide_input_in_errors=True, revalidate_instances="always") + model_config = 
pydantic.ConfigDict( + hide_input_in_errors=True, revalidate_instances="always" + ) client: ClientGeneralOptions = ClientGeneralOptions() server: ServerSpecifications = pydantic.Field( ..., description="Specifications for Simvue server" @@ -144,7 +146,7 @@ def check_valid_server(cls, values: "SimvueConfiguration") -> "SimvueConfigurati cls._check_server(values.server.token, values.server.url, values.run.mode) return values - + @classmethod @sv_util.prettify_pydantic def fetch( diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py index 71dadd94..970b08ac 100644 --- a/simvue/eco/api_client.py +++ b/simvue/eco/api_client.py @@ -117,7 +117,7 @@ def get(self) -> CO2SignalResponse: self._logger.debug(f"🍃 Retrieving carbon intensity data for: {_params}") _response = requests.get(f"{self.co2_api_endpoint}", params=_params) - if not _response.status_code == http.HTTPStatus.OK: + if _response.status_code != http.HTTPStatus.OK: raise RuntimeError( "Failed to retrieve current CO2 signal data for" f" country '{self._two_letter_country_code}': {_response.text}" diff --git a/simvue/run.py b/simvue/run.py index 13f7769d..cf0503fb 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -413,31 +413,13 @@ def _heartbeat( # Check if the user has aborted the run with self._configuration_lock: if self._sv_obj and self._sv_obj.abort_trigger: - self._alert_raised_trigger.set() - logger.debug("Received abort request from server") - - if abort_callback is not None: - abort_callback(self) # type: ignore - - if self._abort_on_alert != "ignore": - self.kill_all_processes() - if self._dispatcher and self._shutdown_event: - self._shutdown_event.set() - self._dispatcher.purge() - self._dispatcher.join() - if self._active: - self.set_status("terminated") - click.secho( - "[simvue] Run was aborted.", - fg="red" if self._term_color else None, - bold=self._term_color, - ) - if self._abort_on_alert == "terminate": - os._exit(1) + self._terminate_run(abort_callback=abort_callback) if 
self._sv_obj: self._sv_obj.send_heartbeat() + time.sleep(self.loop_frequency) + return _heartbeat def _create_dispatch_callback( @@ -957,6 +939,22 @@ def id(self) -> str | None: """Return the unique id of the run""" return self._id + @property + def loop_frequency(self) -> int: + """Returns the current frequency of monitoring. + + This value is the maximum frequency of heartbeat, + emissions metric and resource metric measuring. + """ + # There is no point the loop interval being greater + # than any of the metric push or heartbeat intervals + # where None use heartbeat value as default + return min( + self._heartbeat_interval, + self._resources_metrics_interval or self._heartbeat_interval, + self._emission_metrics_interval or self._heartbeat_interval, + ) + @skip_if_failed("_aborted", "_suppress_errors", False) @pydantic.validate_call def reconnect(self, run_id: str) -> bool: @@ -998,7 +996,6 @@ def set_pid(self, pid: int) -> None: _process.cpu_percent() for _process in self._child_processes + [self._parent_process] ] - time.sleep(0.1) @skip_if_failed("_aborted", "_suppress_errors", False) @pydantic.validate_call diff --git a/tests/conftest.py b/tests/conftest.py index 01c5cf4e..02edc3a0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import contextlib +from _pytest import monkeypatch from numpy import fix import pytest import pytest_mock @@ -42,6 +43,11 @@ def clear_out_files() -> None: file_obj.unlink() +@pytest.fixture +def speedy_heartbeat(monkeypatch: monkeypatch.MonkeyPatch) -> None: + monkeypatch.setattr(sv_run, "HEARTBEAT_INTERVAL", 0.1) + + @pytest.fixture(autouse=True) def setup_logging() -> CountingLogHandler: logging.basicConfig(level=logging.DEBUG) @@ -57,7 +63,13 @@ def log_messages(caplog): @pytest.fixture -def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def prevent_script_exit(monkeypatch: monkeypatch.MonkeyPatch) -> None: + _orig_func = sv_run.Run._terminate_run + 
monkeypatch.setattr(sv_run.Run, "_terminate_run", lambda *args, **kwargs: _orig_func(*args, force_exit=False, **kwargs)) + + +@pytest.fixture +def create_test_run(request, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with sv_run.Run() as run: _test_run_data = setup_test_run(run, True, request) yield run, _test_run_data @@ -70,10 +82,9 @@ def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], @pytest.fixture -def create_test_run_offline(mocker: pytest_mock.MockerFixture, request, monkeypatch: pytest.MonkeyPatch) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_test_run_offline(request, monkeypatch: pytest.MonkeyPatch, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: def testing_exit(status: int) -> None: raise SystemExit(status) - mocker.patch("os._exit", testing_exit) with tempfile.TemporaryDirectory() as temp_d: monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) with sv_run.Run("offline") as run: @@ -82,24 +93,23 @@ def testing_exit(status: int) -> None: @pytest.fixture -def create_plain_run(request, mocker: pytest_mock.MockFixture) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_plain_run(request, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: def testing_exit(status: int) -> None: raise SystemExit(status) - mocker.patch("os._exit", testing_exit) with sv_run.Run() as run: yield run, setup_test_run(run, False, request) clear_out_files() @pytest.fixture -def create_pending_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_pending_run(request, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with sv_run.Run() as run: yield run, setup_test_run(run, False, request, True) clear_out_files() @pytest.fixture -def create_plain_run_offline(mocker: pytest_mock.MockerFixture, request, monkeypatch: 
pytest.MonkeyPatch) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_plain_run_offline(request,prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with tempfile.TemporaryDirectory() as temp_d: monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) with sv_run.Run("offline") as run: @@ -111,7 +121,6 @@ def create_plain_run_offline(mocker: pytest_mock.MockerFixture, request, monkeyp def create_run_object(mocker: pytest_mock.MockFixture) -> sv_api_obj.Run: def testing_exit(status: int) -> None: raise SystemExit(status) - mocker.patch("os._exit", testing_exit) _fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0] _folder = sv_api_obj.Folder.new(path=f"/simvue_unit_testing/{_fix_use_id}") _folder.commit() @@ -138,7 +147,6 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur TEST_DATA["tags"].append("ci") run.config(suppress_errors=False) - run._heartbeat_interval = 1 run.init( name=TEST_DATA['metadata']['test_identifier'], tags=TEST_DATA["tags"], diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 85407b6c..428146cb 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -384,14 +384,13 @@ def test_alert_deletion() -> None: @pytest.mark.client -def test_abort_run(create_plain_run: tuple[sv_run.Run, dict]) -> None: +def test_abort_run(speedy_heartbeat, create_plain_run: tuple[sv_run.Run, dict]) -> None: run, run_data = create_plain_run _uuid = f"{uuid.uuid4()}".split("-")[0] run.update_tags([f"delete_me_{_uuid}"]) - time.sleep(1) _client = svc.Client() _client.abort_run(run.id, reason="Test abort") - time.sleep(1) + time.sleep(0.5) assert run._status == "terminated" diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 82a70802..265d6fd9 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -1019,9 +1019,11 @@ def 
test_run_created_with_no_timeout() -> None: @pytest.mark.parametrize("mode", ("online", "offline"), ids=("online", "offline")) @pytest.mark.run def test_reconnect(mode, monkeypatch: pytest.MonkeyPatch) -> None: + temp_d: tempfile.TemporaryDirectory | None = None + if mode == "offline": temp_d = tempfile.TemporaryDirectory() - monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) + monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d.name) with simvue.Run(mode=mode) as run: run.init( @@ -1053,3 +1055,6 @@ def test_reconnect(mode, monkeypatch: pytest.MonkeyPatch) -> None: assert dict(_reconnected_run.metrics)["test_metric"]["last"] == 1 assert client.get_events(run_id)[0]["message"] == "Testing!" + if temp_d: + temp_d.cleanup() + From bd10c4474f64786ee236ef4c6d44fdc6f9d76953 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 19 Mar 2025 16:42:57 +0000 Subject: [PATCH 08/37] Started simplifying ecoclient --- simvue/eco/emissions_monitor.py | 146 +++++----------------- simvue/metrics.py | 42 ++++++- simvue/run.py | 215 +++++++++++++++++++------------- 3 files changed, 195 insertions(+), 208 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index cdc15c2f..0f9d7966 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -6,22 +6,15 @@ """ __author__ = "Kristian Zarebski" -__version__ = "0.1.0" -__license__ = "MIT" __date__ = "2025-02-27" import datetime import json import pydantic import dataclasses -import threading -import time import logging -import typing -import psutil import humanfriendly import pathlib -import os from simvue.eco.api_client import APIClient, CO2SignalResponse @@ -30,7 +23,6 @@ @dataclasses.dataclass class ProcessData: - process: psutil.Process cpu_percentage: float = 0.0 power_usage: float = 0.0 total_energy: float = 0.0 @@ -145,38 +137,13 @@ def __init__(self, *args, **kwargs) -> None: ) self._processes: dict[str, ProcessData] = {} - 
@pydantic.validate_call(config={"arbitrary_types_allowed": True}) - def attach_process( - self, process: psutil.Process | None = None, label: str | None = None - ) -> str: - """ - Attach a process to the CO2 Monitor. - - Parameters - ---------- - process : psutil.Process | None - The process to monitor, if None measures the current running process. Default is None. - label : str | None - The label to assign to the process. Default is process_. - - Returns - ------- - int - The PID of the process. - """ - if process is None: - process = psutil.Process(pid=os.getpid()) - - self._logger.info(f"📎 Attaching process with PID {process.pid}") - - label = label or f"process_{process.pid}" - self._processes[label] = ProcessData(process=process) - - return label - - def estimate_co2_emissions(self) -> None: + def estimate_co2_emissions( + self, process_id: str, cpu_percent: float, cpu_interval: float + ) -> None: """Estimate the CO2 emissions""" - self._logger.info("📐 Measuring CPU usage and power.") + self._logger.debug( + f"📐 Estimating CO2 emissions from CPU usage of {cpu_percent}% in interval {cpu_interval}s." 
+ ) if self._local_data is None: raise RuntimeError("Expected local data to be initialised.") @@ -184,6 +151,9 @@ def estimate_co2_emissions(self) -> None: if not self._data_file_path: raise RuntimeError("Expected local data file to be defined.") + if not (_process := self._processes.get(process_id)): + self._processes[process_id] = (_process := ProcessData()) + if ( not self.co2_intensity and not self._local_data.setdefault(self._client.country_code, {}) @@ -207,86 +177,30 @@ def estimate_co2_emissions(self) -> None: _current_co2_intensity = self._current_co2_data.data.carbon_intensity _co2_units = self._current_co2_data.carbon_intensity_units - for label, process in self._processes.items(): - process.cpu_percentage = process.process.cpu_percent( - interval=self.cpu_interval - ) - _previous_energy: float = process.total_energy - process.power_usage = min( - self.cpu_idle_power, - (process.cpu_percentage / 100.0) * self.thermal_design_power_per_core, - ) - process.total_energy += process.power_usage * self.cpu_interval - process.energy_delta = process.total_energy - _previous_energy - - # Measured value is in g/kWh, convert to kg/kWs - _carbon_intensity_kgpws: float = _current_co2_intensity / (60 * 60 * 1e3) - - _previous_emission: float = process.co2_emission + _process.cpu_percentage = cpu_percent + _previous_energy: float = _process.total_energy + _process.power_usage = min( + self.cpu_idle_power, + (_process.cpu_percentage / 100.0) * self.thermal_design_power_per_core, + ) + _process.total_energy += _process.power_usage * self.cpu_interval + _process.energy_delta = _process.total_energy - _previous_energy - process.co2_delta = ( - process.power_usage * _carbon_intensity_kgpws * self.cpu_interval - ) + # Measured value is in g/kWh, convert to kg/kWs + _carbon_intensity_kgpws: float = _current_co2_intensity / (60 * 60 * 1e3) - process.co2_emission += process.co2_delta + _previous_emission: float = _process.co2_emission - self._logger.debug( - f"📝 For process 
'{label}', recorded: CPU={process.cpu_percentage}%, " - f"Power={process.power_usage}W, CO2={process.co2_emission}{_co2_units}" - ) + _process.co2_delta = ( + _process.power_usage * _carbon_intensity_kgpws * self.cpu_interval + ) - @pydantic.validate_call(config={"arbitrary_types_allowed": True}) - def run( - self, - termination_trigger: threading.Event, - callback: typing.Callable, - measure_interval: pydantic.PositiveFloat = pydantic.Field(default=10.0, gt=2.0), - return_all: bool = False, - ) -> None: - """Run the API client in a thread. + _process.co2_emission += _process.co2_delta - Parameters - ---------- - termination_trigger : threading.Event - thread event used to terminate monitor - callback : typing.Callable - callback to execute on measured results - measure_interval : float, optional - interval of measurement, note the API is limited at a rate of 30 requests per - hour, therefore any interval less than 2 minutes will use the previously recorded CO2 intensity. - Default is 10 seconds. - return_all : bool, optional - whether to return all processes or just the current. Default is False. - - Returns - ------- - ProcessData | dict[str, ProcessData] - Either the process data for the current process or for all processes. 
- """ - self._logger.info("🧵 Launching monitor in multi-threaded mode.") - self._logger.info(f"⌚ Will record at interval of {measure_interval}s.") - - def _run( - monitor: "CO2Monitor" = self, - callback: typing.Callable = callback, - return_all: bool = return_all, - ) -> None: - if not return_all and not monitor.last_process: - raise ValueError("No processes attached to monitor.") - - while not termination_trigger.is_set(): - monitor.estimate_co2_emissions() - # Depending on user choice either - # return all process data or just the last - callback( - monitor.process_data - if return_all - else monitor.process_data[monitor.last_process] # type: ignore - ) - time.sleep(measure_interval) - - _thread = threading.Thread(target=_run) - _thread.start() + self._logger.debug( + f"📝 For _process '{process_id}', recorded: CPU={_process.cpu_percentage}%, " + f"Power={_process.power_usage}W, CO2={_process.co2_emission}{_co2_units}" + ) @property def last_process(self) -> str | None: @@ -300,10 +214,6 @@ def process_data(self) -> dict[str, ProcessData]: def current_carbon_intensity(self) -> float: return self._client.get().data.carbon_intensity - @property - def total_cpu_percentage(self) -> float: - return sum(process.cpu_percentage for process in self._processes.values()) - @property def total_power_usage(self) -> float: return sum(process.power_usage for process in self._processes.values()) diff --git a/simvue/metrics.py b/simvue/metrics.py index 6b224106..0bb81d13 100644 --- a/simvue/metrics.py +++ b/simvue/metrics.py @@ -10,6 +10,7 @@ import logging import psutil + from .pynvml import ( nvmlDeviceGetComputeRunningProcesses, nvmlDeviceGetCount, @@ -21,6 +22,8 @@ nvmlShutdown, ) +RESOURCES_METRIC_PREFIX: str = "resources" + logger = logging.getLogger(__name__) @@ -65,11 +68,11 @@ def is_gpu_used(handle, processes: list[psutil.Process]) -> bool: return len(list(set(gpu_pids) & set(pids))) > 0 -def get_gpu_metrics(processes: list[psutil.Process]) -> dict[str, float]: +def 
get_gpu_metrics(processes: list[psutil.Process]) -> list[tuple[float, float]]: """ Get GPU metrics """ - gpu_metrics: dict[str, float] = {} + gpu_metrics: list[tuple[float, float]] = [] with contextlib.suppress(Exception): nvmlInit() @@ -80,11 +83,38 @@ def get_gpu_metrics(processes: list[psutil.Process]) -> dict[str, float]: utilisation_percent = nvmlDeviceGetUtilizationRates(handle).gpu memory = nvmlDeviceGetMemoryInfo(handle) memory_percent = 100 * memory.free / memory.total - gpu_metrics[f"resources/gpu.utilisation.percent.{i}"] = ( - utilisation_percent - ) - gpu_metrics[f"resources/gpu.memory.percent.{i}"] = memory_percent + gpu_metrics.append((utilisation_percent, memory_percent)) nvmlShutdown() return gpu_metrics + + +class SystemResourceMeasurement: + def __init__( + self, + processes: list[psutil.Process], + interval: float | None, + cpu_only: bool = False, + ) -> None: + self.cpu_percent: float | None = get_process_cpu(processes, interval=interval) + self.cpu_memory: float | None = get_process_memory(processes) + self.gpus: list[dict[str, float]] = ( + None if cpu_only else get_gpu_metrics(processes) + ) + + def to_dict(self) -> dict[str, float]: + _metrics: dict[str, float] = { + f"{RESOURCES_METRIC_PREFIX}/cpu.usage.percentage": self.cpu_percent, + f"{RESOURCES_METRIC_PREFIX}/cpu.usage.memory": self.cpu_memory, + } + + for i, gpu in enumerate(self.gpus): + _metrics[f"{RESOURCES_METRIC_PREFIX}/gpu.utilisation.percent.{i}"] = gpu[ + "utilisation" + ] + _metrics[f"{RESOURCES_METRIC_PREFIX}/gpu.utilisation.memory.{i}"] = gpu[ + "memory" + ] + + return _metrics diff --git a/simvue/run.py b/simvue/run.py index cf0503fb..1e43b00b 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -39,7 +39,7 @@ from .factory.dispatch import Dispatcher from .executor import Executor -from .metrics import get_gpu_metrics, get_process_cpu, get_process_memory +from .metrics import SystemResourceMeasurement from .models import FOLDER_REGEX, NAME_REGEX, MetricKeyString from .system 
import get_system from .metadata import git_info, environment @@ -293,38 +293,105 @@ def processes(self) -> list[psutil.Process]: return list(set(process_list)) - def _get_sysinfo(self, interval: float | None = None) -> dict[str, typing.Any]: - """Retrieve system administration + def _terminate_run( + self, + abort_callback: typing.Callable[[Self], None] | None, + force_exit: bool = True, + ) -> None: + """Close the current simvue Run and its subprocesses. + + Closes the run and all subprocesses with the default to being also. + To abort the actual Python execution as well. Parameters ---------- - interval : float | None - The interval to use for collection of CPU metrics, by default None (non blocking) + abort_callback: Callable, optional + the callback to execute on the termination else None + force_exit: bool, optional + whether to close Python itself, the default is True + """ + self._alert_raised_trigger.set() + logger.debug("Received abort request from server") - Returns - ------- - dict[str, typing.Any] - retrieved system specifications + if abort_callback is not None: + abort_callback(self) # type: ignore + + if self._abort_on_alert != "ignore": + self.kill_all_processes() + if self._dispatcher and self._shutdown_event: + self._shutdown_event.set() + self._dispatcher.purge() + self._dispatcher.join() + if self._active: + self.set_status("terminated") + click.secho( + "[simvue] Run was aborted.", + fg="red" if self._term_color else None, + bold=self._term_color, + ) + if self._abort_on_alert == "terminate": + os._exit(1) if force_exit else sys.exit(1) + + def _get_internal_metrics( + self, + resource_metrics_step: int | None, + emission_metrics_step: int | None, + ) -> None: + """Refresh resource and emissions metrics. + + Checks if the refresh interval has been satisfied for emissions + and resource metrics, if so adds latest values to dispatch. 
+ + Parameters + ---------- + res_metric_prev_time: float + the previous time at which resource metrics were recorded. + ems_metric_prev_time: float + the previous time at which emissions metrics were recorded. + res_metric_step: int + the value count for resource metrics for this run. + ems_metric_step: int + the value count for emissions metrics for this run. + + Return + ------ + tuple[float, float] + new resource metric measure time + new emissions metric measure time """ - processes = self.processes - cpu = get_process_cpu(processes, interval=interval) - memory = get_process_memory(processes) - gpu = get_gpu_metrics(processes) - data: dict[str, typing.Any] = {} - - if memory is not None and cpu is not None: - data = { - f"{RESOURCES_METRIC_PREFIX}/cpu.usage.percent": cpu, - f"{RESOURCES_METRIC_PREFIX}/memory.usage": memory, - } - if gpu: - for item in gpu: - data[item] = gpu[item] - return data + _current_system_measure = SystemResourceMeasurement( + self.processes, interval=None, cpu_only=not resource_metrics_step + ) + + if resource_metrics_step is not None: + # Set join on fail to false as if an error is thrown + # join would be called on this thread and a thread cannot + # join itself! 
+ self._add_metrics_to_dispatch( + _current_system_measure.to_dict(), + join_on_fail=False, + step=resource_metrics_step, + ) + + if emission_metrics_step is not None: + self._emissions_monitor.estimate_co2_emissions( + cpu_percent=_current_system_measure.cpu_percent + ) + self._add_metrics_to_dispatch( + { + "sustainability.emissions.total": self._emissions_monitor.total_co2_emission, + "sustainability.emissions.delta": self._emissions_monitor.total_co2_delta, + "sustainability.energy_consumed.total": self._emissions_monitor.total_energy, + "sustainability.energy_consumed.delta": self._emissions_monitor.total_energy_delta, + }, + join_on_fail=False, + step=emission_metrics_step, + ) def _create_heartbeat_callback( self, ) -> typing.Callable[[threading.Event], None]: + """Defines the callback executed at the heartbeat interval for the Run.""" if ( self._user_config.run.mode == "online" and (not self._user_config.server.url or not self._id) @@ -332,80 +399,60 @@ def _create_heartbeat_callback( raise RuntimeError("Could not commence heartbeat, run not initialised") def _heartbeat( - heartbeat_trigger: typing.Optional[ - threading.Event - ] = self._heartbeat_termination_trigger, - abort_callback: typing.Optional[ - typing.Callable[[Self], None] - ] = self._abort_callback, + heartbeat_trigger: threading.Event + | None = self._heartbeat_termination_trigger, + abort_callback: typing.Callable[[Self], None] | None = self._abort_callback, ) -> None: if not heartbeat_trigger: raise RuntimeError("Expected initialisation of heartbeat") - last_heartbeat = time.time() - last_res_metric_call = time.time() + last_heartbeat: float = 0 + last_res_metric_call: float = 0 + last_co2_metric_call: float = 0 + co2_step: int = 0 res_step: int = 0 - last_co2_metric_call = time.time() - - if self._resources_metrics_interval: - self._add_metrics_to_dispatch( - self._get_sysinfo(interval=1), join_on_fail=False, step=0 - ) - res_step += 1 - - if self._emission_metrics_interval: - 
self._emissions_monitor.estimate_co2_emissions() - self._add_metrics_to_dispatch( - { - "sustainability.emissions.total": self._emissions_monitor.total_co2_emission, - "sustainability.emissions.delta": self._emissions_monitor.total_co2_delta, - "sustainability.energy_consumed.total": self._emissions_monitor.total_energy, - "sustainability.energy_consumed.delta": self._emissions_monitor.total_energy_delta, - }, - join_on_fail=False, - step=0, - ) - co2_step += 1 while not heartbeat_trigger.is_set(): - time.sleep(0.1) - with self._configuration_lock: - if ( - self._resources_metrics_interval - and (res_time := time.time()) - last_res_metric_call + _current_time: float = time.time() + _update_resource_metrics: bool = ( + self._resources_metrics_interval is not None + and _current_time - last_res_metric_call > self._resources_metrics_interval - ): - # Set join on fail to false as if an error is thrown - # join would be called on this thread and a thread cannot - # join itself! - self._add_metrics_to_dispatch( - self._get_sysinfo(), join_on_fail=False, step=res_step - ) - last_res_metric_call = res_time - res_step += 1 - if ( - self._emission_metrics_interval + ) + _update_emissions_metrics: bool = ( + self._emission_metrics_interval is not None and self._emissions_monitor - and (co2_time := time.time()) - last_co2_metric_call + and _current_time - last_co2_metric_call > self._emission_metrics_interval - ): - self._emissions_monitor.estimate_co2_emissions() - self._add_metrics_to_dispatch( - { - "sustainability.emissions.total": self._emissions_monitor.total_co2_emission, - "sustainability.emissions.delta": self._emissions_monitor.total_co2_delta, - "sustainability.energy_consumed.total": self._emissions_monitor.total_energy, - "sustainability.energy_consumed.delta": self._emissions_monitor.total_energy_delta, - }, - join_on_fail=False, - step=co2_step, - ) - last_co2_metric_call = co2_time - co2_step += 1 + ) + + self._get_internal_metrics( + 
emission_metrics_step=co2_step + if _update_emissions_metrics + else None, + resource_metrics_step=res_step + if _update_resource_metrics + else None, + ) + + res_step += int(_update_resource_metrics) + co2_step += int(_update_emissions_metrics) + + last_res_metric_call = ( + _current_time + if _update_resource_metrics + else last_res_metric_call + ) + last_co2_metric_call = ( + _current_time + if _update_emissions_metrics + else last_co2_metric_call + ) if time.time() - last_heartbeat < self._heartbeat_interval: + time.sleep(self.loop_frequency) continue last_heartbeat = time.time() From 3aab367fda635ab8cc182f7263a4a4c1f8581560 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 20 Mar 2025 10:53:45 +0000 Subject: [PATCH 09/37] Fix emissions estimates using resource metrics CPU info --- simvue/config/parameters.py | 1 - simvue/eco/emissions_monitor.py | 2 +- simvue/metrics.py | 2 +- simvue/run.py | 51 +++++++++++++++--------------- tests/functional/test_run_class.py | 2 +- 5 files changed, 29 insertions(+), 29 deletions(-) diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index d74d2eb4..6bed98f1 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -62,7 +62,6 @@ def check_valid_cache_path(cls, cache: pathlib.Path) -> pathlib.Path: class MetricsSpecifications(pydantic.BaseModel): resources_metrics_interval: pydantic.PositiveInt | None = -1 - emission_metrics_interval: pydantic.PositiveInt | None = None enable_emission_metrics: bool = False diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 0f9d7966..394ca2ad 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -232,4 +232,4 @@ def total_energy_delta(self) -> float: @property def total_energy(self) -> float: - return sum(process.energy for process in self._processes.values()) + return sum(process.total_energy for process in self._processes.values()) diff --git 
a/simvue/metrics.py b/simvue/metrics.py index 0bb81d13..3785a325 100644 --- a/simvue/metrics.py +++ b/simvue/metrics.py @@ -109,7 +109,7 @@ def to_dict(self) -> dict[str, float]: f"{RESOURCES_METRIC_PREFIX}/cpu.usage.memory": self.cpu_memory, } - for i, gpu in enumerate(self.gpus): + for i, gpu in enumerate(self.gpus or []): _metrics[f"{RESOURCES_METRIC_PREFIX}/gpu.utilisation.percent.{i}"] = gpu[ "utilisation" ] diff --git a/simvue/run.py b/simvue/run.py index 1e43b00b..241a4b4e 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -9,6 +9,7 @@ import contextlib import logging import pathlib +import math import mimetypes import multiprocessing.synchronize import threading @@ -202,14 +203,6 @@ def __init__( self._heartbeat_interval: int = HEARTBEAT_INTERVAL self._emissions_monitor: CO2Monitor | None = None - self._emission_metrics_interval: int | None = ( - HEARTBEAT_INTERVAL - if ( - (_interval := self._user_config.metrics.emission_metrics_interval) - and _interval < 1 - ) - else self._user_config.metrics.emission_metrics_interval - ) def __enter__(self) -> Self: return self @@ -336,6 +329,8 @@ def _get_internal_metrics( self, resource_metrics_step: int | None, emission_metrics_step: int | None, + res_measure_interval: int | None = None, + ems_measure_interval: int | None = None, ) -> None: """Refresh resource and emissions metrics. @@ -344,6 +339,7 @@ def _get_internal_metrics( Parameters ---------- + #TODO: update docs res_metric_prev_time: float the previous time at which resource metrics were recorded. 
ems_metric_prev_time: float @@ -360,7 +356,9 @@ def _get_internal_metrics( new emissions metric measure time """ _current_system_measure = SystemResourceMeasurement( - self.processes, interval=None, cpu_only=not resource_metrics_step + self.processes, + interval=res_measure_interval, + cpu_only=not resource_metrics_step, ) if resource_metrics_step is not None: @@ -375,7 +373,9 @@ def _get_internal_metrics( if emission_metrics_step is not None: self._emissions_monitor.estimate_co2_emissions( - cpu_percent=_current_system_measure.cpu_percent + process_id=f"{self._name}", + cpu_percent=_current_system_measure.cpu_percent, + cpu_interval=self._resources_metrics_interval, ) self._add_metrics_to_dispatch( { @@ -413,6 +413,8 @@ def _heartbeat( co2_step: int = 0 res_step: int = 0 + initial_ems_metrics_interval: float = time.time() - self._start_time + while not heartbeat_trigger.is_set(): with self._configuration_lock: _current_time: float = time.time() @@ -422,12 +424,17 @@ def _heartbeat( > self._resources_metrics_interval ) _update_emissions_metrics: bool = ( - self._emission_metrics_interval is not None + self._resources_metrics_interval is not None and self._emissions_monitor and _current_time - last_co2_metric_call - > self._emission_metrics_interval + > self._resources_metrics_interval ) + # In order to get a resource metric reading at t=0 + # because there is no previous CPU reading yet we cannot + # use the default of None for the interval here, so we measure + # at an interval of 1s. 
For emissions metrics the first step + # is time since run start self._get_internal_metrics( emission_metrics_step=co2_step if _update_emissions_metrics @@ -435,10 +442,14 @@ def _heartbeat( resource_metrics_step=res_step if _update_resource_metrics else None, + res_measure_interval=1 if res_step == 0 else None, + ems_measure_interval=initial_ems_metrics_interval + if co2_step == 0 + else self._resources_metrics_interval, ) - res_step += int(_update_resource_metrics) - co2_step += int(_update_emissions_metrics) + res_step += 1 + co2_step += 1 last_res_metric_call = ( _current_time @@ -996,10 +1007,9 @@ def loop_frequency(self) -> int: # There is no point the loop interval being greater # than any of the metric push or heartbeat intervals # where None use heartbeat value as default - return min( + return math.gcd( self._heartbeat_interval, self._resources_metrics_interval or self._heartbeat_interval, - self._emission_metrics_interval or self._heartbeat_interval, ) @skip_if_failed("_aborted", "_suppress_errors", False) @@ -1052,7 +1062,6 @@ def config( suppress_errors: bool | None = None, queue_blocking: bool | None = None, resources_metrics_interval: pydantic.PositiveInt | None = None, - emission_metrics_interval: pydantic.PositiveInt | None = None, enable_emission_metrics: bool | None = None, disable_resources_metrics: bool | None = None, storage_id: str | None = None, @@ -1104,14 +1113,6 @@ def config( self._pid = None self._resources_metrics_interval = None - if emission_metrics_interval: - if not enable_emission_metrics: - self._error( - "Cannot set rate of emission metrics, these metrics have been disabled" - ) - return False - self._emission_metrics_interval = emission_metrics_interval - if enable_emission_metrics: if self._user_config.run.mode == "offline": if not (_co2_intensity := self._user_config.eco.co2_intensity): diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 265d6fd9..69e20597 100644 --- 
a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -52,7 +52,7 @@ def test_check_run_initialised_decorator() -> None: def test_run_with_emissions() -> None: with sv_run.Run() as run_created: run_created.init(folder="/simvue_client_unit_tests", retention_period="1 min", tags=["test_run_with_emissions"]) - run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) + run_created.config(enable_emission_metrics=True, resources_metrics_interval=1) time.sleep(5) _run = RunObject(identifier=run_created.id) _metric_names = [item[0] for item in _run.metrics] From 2f104e883204d4d7078ba48eaabb1eddfd6e503b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 20 Mar 2025 15:32:06 +0000 Subject: [PATCH 10/37] Add GPU metrics --- simvue/eco/api_client.py | 3 +- simvue/eco/config.py | 18 ++++++- simvue/eco/emissions_monitor.py | 67 ++++++++++++++++-------- simvue/metrics.py | 82 ++++++++++++++++++++++++++---- simvue/run.py | 16 +++--- tests/functional/test_run_class.py | 7 ++- 6 files changed, 150 insertions(+), 43 deletions(-) diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py index 970b08ac..04c781f7 100644 --- a/simvue/eco/api_client.py +++ b/simvue/eco/api_client.py @@ -89,7 +89,8 @@ def __init__(self, *args, **kwargs) -> None: if not self.co2_api_token: self._logger.warning( - "⚠️ No API token provided for CO2 Signal, it is recommended " + "⚠️ No API token provided for CO2 Signal, " + "use of a token is strongly recommended." ) self._get_user_location_info() diff --git a/simvue/eco/config.py b/simvue/eco/config.py index ecc60fe0..de681c19 100644 --- a/simvue/eco/config.py +++ b/simvue/eco/config.py @@ -15,9 +15,23 @@ class EcoConfig(pydantic.BaseModel): + """Configurations for CO2 emission metrics gathering. 
+ + Parameters + ---------- + co2_signal_api_token: str | None, optional + the CO2 signal API token (Recommended), default is None + cpu_thermal_design_power: int | None, optional + the TDP for the CPU + gpu_thermal_design_power: int | None, optional + the TDP for each GPU + local_data_directory: str, optional + the directory to store local data, default is Simvue offline directory + """ + co2_signal_api_token: pydantic.SecretStr | None = None - cpu_thermal_design_power: pydantic.PositiveInt = 80 - cpu_idle_power: pydantic.PositiveFloat = 10 + cpu_thermal_design_power: pydantic.PositiveInt | None = None + gpu_thermal_design_power: pydantic.PositiveInt | None = None local_data_directory: pydantic.DirectoryPath | None = pydantic.Field( None, validate_default=True ) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 394ca2ad..152e242f 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -24,6 +24,7 @@ @dataclasses.dataclass class ProcessData: cpu_percentage: float = 0.0 + gpu_percentage: float | None = None power_usage: float = 0.0 total_energy: float = 0.0 energy_delta: float = 0.0 @@ -38,9 +39,8 @@ class CO2Monitor(pydantic.BaseModel): Provides an interface for estimating CO2 usage for processes on the CPU. 
""" - thermal_design_power_per_core: pydantic.PositiveFloat | None - cpu_idle_power: pydantic.PositiveFloat - cpu_interval: float = 1.0 + thermal_design_power_per_cpu: pydantic.PositiveFloat | None + thermal_design_power_per_gpu: pydantic.PositiveFloat | None local_data_directory: pydantic.DirectoryPath intensity_refresh_rate: int | None | str co2_intensity: float | None @@ -58,9 +58,8 @@ def outdated(self) -> bool: return False _now: datetime.datetime = datetime.datetime.now() - _last_updated: str = self._local_data["last_updated"] _latest_time: datetime.datetime = datetime.datetime.strptime( - _last_updated, TIME_FORMAT + self._local_data["last_updated"], TIME_FORMAT ) return (_now - _latest_time).seconds < self.intensity_refresh_rate @@ -83,12 +82,10 @@ def __init__(self, *args, **kwargs) -> None: Parameters ---------- - thermal_design_power_per_core: float | None - the TDP value for the CPU. Default of None uses naive 85W value. - cpu_idle_power: float - the idle power of the CPU, default is naive value of 10W. - cpu_interval: float - the interval within which to measure average CPU percentage, default is 1s. + thermal_design_power_per_cpu: float | None + the TDP value for each CPU, default is 80W. + thermal_design_power_per_gpu: float | None + the TDP value for each GPU, default is 130W. local_data_directory: pydantic.DirectoryPath the directory in which to store CO2 intensity data. intensity_refresh_rate: int | str | None @@ -102,11 +99,18 @@ def __init__(self, *args, **kwargs) -> None: RECOMMENDED. The API token for CO2 signal, default is None. """ _logger = logging.getLogger(self.__class__.__name__) - if not isinstance(kwargs.get("thermal_design_power_per_core"), float): - kwargs["thermal_design_power_per_core"] = 80.0 + + if not isinstance(kwargs.get("thermal_design_power_per_cpu"), float): + kwargs["thermal_design_power_per_cpu"] = 80.0 _logger.warning( "⚠️ No TDP value provided for current CPU, will use arbitrary value of 80W." 
)
+
+        if not isinstance(kwargs.get("thermal_design_power_per_gpu"), float):
+            kwargs["thermal_design_power_per_gpu"] = 130.0
+            _logger.warning(
+                "⚠️ No TDP value provided for current GPUs, will use arbitrary value of 130W."
+            )
         super().__init__(*args, **kwargs)
 
         if self.intensity_refresh_rate and isinstance(self.intensity_refresh_rate, str):
@@ -138,11 +142,15 @@ def __init__(self, *args, **kwargs) -> None:
         self._processes: dict[str, ProcessData] = {}
 
     def estimate_co2_emissions(
-        self, process_id: str, cpu_percent: float, cpu_interval: float
+        self,
+        process_id: str,
+        cpu_percent: float,
+        gpu_percent: float | None,
+        measure_interval: float,
     ) -> None:
         """Estimate the CO2 emissions"""
         self._logger.debug(
-            f"📐 Estimating CO2 emissions from CPU usage of {cpu_percent}% in interval {cpu_interval}s."
+            f"📐 Estimating CO2 emissions from CPU usage of {cpu_percent}% in interval {measure_interval}s."
         )
 
         if self._local_data is None:
@@ -177,13 +185,19 @@ def estimate_co2_emissions(
             _current_co2_intensity = self._current_co2_data.data.carbon_intensity
             _co2_units = self._current_co2_data.carbon_intensity_units
 
+        _process.gpu_percentage = gpu_percent
         _process.cpu_percentage = cpu_percent
         _previous_energy: float = _process.total_energy
-        _process.power_usage = min(
-            self.cpu_idle_power,
-            (_process.cpu_percentage / 100.0) * self.thermal_design_power_per_core,
-        )
-        _process.total_energy += _process.power_usage * self.cpu_interval
+        _process.power_usage = (
+            _process.cpu_percentage / 100.0
+        ) * self.thermal_design_power_per_cpu
+
+        if _process.gpu_percentage and self.thermal_design_power_per_gpu:
+            _process.power_usage += (
+                _process.gpu_percentage / 100.0
+            ) * self.thermal_design_power_per_gpu
+
+        _process.total_energy += _process.power_usage * measure_interval
         _process.energy_delta = _process.total_energy - _previous_energy
 
         # Measured value is in g/kWh, convert to kg/kWs
@@ -192,7 +206,7 @@
         _previous_emission: float = _process.co2_emission
_process.co2_delta = ( - _process.power_usage * _carbon_intensity_kgpws * self.cpu_interval + _process.power_usage * _carbon_intensity_kgpws * measure_interval ) _process.co2_emission += _process.co2_delta @@ -202,6 +216,17 @@ def estimate_co2_emissions( f"Power={_process.power_usage}W, CO2={_process.co2_emission}{_co2_units}" ) + def simvue_metrics(self) -> dict[str, float]: + """Retrieve metrics to send to Simvue server.""" + return ( + { + "sustainability.emissions.total": self.total_co2_emission, + "sustainability.emissions.delta": self.total_co2_delta, + "sustainability.energy_consumed.total": self.total_energy, + "sustainability.energy_consumed.delta": self.total_energy_delta, + }, + ) + @property def last_process(self) -> str | None: return list(self._processes.keys())[-1] if self._processes else None diff --git a/simvue/metrics.py b/simvue/metrics.py index 3785a325..0e64f357 100644 --- a/simvue/metrics.py +++ b/simvue/metrics.py @@ -28,8 +28,17 @@ def get_process_memory(processes: list[psutil.Process]) -> int: - """ - Get the resident set size + """Get the resident set size. + + Parameters + ---------- + processes: list[psutil.Process] + processes to monitor + + Returns + ------- + int + total process memory """ rss: int = 0 for process in processes: @@ -41,11 +50,22 @@ def get_process_memory(processes: list[psutil.Process]) -> int: def get_process_cpu( processes: list[psutil.Process], interval: float | None = None -) -> int: - """ - Get the CPU usage +) -> float: + """Get the CPU usage If first time being called, use a small interval to collect initial CPU metrics. + + Parameters + ---------- + processes: list[psutil.Process] + list of processes to track for CPU usage. + interval: float, optional + interval to measure across, default is None, use previous measure time difference. 
+ + Returns + ------- + float + CPU percentage usage """ cpu_percent: int = 0 for process in processes: @@ -56,8 +76,19 @@ def get_process_cpu( def is_gpu_used(handle, processes: list[psutil.Process]) -> bool: - """ - Check if the GPU is being used by the list of processes + """Check if the GPU is being used by the list of processes. + + Parameters + ---------- + handle: Unknown + connector to GPU API + processes: list[psutil.Process] + list of processes to monitor + + Returns + ------- + bool + if GPU is being used """ pids = [process.pid for process in processes] @@ -69,8 +100,19 @@ def is_gpu_used(handle, processes: list[psutil.Process]) -> bool: def get_gpu_metrics(processes: list[psutil.Process]) -> list[tuple[float, float]]: - """ - Get GPU metrics + """Get GPU metrics. + + Parameters + ---------- + processes: list[psutil.Process] + list of processes to monitor + + Returns + ------- + list[tuple[float, float]] + For each GPU identified: + - gpu_percent + - gpu_memory """ gpu_metrics: list[tuple[float, float]] = [] @@ -91,12 +133,25 @@ def get_gpu_metrics(processes: list[psutil.Process]) -> list[tuple[float, float] class SystemResourceMeasurement: + """Class for taking and storing a system resources measurement.""" + def __init__( self, processes: list[psutil.Process], interval: float | None, cpu_only: bool = False, ) -> None: + """Perform a measurement of system resource consumption. + + Parameters + ---------- + processes: list[psutil.Process] + processes to measure across. + interval: float | None + interval to measure, if None previous measure time used for interval. 
+ cpu_only: bool, optional + only record CPU information, default False + """ self.cpu_percent: float | None = get_process_cpu(processes, interval=interval) self.cpu_memory: float | None = get_process_memory(processes) self.gpus: list[dict[str, float]] = ( @@ -104,6 +159,7 @@ def __init__( ) def to_dict(self) -> dict[str, float]: + """Create metrics dictionary for sending to a Simvue server.""" _metrics: dict[str, float] = { f"{RESOURCES_METRIC_PREFIX}/cpu.usage.percentage": self.cpu_percent, f"{RESOURCES_METRIC_PREFIX}/cpu.usage.memory": self.cpu_memory, @@ -118,3 +174,11 @@ def to_dict(self) -> dict[str, float]: ] return _metrics + + @property + def gpu_percent(self) -> float: + return sum(m[0] for m in self.gpus) / (len(self.gpus) or 1) + + @property + def gpu_memory(self) -> float: + return sum(m[1] for m in self.gpus) / (len(self.gpus) or 1) diff --git a/simvue/run.py b/simvue/run.py index 241a4b4e..792d38ea 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -375,15 +375,11 @@ def _get_internal_metrics( self._emissions_monitor.estimate_co2_emissions( process_id=f"{self._name}", cpu_percent=_current_system_measure.cpu_percent, - cpu_interval=self._resources_metrics_interval, + measure_interval=self._resources_metrics_interval, + gpu_percent=_current_system_measure.gpu_percent, ) self._add_metrics_to_dispatch( - { - "sustainability.emissions.total": self._emissions_monitor.total_co2_emission, - "sustainability.emissions.delta": self._emissions_monitor.total_co2_delta, - "sustainability.energy_consumed.total": self._emissions_monitor.total_energy, - "sustainability.energy_consumed.delta": self._emissions_monitor.total_energy_delta, - }, + self._emissions_monitor.simvue_metrics(), join_on_fail=False, step=emission_metrics_step, ) @@ -1126,15 +1122,17 @@ def config( co2_intensity=_co2_intensity, local_data_directory=self._user_config.eco.local_data_directory, co2_signal_api_token=None, - cpu_idle_power=self._user_config.eco.cpu_idle_power, + 
thermal_design_power_per_cpu=self._user_config.eco.cpu_thermal_design_power, + thermal_design_power_per_gpu=self._user_config.eco.gpu_thermal_design_power, ) else: self._emissions_monitor = CO2Monitor( intensity_refresh_rate=self._user_config.eco.intensity_refresh_rate, local_data_directory=self._user_config.eco.local_data_directory, co2_signal_api_token=self._user_config.eco.co2_signal_api_token, - cpu_idle_power=self._user_config.eco.cpu_idle_power, co2_intensity=self._user_config.eco.co2_intensity, + thermal_design_power_per_cpu=self._user_config.eco.cpu_thermal_design_power, + thermal_design_power_per_gpu=self._user_config.eco.gpu_thermal_design_power, ) elif enable_emission_metrics is False and self._emissions_monitor: diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 69e20597..df8f35c3 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -51,7 +51,12 @@ def test_check_run_initialised_decorator() -> None: @pytest.mark.codecarbon def test_run_with_emissions() -> None: with sv_run.Run() as run_created: - run_created.init(folder="/simvue_client_unit_tests", retention_period="1 min", tags=["test_run_with_emissions"]) + run_created.init( + name="test_run_with_emissions", + folder="/simvue_client_unit_tests", + retention_period="1 min", + tags=["test_run_with_emissions"] + ) run_created.config(enable_emission_metrics=True, resources_metrics_interval=1) time.sleep(5) _run = RunObject(identifier=run_created.id) From 08dc7a0f53e8a457234951461430b7abcd581820 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 20 Mar 2025 16:32:01 +0000 Subject: [PATCH 11/37] Format code --- simvue/eco/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/simvue/eco/__init__.py b/simvue/eco/__init__.py index cfe48ef9..240c0c06 100644 --- a/simvue/eco/__init__.py +++ b/simvue/eco/__init__.py @@ -7,6 +7,7 @@ comparisons only. 
Any values returned should not be taken as absolute. """ + __date__ = "2025-03-06" from .emissions_monitor import CO2Monitor as CO2Monitor From 933ba92d34946ddf5ee82ec1588a61e723950515 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 20 Mar 2025 16:43:53 +0000 Subject: [PATCH 12/37] Fix loop frequency to be 1s --- simvue/run.py | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 792d38ea..c405f890 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -9,7 +9,6 @@ import contextlib import logging import pathlib -import math import mimetypes import multiprocessing.synchronize import threading @@ -459,7 +458,7 @@ def _heartbeat( ) if time.time() - last_heartbeat < self._heartbeat_interval: - time.sleep(self.loop_frequency) + time.sleep(1) continue last_heartbeat = time.time() @@ -472,7 +471,7 @@ def _heartbeat( if self._sv_obj: self._sv_obj.send_heartbeat() - time.sleep(self.loop_frequency) + time.sleep(1) return _heartbeat @@ -993,21 +992,6 @@ def id(self) -> str | None: """Return the unique id of the run""" return self._id - @property - def loop_frequency(self) -> int: - """Returns the current frequency of monitoring. - - This value is the maximum frequency of heartbeat, - emissions metric and resource metric measuring. 
- """ - # There is no point the loop interval being greater - # than any of the metric push or heartbeat intervals - # where None use heartbeat value as default - return math.gcd( - self._heartbeat_interval, - self._resources_metrics_interval or self._heartbeat_interval, - ) - @skip_if_failed("_aborted", "_suppress_errors", False) @pydantic.validate_call def reconnect(self, run_id: str) -> bool: From f885caff214d9a1a4c8f1f8082fb7020c94a0a4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 24 Mar 2025 14:43:22 +0000 Subject: [PATCH 13/37] Fix bad refresh rate for CO2 signal and added CO2 signal mocking --- CITATION.cff | 61 ---------------------- poetry.lock | 18 ++++++- pyproject.toml | 3 +- simvue/eco/api_client.py | 6 +-- simvue/eco/config.py | 2 +- simvue/eco/emissions_monitor.py | 49 +++++++++-------- simvue/run.py | 25 +++++---- tests/conftest.py | 45 ++++++++++++++-- tests/functional/test_client.py | 11 +++- tests/functional/test_run_class.py | 4 +- tests/unit/test_ecoclient.py | 84 ++++++++++++++++++++++++++++++ 11 files changed, 201 insertions(+), 107 deletions(-) create mode 100644 tests/unit/test_ecoclient.py diff --git a/CITATION.cff b/CITATION.cff index 69ae53fc..3d634a59 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -45,64 +45,3 @@ license: Apache-2.0 commit: 88bdd861e01153a2853191c0a8d1bb64dfabe296 version: 2.0.0 date-released: '2025-03-07' -references: -- title: mlco2/codecarbon - version: v2.8.2 - type: software - date-released: 2024-12-08 - doi: 10.5281/zenodo.14518377 - url: https://doi.org/10.5281/zenodo.14518377 - repository-code: https://github.com/mlco2/codecarbon - authors: - - given-names: Benoit - family-names: Courty - - given-names: Victor - family-names: Schmidt - - given-names: Sasha - family-names: Luccioni - - given-names: Goyal - family-names: Kamal - - given-names: Marion - family-names: Coutarel - - given-names: Boris - family-names: Feld - - given-names: Jérémy - family-names: Lecourt - - 
given-names: Liam - family-names: Connell - - given-names: Amine - family-names: Saboni - - given-names: Mathilde - family-names: Léval - - given-names: Luis - family-names: Blanche - - given-names: Alexis - family-names: Cruveiller - - given-names: Franklin - family-names: Zhao - - given-names: Aditya - family-names: Joshi - - given-names: Alexis - family-names: Bogroff - - given-names: Hugues - family-names: de Lavoreille - - given-names: Niko - family-names: Laskaris - - given-names: Edoardo - family-names: Abati - - given-names: Douglas - family-names: Blank - - given-names: Ziyao - family-names: Wang - - given-names: Armin - family-names: Catovic - - given-names: Marc - family-names: Alencon - - given-names: Michał - family-names: Stęchły - - given-names: Christian - family-names: Bauer - - given-names: Lucas Otávio N. - family-names: de Araújo - - given-names: Minerva - family-names: Books diff --git a/poetry.lock b/poetry.lock index 9e40a86b..4faab40a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1667,6 +1667,22 @@ termcolor = ">=2.1.0" [package.extras] dev = ["black", "flake8", "pre-commit"] +[[package]] +name = "pytest-timeout" +version = "2.3.1" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + [[package]] name = "pytest-xdist" version = "3.6.1" @@ -2034,4 +2050,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "73ec4a32990526c7a7a8ea147cb4b49f58805c209b1896555906573393d31b45" +content-hash = 
"ce123cae51b9203f8b8838b0c4a1b60645ef4af7498ec5eb8ca4f84584f10203" diff --git a/pyproject.toml b/pyproject.toml index 9f271650..2444fc85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,7 @@ pytest-xdist = "^3.6.1" jinja2 = "^3.1.6" types-requests = "^2.32.0.20241016" interrogate = "^1.7.0" +pytest-timeout = "^2.3.1" [build-system] requires = ["poetry-core"] @@ -94,7 +95,7 @@ testpaths = [ "tests" ] markers = [ - "codecarbon: tests for emission metrics", + "eco: tests for emission metrics", "client: tests of Simvue client", "converters: tests for Simvue object converters", "dispatch: test data dispatcher", diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py index 04c781f7..b8812380 100644 --- a/simvue/eco/api_client.py +++ b/simvue/eco/api_client.py @@ -19,6 +19,8 @@ import geocoder.location import typing +CO2_SIGNAL_API_ENDPOINT: str = "https://api.co2signal.com/v1/latest" + class CO2SignalData(pydantic.BaseModel): datetime: datetime.datetime @@ -66,9 +68,7 @@ class APIClient(pydantic.BaseModel): Provides an interface to the Electricity Maps API. 
""" - co2_api_endpoint: pydantic.HttpUrl = pydantic.HttpUrl( - "https://api.co2signal.com/v1/latest" - ) + co2_api_endpoint: pydantic.HttpUrl = pydantic.HttpUrl(CO2_SIGNAL_API_ENDPOINT) co2_api_token: pydantic.SecretStr | None = None timeout: pydantic.PositiveInt = 10 diff --git a/simvue/eco/config.py b/simvue/eco/config.py index de681c19..e69ea20b 100644 --- a/simvue/eco/config.py +++ b/simvue/eco/config.py @@ -35,7 +35,7 @@ class EcoConfig(pydantic.BaseModel): local_data_directory: pydantic.DirectoryPath | None = pydantic.Field( None, validate_default=True ) - intensity_refresh_rate: pydantic.PositiveInt | str | None = pydantic.Field( + intensity_refresh_interval: pydantic.PositiveInt | str | None = pydantic.Field( default="1 day", gt=2 * 60 ) co2_intensity: float | None = None diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 152e242f..67714826 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -19,6 +19,7 @@ from simvue.eco.api_client import APIClient, CO2SignalResponse TIME_FORMAT: str = "%Y_%m_%d_%H_%M_%S" +CO2_SIGNAL_API_INTERVAL_LIMIT: int = 2 * 60 @dataclasses.dataclass @@ -42,7 +43,7 @@ class CO2Monitor(pydantic.BaseModel): thermal_design_power_per_cpu: pydantic.PositiveFloat | None thermal_design_power_per_gpu: pydantic.PositiveFloat | None local_data_directory: pydantic.DirectoryPath - intensity_refresh_rate: int | None | str + intensity_refresh_interval: int | None | str co2_intensity: float | None co2_signal_api_token: str | None @@ -54,14 +55,14 @@ def now(self) -> str: @property def outdated(self) -> bool: """Checks if the current data is out of date.""" - if not self.intensity_refresh_rate: + if not self.intensity_refresh_interval: return False _now: datetime.datetime = datetime.datetime.now() _latest_time: datetime.datetime = datetime.datetime.strptime( self._local_data["last_updated"], TIME_FORMAT ) - return (_now - _latest_time).seconds < self.intensity_refresh_rate + return 
(_now - _latest_time).seconds > self.intensity_refresh_interval def _load_local_data(self) -> dict[str, str | dict[str, str | float]] | None: """Loads locally stored CO2 intensity data""" @@ -88,8 +89,8 @@ def __init__(self, *args, **kwargs) -> None: the TDP value for each GPU, default is 130W. local_data_directory: pydantic.DirectoryPath the directory in which to store CO2 intensity data. - intensity_refresh_rate: int | str | None - the rate in seconds at which to call the CO2 signal API. The default is once per day, + intensity_refresh_interval: int | str | None + the interval in seconds at which to call the CO2 signal API. The default is once per day, note the API is restricted to 30 requests per hour for a given user. Also accepts a time period as a string, e.g. '1 week' co2_intensity: float | None @@ -113,12 +114,17 @@ def __init__(self, *args, **kwargs) -> None: ) super().__init__(*args, **kwargs) - if self.intensity_refresh_rate and isinstance(self.intensity_refresh_rate, str): - self.intensity_refresh_rate = int( - humanfriendly.parse_timespan(self.intensity_refresh_rate) + if self.intensity_refresh_interval and isinstance( + self.intensity_refresh_interval, str + ): + self.intensity_refresh_interval = int( + humanfriendly.parse_timespan(self.intensity_refresh_interval) ) - if self.intensity_refresh_rate and self.intensity_refresh_rate <= 2 * 60: + if ( + self.intensity_refresh_interval + and self.intensity_refresh_interval <= CO2_SIGNAL_API_INTERVAL_LIMIT + ): raise ValueError( "Invalid intensity refresh rate, CO2 signal API restricted to 30 calls per hour." ) @@ -150,7 +156,9 @@ def estimate_co2_emissions( ) -> None: """Estimate the CO2 emissions""" self._logger.debug( - f"📐 Estimating CO2 emissions from CPU usage of {cpu_percent}% in interval {measure_interval}s." + f"📐 Estimating CO2 emissions from CPU usage of {cpu_percent}% " + f"{('and GPU usage of ' + (str(gpu_percent)) + '%') if gpu_percent else ''} " + f"in interval {measure_interval}s." 
) if self._local_data is None: @@ -165,8 +173,7 @@ def estimate_co2_emissions( if ( not self.co2_intensity and not self._local_data.setdefault(self._client.country_code, {}) - or self.outdated - ): + ) or self.outdated: self._logger.info("🌍 CO2 emission outdated, calling API.") _data: CO2SignalResponse = self._client.get() self._local_data[self._client.country_code] = _data.model_dump(mode="json") @@ -212,20 +219,18 @@ def estimate_co2_emissions( _process.co2_emission += _process.co2_delta self._logger.debug( - f"📝 For _process '{process_id}', recorded: CPU={_process.cpu_percentage}%, " - f"Power={_process.power_usage}W, CO2={_process.co2_emission}{_co2_units}" + f"📝 For process '{process_id}', recorded: CPU={_process.cpu_percentage:.2f}%, " + f"Power={_process.power_usage:.2f}W, CO2={_process.co2_emission:.2e}{_co2_units}" ) def simvue_metrics(self) -> dict[str, float]: """Retrieve metrics to send to Simvue server.""" - return ( - { - "sustainability.emissions.total": self.total_co2_emission, - "sustainability.emissions.delta": self.total_co2_delta, - "sustainability.energy_consumed.total": self.total_energy, - "sustainability.energy_consumed.delta": self.total_energy_delta, - }, - ) + return { + "sustainability.emissions.total": self.total_co2_emission, + "sustainability.emissions.delta": self.total_co2_delta, + "sustainability.energy_consumed.total": self.total_energy, + "sustainability.energy_consumed.delta": self.total_energy_delta, + } @property def last_process(self) -> str | None: diff --git a/simvue/run.py b/simvue/run.py index c405f890..7dfa6e87 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -338,15 +338,16 @@ def _get_internal_metrics( Parameters ---------- - #TODO: update docs - res_metric_prev_time: float - the previous time at which resource metrics were recorded. - ems_metric_prev_time: float - the previous time at which emissions metrics were recorded. - res_metric_step: int - the value count for resource metrics for this run. 
- ems_metric_step: int - the value count for emissions metrics for this run. + resource_metrics_step: int | None + the current step for this resource metric record, + None if skipping resource metrics. + emission_metrics_step: int | None + the current step for this emission metrics record, + None if skipping emission metrics. + res_measure_interval: int | None, optional + the interval for resource metric gathering, default is None + ems_measure_interval: int | None, optional + the interval for emission metric gathering, default is None Return ------ @@ -417,12 +418,14 @@ def _heartbeat( self._resources_metrics_interval is not None and _current_time - last_res_metric_call > self._resources_metrics_interval + and self._status == "running" ) _update_emissions_metrics: bool = ( self._resources_metrics_interval is not None and self._emissions_monitor and _current_time - last_co2_metric_call > self._resources_metrics_interval + and self._status == "running" ) # In order to get a resource metric reading at t=0 @@ -1102,7 +1105,7 @@ def config( ) # Create an emissions monitor with no API calls self._emissions_monitor = CO2Monitor( - intensity_refresh_rate=None, + intensity_refresh_interval=None, co2_intensity=_co2_intensity, local_data_directory=self._user_config.eco.local_data_directory, co2_signal_api_token=None, @@ -1111,7 +1114,7 @@ def config( ) else: self._emissions_monitor = CO2Monitor( - intensity_refresh_rate=self._user_config.eco.intensity_refresh_rate, + intensity_refresh_interval=self._user_config.eco.intensity_refresh_interval, local_data_directory=self._user_config.eco.local_data_directory, co2_signal_api_token=self._user_config.eco.co2_signal_api_token, co2_intensity=self._user_config.eco.co2_intensity, diff --git a/tests/conftest.py b/tests/conftest.py index 02edc3a0..b2903a3b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ from _pytest import monkeypatch from numpy import fix import pytest +import datetime import pytest_mock import 
typing
 import uuid
@@ -11,13 +12,17 @@
 import json
 import pathlib
 import logging
-from simvue.api.objects.artifact import Artifact
-from simvue.exception import ObjectNotFoundError
+import requests
+
+import simvue.eco.api_client as sv_eco
 
 import simvue.run as sv_run
 import simvue.api.objects as sv_api_obj
 import simvue.config.user as sv_cfg
 import simvue.utilities
 
+from simvue.api.objects.artifact import Artifact
+from simvue.exception import ObjectNotFoundError
+
 MAX_BUFFER_SIZE: int = 10
 
 class CountingLogHandler(logging.Handler):
@@ -43,6 +48,40 @@ def clear_out_files() -> None:
             file_obj.unlink()
 
 
+@pytest.fixture
+def mock_co2_signal(monkeypatch: monkeypatch.MonkeyPatch) -> dict[str, dict | str]:
+    _mock_data = {
+        "data": {
+            "datetime": datetime.datetime.now().isoformat(),
+            "carbonIntensity": 0.04,
+            "fossilFuelPercentage": 39,
+        },
+        "_disclaimer": "test disclaimer",
+        "countryCode": "GB",
+        "status": "unknown",
+        "units": {"carbonIntensity": "eqCO2kg/kwh"}
+    }
+    class MockCo2SignalAPIResponse:
+        def json(*_, **__) -> dict:
+            return _mock_data
+
+        @property
+        def status_code(self) -> int:
+            return 200
+
+    _req_get = requests.get
+
+    def _mock_get(*args, **kwargs) -> requests.Response:
+        if sv_eco.CO2_SIGNAL_API_ENDPOINT in args or kwargs.get("url") == sv_eco.CO2_SIGNAL_API_ENDPOINT:
+            return MockCo2SignalAPIResponse()
+        else:
+            return _req_get(*args, **kwargs)
+
+    monkeypatch.setattr(requests, "get", _mock_get)
+
+    return _mock_data
+
+
 @pytest.fixture
 def speedy_heartbeat(monkeypatch: monkeypatch.MonkeyPatch) -> None:
     monkeypatch.setattr(sv_run, "HEARTBEAT_INTERVAL", 0.1)
@@ -78,7 +117,7 @@ def create_test_run(request, prevent_script_exit) -> typing.Generator[typing.Tup
     for alert_id in _test_run_data.get("alert_ids", []):
         with contextlib.suppress(ObjectNotFoundError):
             sv_api_obj.Alert(identifier=alert_id).delete()
-    clear_out_files()
+    clear_out_files()
 
 
 @pytest.fixture
diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py
index
428146cb..6492d395 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -390,7 +390,14 @@ def test_abort_run(speedy_heartbeat, create_plain_run: tuple[sv_run.Run, dict]) run.update_tags([f"delete_me_{_uuid}"]) _client = svc.Client() _client.abort_run(run.id, reason="Test abort") - time.sleep(0.5) - assert run._status == "terminated" + time.sleep(2) + + # On some machines it might take a little longer so + # try twice before accepting the abort failed + try: + assert run._status == "terminated" + except AssertionError: + time.sleep(2) + assert run._status == "terminated" diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index df8f35c3..5cdcf340 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -48,8 +48,8 @@ def test_check_run_initialised_decorator() -> None: @pytest.mark.run -@pytest.mark.codecarbon -def test_run_with_emissions() -> None: +@pytest.mark.eco +def test_run_with_emissions(mock_co2_signal) -> None: with sv_run.Run() as run_created: run_created.init( name="test_run_with_emissions", diff --git a/tests/unit/test_ecoclient.py b/tests/unit/test_ecoclient.py new file mode 100644 index 00000000..a978bc0f --- /dev/null +++ b/tests/unit/test_ecoclient.py @@ -0,0 +1,84 @@ +import tempfile +import pytest +import time +import pytest_mock + +import simvue.eco.api_client as sv_eco_api +import simvue.eco.emissions_monitor as sv_eco_ems + +@pytest.mark.eco +def test_api_client_get_loc_info(mock_co2_signal) -> None: + _client = sv_eco_api.APIClient() + assert _client.latitude + assert _client.longitude + assert _client.country_code + + +@pytest.mark.eco +def test_api_client_query(mock_co2_signal: dict[str, dict | str]) -> None: + _client = sv_eco_api.APIClient() + _response: sv_eco_api.CO2SignalResponse = _client.get() + assert _response.carbon_intensity_units == mock_co2_signal["units"]["carbonIntensity"] + assert _response.country_code == 
mock_co2_signal["countryCode"] + assert _response.data.carbon_intensity == mock_co2_signal["data"]["carbonIntensity"] + assert _response.data.fossil_fuel_percentage == mock_co2_signal["data"]["fossilFuelPercentage"] + + +@pytest.mark.eco +@pytest.mark.parametrize( + "refresh", (True, False), ids=("refresh", "no-refresh") +) +def test_outdated_data_check( + mock_co2_signal, + refresh: bool, + mocker: pytest_mock.MockerFixture, + monkeypatch: pytest.MonkeyPatch +) -> None: + _spy = mocker.spy(sv_eco_api.APIClient, "get") + monkeypatch.setattr(sv_eco_ems, "CO2_SIGNAL_API_INTERVAL_LIMIT", 0.1) + with tempfile.TemporaryDirectory() as tempd: + _ems_monitor = sv_eco_ems.CO2Monitor( + thermal_design_power_per_cpu=80, + thermal_design_power_per_gpu=130, + local_data_directory=tempd, + intensity_refresh_interval=1 if refresh else 2, + co2_intensity=None, + co2_signal_api_token=None + ) + _measure_params = { + "process_id": "test_outdated_data_check", + "cpu_percent": 20, + "gpu_percent": 40, + "measure_interval": 1 + } + _ems_monitor.estimate_co2_emissions(**_measure_params) + time.sleep(3) + _ems_monitor.estimate_co2_emissions(**_measure_params) + + assert _spy.call_count == 2 if refresh else 1, f"{_spy.call_count} != {2 if refresh else 1}" + + +def test_co2_monitor_properties(mock_co2_signal) -> None: + with tempfile.TemporaryDirectory() as tempd: + _ems_monitor = sv_eco_ems.CO2Monitor( + thermal_design_power_per_cpu=80, + thermal_design_power_per_gpu=130, + local_data_directory=tempd, + intensity_refresh_interval=1 if refresh else 2, + co2_intensity=None, + co2_signal_api_token=None + ) + _measure_params = { + "process_id": "test_outdated_data_check", + "cpu_percent": 20, + "gpu_percent": 40, + "measure_interval": 1 + } + _ems_monitor.estimate_co2_emissions(**_measure_params) + assert _ems_monitor.current_carbon_intensity + assert _ems_monitor.process_data["test_outdated_data_check"] + assert _ems_monitor.total_power_usage + assert _ems_monitor.total_co2_emission + assert 
_ems_monitor.total_co2_delta + assert _ems_monitor.total_energy + assert _ems_monitor.total_energy_delta From 5f5fcfa7e308f59ae746486bdec632ac173f4f49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 25 Mar 2025 08:20:14 +0000 Subject: [PATCH 14/37] Add CO2 intensity refresh to sender --- simvue/eco/emissions_monitor.py | 39 ++++++++++++++++--------- simvue/sender.py | 50 +++++++++++++++++++++++++++++++-- 2 files changed, 74 insertions(+), 15 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 67714826..f3ce5785 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -147,6 +147,28 @@ def __init__(self, *args, **kwargs) -> None: ) self._processes: dict[str, ProcessData] = {} + def check_refresh(self) -> bool: + """Check to see if an intensity value refresh is required. + + Returns + ------- + bool + whether a refresh of the CO2 intensity was requested + from the CO2 Signal API. + """ + if ( + not self.co2_intensity + and not self._local_data.setdefault(self._client.country_code, {}) + ) or self.outdated: + self._logger.info("🌍 CO2 emission outdated, calling API.") + _data: CO2SignalResponse = self._client.get() + self._local_data[self._client.country_code] = _data.model_dump(mode="json") + self._local_data["last_updated"] = self.now() + with self._data_file_path.open("w") as out_f: + json.dump(self._local_data, out_f, indent=2) + return True + return False + def estimate_co2_emissions( self, process_id: str, @@ -157,8 +179,9 @@ def estimate_co2_emissions( """Estimate the CO2 emissions""" self._logger.debug( f"📐 Estimating CO2 emissions from CPU usage of {cpu_percent}% " - f"{('and GPU usage of ' + (str(gpu_percent)) + '%') if gpu_percent else ''} " - f"in interval {measure_interval}s." + f"and GPU usage of {gpu_percent}%" + if gpu_percent + else f"in interval {measure_interval}s." 
) if self._local_data is None: @@ -170,17 +193,7 @@ def estimate_co2_emissions( if not (_process := self._processes.get(process_id)): self._processes[process_id] = (_process := ProcessData()) - if ( - not self.co2_intensity - and not self._local_data.setdefault(self._client.country_code, {}) - ) or self.outdated: - self._logger.info("🌍 CO2 emission outdated, calling API.") - _data: CO2SignalResponse = self._client.get() - self._local_data[self._client.country_code] = _data.model_dump(mode="json") - self._local_data["last_updated"] = self.now() - - with self._data_file_path.open("w") as out_f: - json.dump(self._local_data, out_f, indent=2) + self.check_refresh() if self.co2_intensity: _current_co2_intensity = self.co2_intensity diff --git a/simvue/sender.py b/simvue/sender.py index d747dc9b..d5c8a712 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -10,11 +10,13 @@ import logging from concurrent.futures import ThreadPoolExecutor import threading +import time import requests import psutil from simvue.config.user import SimvueConfiguration import simvue.api.objects +from simvue.eco.emissions_monitor import CO2Monitor from simvue.version import __version__ UPLOAD_ORDER: list[str] = [ @@ -150,7 +152,8 @@ def sender( max_workers: int = 5, threading_threshold: int = 10, objects_to_upload: list[str] = UPLOAD_ORDER, -): + co2_intensity_refresh: int | None | str = None, +) -> dict[str, str]: """Send data from a local cache directory to the Simvue server. Parameters @@ -163,13 +166,52 @@ def sender( The number of cached files above which threading will be used objects_to_upload : list[str] Types of objects to upload, by default uploads all types of objects present in cache + co2_intensity_refresh: int | None | str + the refresh interval for the CO2 intensity value, if None use config value if available, + else do not refresh. 
+ + Returns + ------- + id_mapping + mapping of local ID to server ID """ - _user_config = SimvueConfiguration.fetch() + _user_config: SimvueConfiguration = SimvueConfiguration.fetch() cache_dir = cache_dir or _user_config.offline.cache cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) _lock_path = cache_dir.joinpath("sender.lock") + # If CO2 emissions are requested create a dummy monitor which just + # refreshes the CO2 intensity value if required. No emission metrics + # will be taken by the sender itself, values are assumed to be recorded + # by any offline runs being sent. + + _co2_monitor = CO2Monitor( + thermal_design_power_per_gpu=None, + thermal_design_power_per_cpu=None, + local_data_directory=cache_dir, + intensity_refresh_interval=_user_config.eco.intensity_refresh_interval, + co2_intensity=co2_intensity_refresh or _user_config.eco.co2_intensity, + co2_signal_api_token=_user_config.eco.co2_signal_api_token, + ) + _co2_monitor_trigger = threading.Event() + + def _monitor_refresh_task( + co2_monitor: CO2Monitor, + trigger: threading.Event, + ) -> None: + while not trigger.is_set(): + co2_monitor.check_refresh() + time.sleep(1) + + _co2_monitor_thread: threading.Thread | None = None + + if _co2_monitor.intensity_refresh_interval: + _co2_monitor_thread = threading.Thread( + target=_monitor_refresh_task, args=(_co2_monitor, _co2_monitor_trigger) + ) + _co2_monitor_thread.start() + # Check that no other sender is already currently running... 
if _lock_path.exists() and psutil.pid_exists(int(_lock_path.read_text())): raise RuntimeError("A sender is already running for this cache!") @@ -234,6 +276,10 @@ def sender( ), _heartbeat_files, ) + + # If a CO2 monitor is running stop it + _co2_monitor_trigger.set() + # Remove lock file to allow another sender to start in the future _lock_path.unlink() return _id_mapping From 3e08099fe251683a5ef016c2a5fe7edcb78f5893 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 25 Mar 2025 08:55:34 +0000 Subject: [PATCH 15/37] Remove unneeded extra thread in sender --- simvue/sender.py | 51 ++++++++++++++++-------------------------------- 1 file changed, 17 insertions(+), 34 deletions(-) diff --git a/simvue/sender.py b/simvue/sender.py index d5c8a712..47e8f7fc 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -10,7 +10,6 @@ import logging from concurrent.futures import ThreadPoolExecutor import threading -import time import requests import psutil from simvue.config.user import SimvueConfiguration @@ -181,37 +180,6 @@ def sender( cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) _lock_path = cache_dir.joinpath("sender.lock") - # If CO2 emissions are requested create a dummy monitor which just - # refreshes the CO2 intensity value if required. No emission metrics - # will be taken by the sender itself, values are assumed to be recorded - # by any offline runs being sent. 
- - _co2_monitor = CO2Monitor( - thermal_design_power_per_gpu=None, - thermal_design_power_per_cpu=None, - local_data_directory=cache_dir, - intensity_refresh_interval=_user_config.eco.intensity_refresh_interval, - co2_intensity=co2_intensity_refresh or _user_config.eco.co2_intensity, - co2_signal_api_token=_user_config.eco.co2_signal_api_token, - ) - _co2_monitor_trigger = threading.Event() - - def _monitor_refresh_task( - co2_monitor: CO2Monitor, - trigger: threading.Event, - ) -> None: - while not trigger.is_set(): - co2_monitor.check_refresh() - time.sleep(1) - - _co2_monitor_thread: threading.Thread | None = None - - if _co2_monitor.intensity_refresh_interval: - _co2_monitor_thread = threading.Thread( - target=_monitor_refresh_task, args=(_co2_monitor, _co2_monitor_trigger) - ) - _co2_monitor_thread.start() - # Check that no other sender is already currently running... if _lock_path.exists() and psutil.pid_exists(int(_lock_path.read_text())): raise RuntimeError("A sender is already running for this cache!") @@ -277,8 +245,23 @@ def _monitor_refresh_task( _heartbeat_files, ) - # If a CO2 monitor is running stop it - _co2_monitor_trigger.set() + # If CO2 emissions are requested create a dummy monitor which just + # refreshes the CO2 intensity value if required. No emission metrics + # will be taken by the sender itself, values are assumed to be recorded + # by any offline runs being sent. 
+ + if ( + _refresh_interval := co2_intensity_refresh + or _user_config.eco.intensity_refresh_interval + ): + CO2Monitor( + thermal_design_power_per_gpu=None, + thermal_design_power_per_cpu=None, + local_data_directory=cache_dir, + intensity_refresh_interval=_refresh_interval, + co2_intensity=co2_intensity_refresh or _user_config.eco.co2_intensity, + co2_signal_api_token=_user_config.eco.co2_signal_api_token, + ).check_refresh() # Remove lock file to allow another sender to start in the future _lock_path.unlink() From eacbe4564aeeeefcd079c80bf90c8f38b55cb209 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 25 Mar 2025 10:58:12 +0000 Subject: [PATCH 16/37] Resolve stability of log_metrics test --- simvue/api/request.py | 1 - tests/conftest.py | 4 +++- tests/functional/test_run_class.py | 20 +++++++++++--------- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/simvue/api/request.py b/simvue/api/request.py index 20f47917..0235fc20 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -96,7 +96,6 @@ def post( else: data_sent = data - logging.debug(f"POST: {url}\n\tdata={data_sent}") response = requests.post( url, headers=headers, diff --git a/tests/conftest.py b/tests/conftest.py index b2903a3b..dc0545e4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -37,6 +37,8 @@ def emit(self, record): for i, capture in enumerate(self.captures): if capture in record.msg: + if "resource" in record.msg: + print(f"[{i}={self.counts[i]}]: {record.msg}") self.counts[i] += 1 @@ -148,7 +150,7 @@ def create_pending_run(request, prevent_script_exit) -> typing.Generator[typing. 
@pytest.fixture -def create_plain_run_offline(request,prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_plain_run_offline(request,prevent_script_exit,monkeypatch) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with tempfile.TemporaryDirectory() as temp_d: monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) with sv_run.Run("offline") as run: diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 5cdcf340..09c00a54 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -16,7 +16,7 @@ import random import datetime import simvue -from simvue.api.objects.alert.fetch import Alert +from simvue.api.objects import Alert, Metrics from simvue.exception import SimvueRunError import simvue.run as sv_run import simvue.client as sv_cl @@ -95,20 +95,20 @@ def test_run_with_emissions(mock_co2_signal) -> None: def test_log_metrics( overload_buffer: bool, timestamp: str | None, - setup_logging: "CountingLogHandler", - mocker, + mocker: pytest_mock.MockerFixture, request: pytest.FixtureRequest, visibility: typing.Literal["public", "tenant"] | list[str] | None, ) -> None: METRICS = {"a": 10, "b": 1.2} - setup_logging.captures = ["'a'", "resources/"] - # Have to create the run outside of fixtures because the resources dispatch # occurs immediately and is not captured by the handler when using the fixture run = sv_run.Run() run.config(suppress_errors=False) + metrics_spy = mocker.spy(Metrics, "new") + resource_metrics_spy = mocker.spy(sv_run.Run, "_get_internal_metrics") + if visibility == "bad_option": with pytest.raises(SimvueRunError, match="visibility") as e: run.init( @@ -169,12 +169,14 @@ def test_log_metrics( assert len(_steps) == ( run._dispatcher._max_buffer_size * 3 if overload_buffer else 1 ) - # There are two debug log messages per metric dispatch - 'Executing callback on buffer' and 'Posting staged data' - # Should have done one 
dispatch if not overloaded, and 3 dispatches if overloaded - assert setup_logging.counts[0] == (6 if overload_buffer else 2) + + if overload_buffer: + assert metrics_spy.call_count > 2 + else: + assert metrics_spy.call_count <= 2 # Check heartbeat has been called at least once (so sysinfo sent) - assert setup_logging.counts[1] > 0 + assert resource_metrics_spy.call_count >= 1 @pytest.mark.run From af4d0e33cbf4d2b100435fd9b123ad7c4d74c003 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 25 Mar 2025 11:17:28 +0000 Subject: [PATCH 17/37] [skip ci] Updated changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index accced78..c40092c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,6 @@ # Change log +## Unreleased +* Removed CodeCarbon dependence in favour of a slimmer solution using the CO2 Signal API. ## [v2.0.1](https://github.com/simvue-io/client/releases/tag/v2.0.1) - 2025-03-24 * Improvements to docstrings on methods, classes and functions. 
## [v2.0.0](https://github.com/simvue-io/client/releases/tag/v2.0.0) - 2025-03-07 From bae9ade4dbae2a6062401ca7c79a84ef85fc86d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 25 Mar 2025 13:17:20 +0000 Subject: [PATCH 18/37] Re-write abort python on alert test --- tests/functional/test_run_class.py | 28 +++++----------------------- 1 file changed, 5 insertions(+), 23 deletions(-) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 09c00a54..207fd1c8 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -1,6 +1,4 @@ import os -from os.path import basename -from numpy import identity import pytest import pytest_mock import time @@ -940,30 +938,14 @@ def abort_callback(abort_run=trigger) -> None: @pytest.mark.run def test_abort_on_alert_python( - create_plain_run: typing.Tuple[sv_run.Run, dict], mocker: pytest_mock.MockerFixture + speedy_heartbeat, create_plain_run: typing.Tuple[sv_run.Run, dict], mocker: pytest_mock.MockerFixture ) -> None: - abort_set = threading.Event() - - def testing_exit(status: int) -> None: - abort_set.set() - raise SystemExit(status) - - mocker.patch("os._exit", testing_exit) + timeout: int = 20 + interval: int = 0 run, _ = create_plain_run - run.config(resources_metrics_interval=1) - run._heartbeat_interval = 1 client = sv_cl.Client() - i = 0 - - while True: - time.sleep(1) - if i == 4: - client.abort_run(run._id, reason="testing abort") - i += 1 - if abort_set.is_set() or i > 11: - break - - assert i < 10 + client.abort_run(run.id, reason="Test abort") + time.sleep(2) assert run._status == "terminated" From 6538fc68db0e27ebabfb9c561751beba7da9e4c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 25 Mar 2025 13:59:06 +0000 Subject: [PATCH 19/37] Fix monitor tests --- tests/unit/test_ecoclient.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/test_ecoclient.py 
b/tests/unit/test_ecoclient.py index a978bc0f..691a66fa 100644 --- a/tests/unit/test_ecoclient.py +++ b/tests/unit/test_ecoclient.py @@ -64,19 +64,19 @@ def test_co2_monitor_properties(mock_co2_signal) -> None: thermal_design_power_per_cpu=80, thermal_design_power_per_gpu=130, local_data_directory=tempd, - intensity_refresh_interval=1 if refresh else 2, + intensity_refresh_interval=None, co2_intensity=None, co2_signal_api_token=None ) _measure_params = { - "process_id": "test_outdated_data_check", + "process_id": "test_co2_monitor_properties", "cpu_percent": 20, "gpu_percent": 40, "measure_interval": 1 } _ems_monitor.estimate_co2_emissions(**_measure_params) assert _ems_monitor.current_carbon_intensity - assert _ems_monitor.process_data["test_outdated_data_check"] + assert _ems_monitor.process_data["test_co2_monitor_properties"] assert _ems_monitor.total_power_usage assert _ems_monitor.total_co2_emission assert _ems_monitor.total_co2_delta From 4de2f5f3bed7079bfdd41fd34cc9de8978881c60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 26 Mar 2025 09:47:23 +0000 Subject: [PATCH 20/37] Simplified and fixed the emissions run tests --- simvue/run.py | 7 +-- tests/functional/test_run_class.py | 74 ++++++++++++++++++------------ 2 files changed, 46 insertions(+), 35 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index b4a3ec5e..71021980 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1109,15 +1109,10 @@ def config( if enable_emission_metrics: if self._user_config.run.mode == "offline": - if not (_co2_intensity := self._user_config.eco.co2_intensity): - self._error( - "Cannot record emission metrics, " - "a CO2 intensity value is required in offline mode." 
- ) # Create an emissions monitor with no API calls self._emissions_monitor = CO2Monitor( intensity_refresh_interval=None, - co2_intensity=_co2_intensity, + co2_intensity=self._user_config.eco.co2_intensity, local_data_directory=self._user_config.eco.local_data_directory, co2_signal_api_token=None, thermal_design_power_per_cpu=self._user_config.eco.cpu_thermal_design_power, diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 207fd1c8..dd7fce98 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -1,3 +1,4 @@ +import json import os import pytest import pytest_mock @@ -15,7 +16,9 @@ import datetime import simvue from simvue.api.objects import Alert, Metrics +from simvue.eco.api_client import CO2SignalData, CO2SignalResponse from simvue.exception import SimvueRunError +from simvue.eco.emissions_monitor import TIME_FORMAT, CO2Monitor import simvue.run as sv_run import simvue.client as sv_cl import simvue.sender as sv_send @@ -47,38 +50,51 @@ def test_check_run_initialised_decorator() -> None: @pytest.mark.run @pytest.mark.eco -def test_run_with_emissions(mock_co2_signal) -> None: - with sv_run.Run() as run_created: - run_created.init( - name="test_run_with_emissions", - folder="/simvue_client_unit_tests", - retention_period="1 min", - tags=["test_run_with_emissions"] +@pytest.mark.online +def test_run_with_emissions_online(speedy_heartbeat, mock_co2_signal, create_plain_run) -> None: + run_created, _ = create_plain_run + run_created.config(enable_emission_metrics=True) + time.sleep(3) + _run = RunObject(identifier=run_created.id) + _metric_names = [item[0] for item in _run.metrics] + client = sv_cl.Client() + for _metric in ["emissions", "energy_consumed"]: + _total_metric_name = f"sustainability.{_metric}.total" + _delta_metric_name = f"sustainability.{_metric}.delta" + assert _total_metric_name in _metric_names + assert _delta_metric_name in _metric_names + _metric_values = 
client.get_metric_values( + metric_names=[_total_metric_name, _delta_metric_name], + xaxis="time", + output_format="dataframe", + run_ids=[run_created.id], ) - run_created.config(enable_emission_metrics=True, resources_metrics_interval=1) - time.sleep(5) - _run = RunObject(identifier=run_created.id) - _metric_names = [item[0] for item in _run.metrics] - client = sv_cl.Client() - for _metric in ["emissions", "energy_consumed"]: - _total_metric_name = f"sustainability.{_metric}.total" - _delta_metric_name = f"sustainability.{_metric}.delta" - assert _total_metric_name in _metric_names - assert _delta_metric_name in _metric_names - _metric_values = client.get_metric_values( - metric_names=[_total_metric_name, _delta_metric_name], - xaxis="time", - output_format="dataframe", - run_ids=[run_created.id], - ) + assert _total_metric_name in _metric_values - # Check that total = previous total + latest delta - _total_values = _metric_values[_total_metric_name].tolist() - _delta_values = _metric_values[_delta_metric_name].tolist() - assert len(_total_values) > 1 - for i in range(1, len(_total_values)): - assert _total_values[i] == _total_values[i - 1] + _delta_values[i] +@pytest.mark.run +@pytest.mark.eco +@pytest.mark.offline +def test_run_with_emissions_offline(speedy_heartbeat, mock_co2_signal, create_plain_run_offline) -> None: + run_created, _ = create_plain_run_offline + run_created.config(enable_emission_metrics=True) + time.sleep(2) + id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"]) + _run = RunObject(identifier=id_mapping[run_created.id]) + _metric_names = [item[0] for item in _run.metrics] + client = sv_cl.Client() + for _metric in ["emissions", "energy_consumed"]: + _total_metric_name = f"sustainability.{_metric}.total" + _delta_metric_name = f"sustainability.{_metric}.delta" + assert _total_metric_name in _metric_names + assert _delta_metric_name in _metric_names + _metric_values = client.get_metric_values( + metric_names=[_total_metric_name, 
_delta_metric_name], + xaxis="time", + output_format="dataframe", + run_ids=[id_mapping[run_created.id]], + ) + assert _total_metric_name in _metric_values @pytest.mark.run @pytest.mark.parametrize( From 01f8c0b4b6a513f19a3562dcb9c2b7ed80dfe900 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 27 Mar 2025 14:59:23 +0000 Subject: [PATCH 21/37] Fix bad code inclusion of 'attach_process' for emissions monitor --- simvue/run.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 71021980..eef42c6f 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -561,13 +561,6 @@ def _start(self) -> bool: self._get_child_processes() if self._parent_process else None ) - if self._emissions_monitor: - self._emissions_monitor.attach_process(self._parent_process) - ( - self._emissions_monitor.attach_process(process) - for process in self._child_processes or [] - ) - self._shutdown_event = threading.Event() self._heartbeat_termination_trigger = threading.Event() self._alert_raised_trigger = threading.Event() From b2811f0ef0d73fe58c84acf15b65fb6b7c654955 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 27 Mar 2025 15:06:03 +0000 Subject: [PATCH 22/37] Fix 0 GPUs bug --- simvue/eco/emissions_monitor.py | 2 +- simvue/metrics.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index f3ce5785..d5435b5a 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -108,7 +108,7 @@ def __init__(self, *args, **kwargs) -> None: ) if not isinstance(kwargs.get("thermal_design_power_per_gpu"), float): - kwargs["thermal_design_power_per_gpu"] = 80.0 + kwargs["thermal_design_power_per_gpu"] = 130.0 _logger.warning( "⚠️ No TDP value provided for current GPUs, will use arbitrary value of 130W." 
) diff --git a/simvue/metrics.py b/simvue/metrics.py index 0e64f357..bf5b209d 100644 --- a/simvue/metrics.py +++ b/simvue/metrics.py @@ -177,8 +177,8 @@ def to_dict(self) -> dict[str, float]: @property def gpu_percent(self) -> float: - return sum(m[0] for m in self.gpus) / (len(self.gpus) or 1) + return sum(m[0] for m in self.gpus or []) / (len(self.gpus or []) or 1) @property def gpu_memory(self) -> float: - return sum(m[1] for m in self.gpus) / (len(self.gpus) or 1) + return sum(m[1] for m in self.gpus or []) / (len(self.gpus or []) or 1) From 462e7bea086ff09445d21e8aab78550272ee0c9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 09:00:01 +0000 Subject: [PATCH 23/37] Renamed resource metrics interval to system metrics interval --- simvue/config/parameters.py | 2 +- simvue/run.py | 54 +++++++++++++++--------------- tests/conftest.py | 2 +- tests/functional/test_run_class.py | 16 ++++----- tests/unit/test_suppress_errors.py | 6 ++-- 5 files changed, 40 insertions(+), 40 deletions(-) diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 6bed98f1..e4a5b526 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -61,7 +61,7 @@ def check_valid_cache_path(cls, cache: pathlib.Path) -> pathlib.Path: class MetricsSpecifications(pydantic.BaseModel): - resources_metrics_interval: pydantic.PositiveInt | None = -1 + system_metrics_interval: pydantic.PositiveInt | None = -1 enable_emission_metrics: bool = False diff --git a/simvue/run.py b/simvue/run.py index eef42c6f..205a5031 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -187,10 +187,10 @@ def __init__( ) self._aborted: bool = False - self._resources_metrics_interval: int | None = ( + self._system_metrics_interval: int | None = ( HEARTBEAT_INTERVAL - if self._user_config.metrics.resources_metrics_interval < 1 - else self._user_config.metrics.resources_metrics_interval + if self._user_config.metrics.system_metrics_interval < 1 + else 
self._user_config.metrics.system_metrics_interval ) self._headers: dict[str, str] = ( { @@ -334,7 +334,7 @@ def _terminate_run( def _get_internal_metrics( self, - resource_metrics_step: int | None, + system_metrics_step: int | None, emission_metrics_step: int | None, res_measure_interval: int | None = None, ems_measure_interval: int | None = None, @@ -346,7 +346,7 @@ def _get_internal_metrics( Parameters ---------- - resource_metrics_step: int | None + system_metrics_step: int | None the current step for this resource metric record, None if skipping resource metrics. emission_metrics_step: int | None @@ -366,24 +366,24 @@ def _get_internal_metrics( _current_system_measure = SystemResourceMeasurement( self.processes, interval=res_measure_interval, - cpu_only=not resource_metrics_step, + cpu_only=not system_metrics_step, ) - if resource_metrics_step is not None: + if system_metrics_step is not None: # Set join on fail to false as if an error is thrown # join would be called on this thread and a thread cannot # join itself! 
self._add_metrics_to_dispatch( _current_system_measure.to_dict(), join_on_fail=False, - step=resource_metrics_step, + step=system_metrics_step, ) if emission_metrics_step is not None: self._emissions_monitor.estimate_co2_emissions( process_id=f"{self._name}", cpu_percent=_current_system_measure.cpu_percent, - measure_interval=self._resources_metrics_interval, + measure_interval=self._system_metrics_interval, gpu_percent=_current_system_measure.gpu_percent, ) self._add_metrics_to_dispatch( @@ -422,17 +422,17 @@ def _heartbeat( while not heartbeat_trigger.is_set(): with self._configuration_lock: _current_time: float = time.time() - _update_resource_metrics: bool = ( - self._resources_metrics_interval is not None + _update_system_metrics: bool = ( + self._system_metrics_interval is not None and _current_time - last_res_metric_call - > self._resources_metrics_interval + > self._system_metrics_interval and self._status == "running" ) _update_emissions_metrics: bool = ( - self._resources_metrics_interval is not None + self._system_metrics_interval is not None and self._emissions_monitor and _current_time - last_co2_metric_call - > self._resources_metrics_interval + > self._system_metrics_interval and self._status == "running" ) @@ -445,13 +445,13 @@ def _heartbeat( emission_metrics_step=co2_step if _update_emissions_metrics else None, - resource_metrics_step=res_step - if _update_resource_metrics + system_metrics_step=res_step + if _update_system_metrics else None, res_measure_interval=1 if res_step == 0 else None, ems_measure_interval=initial_ems_metrics_interval if co2_step == 0 - else self._resources_metrics_interval, + else self._system_metrics_interval, ) res_step += 1 @@ -459,7 +459,7 @@ def _heartbeat( last_res_metric_call = ( _current_time - if _update_resource_metrics + if _update_system_metrics else last_res_metric_call ) last_co2_metric_call = ( @@ -1048,9 +1048,9 @@ def config( *, suppress_errors: bool | None = None, queue_blocking: bool | None = None, - 
resources_metrics_interval: pydantic.PositiveInt | None = None, + system_metrics_interval: pydantic.PositiveInt | None = None, enable_emission_metrics: bool | None = None, - disable_resources_metrics: bool | None = None, + disable_system_metrics: bool | None = None, storage_id: str | None = None, abort_on_alert: typing.Literal["run", "all", "ignore"] | bool | None = None, ) -> bool: @@ -1063,11 +1063,11 @@ def config( dormant state if an error occurs queue_blocking : bool, optional block thread queues during metric/event recording - resources_metrics_interval : int, optional + system_metrics_interval : int, optional frequency at which to collect resource metrics enable_emission_metrics : bool, optional enable monitoring of emission metrics - disable_resources_metrics : bool, optional + disable_system_metrics : bool, optional disable monitoring of resource metrics storage_id : str, optional identifier of storage to use, by default None @@ -1090,15 +1090,15 @@ def config( if queue_blocking is not None: self._queue_blocking = queue_blocking - if resources_metrics_interval and disable_resources_metrics: + if system_metrics_interval and disable_system_metrics: self._error( "Setting of resource metric interval and disabling resource metrics is ambiguous" ) return False - if disable_resources_metrics: + if disable_system_metrics: self._pid = None - self._resources_metrics_interval = None + self._system_metrics_interval = None if enable_emission_metrics: if self._user_config.run.mode == "offline": @@ -1124,8 +1124,8 @@ def config( elif enable_emission_metrics is False and self._emissions_monitor: self._error("Cannot disable emissions monitor once it has been started") - if resources_metrics_interval: - self._resources_metrics_interval = resources_metrics_interval + if system_metrics_interval: + self._system_metrics_interval = system_metrics_interval if abort_on_alert is not None: if isinstance(abort_on_alert, bool): diff --git a/tests/conftest.py b/tests/conftest.py index 
dc0545e4..96aa75af 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -256,7 +256,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur TEST_DATA["url"] = run._user_config.server.url TEST_DATA["headers"] = run._headers TEST_DATA["pid"] = run._pid - TEST_DATA["resources_metrics_interval"] = run._resources_metrics_interval + TEST_DATA["system_metrics_interval"] = run._system_metrics_interval if create_objects: with tempfile.TemporaryDirectory() as tempd: diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index dd7fce98..527d63da 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -121,7 +121,7 @@ def test_log_metrics( run.config(suppress_errors=False) metrics_spy = mocker.spy(Metrics, "new") - resource_metrics_spy = mocker.spy(sv_run.Run, "_get_internal_metrics") + system_metrics_spy = mocker.spy(sv_run.Run, "_get_internal_metrics") if visibility == "bad_option": with pytest.raises(SimvueRunError, match="visibility") as e: @@ -135,7 +135,7 @@ def test_log_metrics( retention_period="1 hour", visibility=visibility, ) - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) return run.init( @@ -148,7 +148,7 @@ def test_log_metrics( visibility=visibility, retention_period="1 hour", ) - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) # Speed up the read rate for this test run._dispatcher._max_buffer_size = 10 @@ -190,7 +190,7 @@ def test_log_metrics( assert metrics_spy.call_count <= 2 # Check heartbeat has been called at least once (so sysinfo sent) - assert resource_metrics_spy.call_count >= 1 + assert system_metrics_spy.call_count >= 1 @pytest.mark.run @@ -928,7 +928,7 @@ def abort_callback(abort_run=trigger) -> None: mocker.patch("os._exit", testing_exit) N_PROCESSES: int = 3 - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) run._heartbeat_interval = 1 run._testing = 
True run.add_process( @@ -943,7 +943,7 @@ def abort_callback(abort_run=trigger) -> None: client = sv_cl.Client() client.abort_run(run._id, reason="testing abort") time.sleep(4) - assert run._resources_metrics_interval == 1 + assert run._system_metrics_interval == 1 for child in child_processes: assert not child.is_running() if run._status != "terminated": @@ -971,7 +971,7 @@ def test_abort_on_alert_raise( ) -> None: run, _ = create_plain_run - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) run._heartbeat_interval = 1 run._testing = True alert_id = run.create_user_alert("abort_test", trigger_abort=True) @@ -994,7 +994,7 @@ def test_abort_on_alert_raise( @pytest.mark.run def test_kill_all_processes(create_plain_run: typing.Tuple[sv_run.Run, dict]) -> None: run, _ = create_plain_run - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) run.add_process(identifier="forever_long_1", executable="bash", c="sleep 10000") run.add_process(identifier="forever_long_2", executable="bash", c="sleep 10000") processes = [ diff --git a/tests/unit/test_suppress_errors.py b/tests/unit/test_suppress_errors.py index 73c114dc..0ba7d022 100644 --- a/tests/unit/test_suppress_errors.py +++ b/tests/unit/test_suppress_errors.py @@ -12,7 +12,7 @@ def test_suppress_errors_false() -> None: with pytest.raises(RuntimeError) as e: run.config( suppress_errors=False, - disable_resources_metrics=123, + disable_system_metrics=123, ) assert "Input should be a valid boolean, unable to interpret input" in f"{e.value}" @@ -25,7 +25,7 @@ def test_suppress_errors_true(caplog) -> None: run.config(suppress_errors=True) run.config( - disable_resources_metrics=123, + disable_system_metrics=123, ) caplog.set_level(logging.ERROR) @@ -41,7 +41,7 @@ def test_suppress_errors_default(caplog) -> None: run.config(suppress_errors=True) run.config( - disable_resources_metrics=123, + disable_system_metrics=123, ) caplog.set_level(logging.ERROR) From 
ebef99890c58658e615d110e93d859ca97209146 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 09:00:27 +0000 Subject: [PATCH 24/37] Add Python3.10 support for CO2 timestamp parsing --- simvue/eco/api_client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py index b8812380..9bd1883a 100644 --- a/simvue/eco/api_client.py +++ b/simvue/eco/api_client.py @@ -39,7 +39,9 @@ class CO2SignalResponse(pydantic.BaseModel): def from_json_response(cls, json_response: dict) -> "CO2SignalResponse": _data: dict[str, typing.Any] = json_response["data"] _co2_signal_data = CO2SignalData( - datetime=datetime.datetime.fromisoformat(_data["datetime"]), + datetime=datetime.datetime.fromisoformat( + _data["datetime"].replace("Z", "+00:00") + ), carbon_intensity=_data["carbonIntensity"], fossil_fuel_percentage=_data["fossilFuelPercentage"], ) From 444805da5f03ebdfa6c898ee9601d5c0f0999178 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 09:00:46 +0000 Subject: [PATCH 25/37] Tidy error response from CO2 signal API --- simvue/eco/api_client.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py index 9bd1883a..e4597ee6 100644 --- a/simvue/eco/api_client.py +++ b/simvue/eco/api_client.py @@ -121,9 +121,13 @@ def get(self) -> CO2SignalResponse: _response = requests.get(f"{self.co2_api_endpoint}", params=_params) if _response.status_code != http.HTTPStatus.OK: + try: + _error = _response.json()["error"] + except (AttributeError, KeyError): + _error = _response.text raise RuntimeError( - "Failed to retrieve current CO2 signal data for" - f" country '{self._two_letter_country_code}': {_response.text}" + f"[{_response.status_code}] Failed to retrieve current CO2 signal data for" + f" country '{self._two_letter_country_code}': {_error}" ) return 
CO2SignalResponse.from_json_response(_response.json()) From 1626f1758503b888a8c4d5380a3af6d1f71e4292 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 09:07:47 +0000 Subject: [PATCH 26/37] Reactivate sorting tests --- tests/functional/test_client.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 37dcb5c2..5ba17620 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -161,9 +161,6 @@ def test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None: ids=("sorted-metadata", "sorted-name-created", None) ) def test_get_artifacts_entries(create_test_run: tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None) -> None: - # TODO: Reinstate this test once server bug fixed - if any("metadata" in a[0] for a in sorting or []): - pytest.skip(reason="Server bug fix required for metadata sorting.") client = svc.Client() assert dict(client.list_artifacts(create_test_run[1]["run_id"], sort_by_columns=sorting)) assert client.get_artifact(create_test_run[1]["run_id"], name="test_attributes") @@ -252,9 +249,6 @@ def test_get_run(create_test_run: tuple[sv_run.Run, dict]) -> None: ids=("no-sort", "sort-path-metadata", "sort-modified") ) def test_get_folders(create_test_run: tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None) -> None: - #TODO: Once server is fixed reinstate this test - if "modified" in (a[0] for a in sorting or []): - pytest.skip(reason="Server bug when sorting by 'modified'") client = svc.Client() assert (folders := client.get_folders(sort_by_columns=sorting)) _id, _folder = next(folders) From 9c5317e23d2fc0ccc94b93dd604e54d4af7e0731 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 12:23:25 +0000 Subject: [PATCH 27/37] Fix Co2 client logic, do not get from CO2 signal if value provided --- simvue/eco/emissions_monitor.py | 8 ++++---- 1 file changed, 
4 insertions(+), 4 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index d5435b5a..b694ca3e 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -156,10 +156,10 @@ def check_refresh(self) -> bool: whether a refresh of the CO2 intensity was requested from the CO2 Signal API. """ - if ( - not self.co2_intensity - and not self._local_data.setdefault(self._client.country_code, {}) - ) or self.outdated: + if not self.co2_intensity and ( + not self._local_data.setdefault(self._client.country_code, {}) + or self.outdated + ): self._logger.info("🌍 CO2 emission outdated, calling API.") _data: CO2SignalResponse = self._client.get() self._local_data[self._client.country_code] = _data.model_dump(mode="json") From ff417858cbec52c183a8ab25c9d607ad9fedb670 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 12:30:13 +0000 Subject: [PATCH 28/37] Do not refresh if offline --- simvue/eco/emissions_monitor.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index b694ca3e..387aca3c 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -142,8 +142,10 @@ def __init__(self, *args, **kwargs) -> None: ) self._measure_time = datetime.datetime.now() self._logger = _logger - self._client: APIClient = APIClient( - co2_api_token=self.co2_signal_api_token, timeout=10 + self._client: APIClient | None = ( + None + if self.co2_intensity + else APIClient(co2_api_token=self.co2_signal_api_token, timeout=10) ) self._processes: dict[str, ProcessData] = {} @@ -156,7 +158,7 @@ def check_refresh(self) -> bool: whether a refresh of the CO2 intensity was requested from the CO2 Signal API. 
""" - if not self.co2_intensity and ( + if ( not self._local_data.setdefault(self._client.country_code, {}) or self.outdated ): @@ -193,12 +195,11 @@ def estimate_co2_emissions( if not (_process := self._processes.get(process_id)): self._processes[process_id] = (_process := ProcessData()) - self.check_refresh() - if self.co2_intensity: _current_co2_intensity = self.co2_intensity _co2_units = "kgCO2/kWh" else: + self.check_refresh() self._current_co2_data = CO2SignalResponse( **self._local_data[self._client.country_code] ) @@ -255,7 +256,7 @@ def process_data(self) -> dict[str, ProcessData]: @property def current_carbon_intensity(self) -> float: - return self._client.get().data.carbon_intensity + return self.co2_intensity or self._client.get().data.carbon_intensity @property def total_power_usage(self) -> float: From b0545e5595ef6c74c10ffa00afd81368e751acdc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 13:25:34 +0000 Subject: [PATCH 29/37] Added local refresh and handling of complete offline running for CO2 --- simvue/eco/emissions_monitor.py | 45 +++++++++++++++++++++++++++++---- simvue/run.py | 1 + 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 387aca3c..c7191d9b 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -15,6 +15,7 @@ import logging import humanfriendly import pathlib +import os.path from simvue.eco.api_client import APIClient, CO2SignalResponse @@ -46,6 +47,7 @@ class CO2Monitor(pydantic.BaseModel): intensity_refresh_interval: int | None | str co2_intensity: float | None co2_signal_api_token: str | None + offline: bool = False def now(self) -> str: """Return data file timestamp for the current time""" @@ -97,7 +99,9 @@ def __init__(self, *args, **kwargs) -> None: disable using RestAPIs to retrieve CO2 intensity and instead use this value. Default is None, use remote data. 
Value is in kgCO2/kWh co2_signal_api_token: str - RECOMMENDED. The API token for CO2 signal, default is None. + The API token for CO2 signal, default is None. + offline: bool, optional + Run without any server interaction """ _logger = logging.getLogger(self.__class__.__name__) @@ -113,6 +117,7 @@ def __init__(self, *args, **kwargs) -> None: "⚠️ No TDP value provided for current GPUs, will use arbitrary value of 130W." ) super().__init__(*args, **kwargs) + self._last_local_write = datetime.datetime.now() if self.intensity_refresh_interval and isinstance( self.intensity_refresh_interval, str @@ -144,7 +149,7 @@ def __init__(self, *args, **kwargs) -> None: self._logger = _logger self._client: APIClient | None = ( None - if self.co2_intensity + if self.co2_intensity or self.offline else APIClient(co2_api_token=self.co2_signal_api_token, timeout=10) ) self._processes: dict[str, ProcessData] = {} @@ -158,10 +163,25 @@ def check_refresh(self) -> bool: whether a refresh of the CO2 intensity was requested from the CO2 Signal API. 
""" + # Need to check if the local cache has been modified + # even if running offline if ( - not self._local_data.setdefault(self._client.country_code, {}) - or self.outdated + self._data_file_path.exists() + and ( + _check_write := datetime.datetime.fromtimestamp( + os.path.getmtime(f"{self._data_file_path}") + ) + ) + > self._last_local_write ): + self._last_local_write = _check_write + with self._data_file_path.open("r") as in_f: + self._local_data = json.load(in_f) + + if ( + self._client + and not self._local_data.setdefault(self._client.country_code, {}) + ) or self.outdated: self._logger.info("🌍 CO2 emission outdated, calling API.") _data: CO2SignalResponse = self._client.get() self._local_data[self._client.country_code] = _data.model_dump(mode="json") @@ -200,8 +220,23 @@ def estimate_co2_emissions( _co2_units = "kgCO2/kWh" else: self.check_refresh() + if self._client: + _country_code = self._client.country_code + else: + # If no local data yet then return + if not (_country_codes := list(self._local_data.keys())): + self._logger.warning( + "No CO2 emission data recorded as no CO2 intensity value " + "has been provided and there is no local intensity data available." + ) + return + + _country_code = _country_codes[0] + self._logger.debug( + f"🗂️ Using data for region '{_country_code}' from local cache for offline estimation." 
+ ) self._current_co2_data = CO2SignalResponse( - **self._local_data[self._client.country_code] + **self._local_data[_country_code] ) _current_co2_intensity = self._current_co2_data.data.carbon_intensity _co2_units = self._current_co2_data.carbon_intensity_units diff --git a/simvue/run.py b/simvue/run.py index 205a5031..e708edbe 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1110,6 +1110,7 @@ def config( co2_signal_api_token=None, thermal_design_power_per_cpu=self._user_config.eco.cpu_thermal_design_power, thermal_design_power_per_gpu=self._user_config.eco.gpu_thermal_design_power, + offline=True, ) else: self._emissions_monitor = CO2Monitor( From 74f00f2113cc3cdb153f72cb4a4fbfb9f647939d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 14:05:43 +0000 Subject: [PATCH 30/37] Added Number of CPU cores to config --- simvue/eco/config.py | 1 + simvue/eco/emissions_monitor.py | 15 ++++++++++++--- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/simvue/eco/config.py b/simvue/eco/config.py index e69ea20b..06e57761 100644 --- a/simvue/eco/config.py +++ b/simvue/eco/config.py @@ -31,6 +31,7 @@ class EcoConfig(pydantic.BaseModel): co2_signal_api_token: pydantic.SecretStr | None = None cpu_thermal_design_power: pydantic.PositiveInt | None = None + cpu_n_cores: pydantic.PositiveInt | None = None gpu_thermal_design_power: pydantic.PositiveInt | None = None local_data_directory: pydantic.DirectoryPath | None = pydantic.Field( None, validate_default=True diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index c7191d9b..7bc691e5 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -42,6 +42,7 @@ class CO2Monitor(pydantic.BaseModel): """ thermal_design_power_per_cpu: pydantic.PositiveFloat | None + n_cores_per_cpu: pydantic.PositiveInt | None thermal_design_power_per_gpu: pydantic.PositiveFloat | None local_data_directory: pydantic.DirectoryPath 
intensity_refresh_interval: int | None | str @@ -87,6 +88,8 @@ def __init__(self, *args, **kwargs) -> None: ---------- thermal_design_power_per_cpu: float | None the TDP value for each CPU, default is 80W. + n_cores_per_cpu: int | None + the number of cores in each CPU, default is 4. thermal_design_power_per_gpu: float | None the TDP value for each GPU, default is 130W. local_data_directory: pydantic.DirectoryPath @@ -111,6 +114,12 @@ def __init__(self, *args, **kwargs) -> None: "⚠️ No TDP value provided for current CPU, will use arbitrary value of 80W." ) + if not isinstance(kwargs.get("n_cores_per_cpu"), float): + kwargs["n_cores_per_cpu"] = 4 + _logger.warning( + "⚠️ No core count provided for current CPU, will use arbitrary value of 4." + ) + if not isinstance(kwargs.get("thermal_design_power_per_gpu"), float): kwargs["thermal_design_power_per_gpu"] = 130.0 _logger.warning( @@ -244,9 +253,9 @@ def estimate_co2_emissions( _process.gpu_percentage = gpu_percent _process.cpu_percentage = cpu_percent _previous_energy: float = _process.total_energy - _process.power_usage = ( - _process.cpu_percentage / 100.0 - ) * self.thermal_design_power_per_cpu + _process.power_usage = (_process.cpu_percentage / 100.0) * ( + self.thermal_design_power_per_cpu / self.n_cores_per_cpu + ) if _process.gpu_percentage and self.thermal_design_power_per_gpu: _process.power_usage += ( From 37d900b880932840067c27a3af7b108300b4543e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 14:09:34 +0000 Subject: [PATCH 31/37] Added intensity units to log message --- simvue/eco/emissions_monitor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 7bc691e5..74316b03 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -145,7 +145,7 @@ def __init__(self, *args, **kwargs) -> None: if self.co2_intensity: _logger.warning( - f"⚠️ Disabling online 
data retrieval, using {self.co2_intensity} for CO2 intensity." + f"⚠️ Disabling online data retrieval, using {self.co2_intensity} eqCO2g/kwh for CO2 intensity." ) self._data_file_path: pathlib.Path | None = None From 2437e374925a6a5dc40a6ef2850e8686b3bf573d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 14:25:25 +0000 Subject: [PATCH 32/37] Fix refresh on first run offline --- simvue/eco/emissions_monitor.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 74316b03..13c054d6 100644 --- a/simvue/eco/emissions_monitor.py +++ b/simvue/eco/emissions_monitor.py @@ -187,10 +187,13 @@ def check_refresh(self) -> bool: with self._data_file_path.open("r") as in_f: self._local_data = json.load(in_f) + if not self._client or not self._local_data: + return False + if ( - self._client - and not self._local_data.setdefault(self._client.country_code, {}) - ) or self.outdated: + not self._local_data.setdefault(self._client.country_code, {}) + or self.outdated + ): self._logger.info("🌍 CO2 emission outdated, calling API.") _data: CO2SignalResponse = self._client.get() self._local_data[self._client.country_code] = _data.model_dump(mode="json") From 01b5ff895ada52b9316ae5c50bfb2b1d3bdd8a25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 14:51:46 +0000 Subject: [PATCH 33/37] Correct step 0 emissions measure --- simvue/run.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index e708edbe..dc5ef56d 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -379,11 +379,16 @@ def _get_internal_metrics( step=system_metrics_step, ) - if emission_metrics_step is not None: + if ( + self._emissions_monitor + and emission_metrics_step is not None + and ems_measure_interval is not None + and _current_system_measure.cpu_percent is not None + ): 
self._emissions_monitor.estimate_co2_emissions( process_id=f"{self._name}", cpu_percent=_current_system_measure.cpu_percent, - measure_interval=self._system_metrics_interval, + measure_interval=ems_measure_interval, gpu_percent=_current_system_measure.gpu_percent, ) self._add_metrics_to_dispatch( From bbe4b4f60e82a93c7ad4ffd32ec11f4ba632c22f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 14:56:54 +0000 Subject: [PATCH 34/37] Test parallelization --- .github/workflows/test_client_ubuntu.yml | 74 +++++++++++++++++++++--- 1 file changed, 67 insertions(+), 7 deletions(-) diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml index 8af0ce8d..02321742 100644 --- a/.github/workflows/test_client_ubuntu.yml +++ b/.github/workflows/test_client_ubuntu.yml @@ -18,8 +18,51 @@ concurrency: cancel-in-progress: true jobs: - build: - + online_unit_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . 
+ - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/unit/ -x -m online -c /dev/null -p no:warnings -n 0 -v + offline_unit_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/unit/ -x -m offline -c /dev/null -p no:warnings -n 0 -v + online_functional_tests: runs-on: ubuntu-latest timeout-minutes: 40 steps: @@ -40,9 +83,26 @@ jobs: env: SIMVUE_URL: ${{ secrets.SIMVUE_URL }} SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} - run: python -m pytest tests/ -x --cov --cov-report=xml -m 'not scenario' -c /dev/null -p no:warnings -n 0 -v - - name: Upload coverage reports to Codecov + run: python -m pytest tests/functional/ -x -m online -c /dev/null -p no:warnings -n 0 -v + offline_functional_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies run: | - curl -Os https://uploader.codecov.io/latest/linux/codecov - chmod +x codecov - ./codecov -t ${CODECOV_TOKEN} + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r 
requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/functional/ -x -m offline -c /dev/null -p no:warnings -n 0 -v From d352d7ec2ad612536c8e1aa5c6fae6c25fcce5df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 15:11:50 +0000 Subject: [PATCH 35/37] Mock location info for international test running --- tests/conftest.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 96aa75af..0ac0d563 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -55,7 +55,7 @@ def mock_co2_signal(monkeypatch: monkeypatch.MonkeyPatch) -> dict[str, dict | st _mock_data = { "data": { "datetime": datetime.datetime.now().isoformat(), - "carbonIntensity": 0.04, + "carbonIntensity": 40, "fossilFuelPercentage": 39, }, "_disclaimer": "test disclaimer", @@ -78,8 +78,15 @@ def _mock_get(*args, **kwargs) -> requests.Response: return MockCo2SignalAPIResponse() else: return _req_get(*args, **kwargs) + def _mock_location_info(self) -> None: + self._logger.info("📍 Determining current user location.") + self._latitude: float + self._longitude: float + self._latitude, self._longitude = (-1, -1) + self._two_letter_country_code: str = "GB" monkeypatch.setattr(requests, "get", _mock_get) + monkeypatch.setattr(sv_eco.APIClient, "_get_user_location_info", _mock_location_info) return _mock_data From 4099b79f41f516a538cb591df76da852b4c5b006 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 15:15:11 +0000 Subject: [PATCH 36/37] Fix handling of no local emission data --- simvue/eco/emissions_monitor.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py index 13c054d6..b1798e96 100644 --- a/simvue/eco/emissions_monitor.py +++ 
b/simvue/eco/emissions_monitor.py @@ -232,17 +232,17 @@ def estimate_co2_emissions( _co2_units = "kgCO2/kWh" else: self.check_refresh() + # If no local data yet then return + if not (_country_codes := list(self._local_data.keys())): + self._logger.warning( + "No CO2 emission data recorded as no CO2 intensity value " + "has been provided and there is no local intensity data available." + ) + return + if self._client: _country_code = self._client.country_code else: - # If no local data yet then return - if not (_country_codes := list(self._local_data.keys())): - self._logger.warning( - "No CO2 emission data recorded as no CO2 intensity value " - "has been provided and there is no local intensity data available." - ) - return - _country_code = _country_codes[0] self._logger.debug( f"🗂️ Using data for region '{_country_code}' from local cache for offline estimation." From ff59423c23752495d14dc6b5d18f51d755db26cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 28 Mar 2025 15:17:52 +0000 Subject: [PATCH 37/37] Run non offline/online tests in CI --- .github/workflows/test_client_ubuntu.yml | 44 ++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml index 02321742..a20de2dd 100644 --- a/.github/workflows/test_client_ubuntu.yml +++ b/.github/workflows/test_client_ubuntu.yml @@ -106,3 +106,47 @@ jobs: SIMVUE_URL: ${{ secrets.SIMVUE_URL }} SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: python -m pytest tests/functional/ -x -m offline -c /dev/null -p no:warnings -n 0 -v + other_unit_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt 
--all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/unit/ -x -m 'not offline' -m 'not online' -m 'not scenario' -c /dev/null -p no:warnings -n 0 -v + other_functional_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/functional/ -x -m 'not offline' -m 'not online' -m 'not scenario' -c /dev/null -p no:warnings -n 0 -v