diff --git a/README.md b/README.md
index 928a117..2489a90 100644
--- a/README.md
+++ b/README.md
@@ -7,8 +7,8 @@ This Open Source project is build by B-Open - https://www.bopen.eu
 xarray-ecmwf is a Python library and [Xarray](https://docs.xarray.dev) backend with the following functionalities:
 
 - opens an ECMWF style request as a Xarray Dataset connected to the remote services
-  - the [Climate Data Store](https://cds.climate.copernicus.eu) via [cdsapi](https://github.com/ecmwf/cdsapi): ERA5, Seasonal forecasts
-  - the [Athospheric Data Store](https://ads.atmosphere.copernicus.eu) via cdsapi
+  - the [Climate Data Store](https://cds.climate.copernicus.eu) via [ecmwf-datastores-client](https://github.com/ecmwf/ecmwf-datastores-client): ERA5, Seasonal forecasts
+  - the [Atmospheric Data Store](https://ads.atmosphere.copernicus.eu) via ecmwf-datastores-client
   - the [ECMWF Open data](https://www.ecmwf.int/en/forecasts/datasets/open-data) via [ecmwf-opendata](https://github.com/ecmwf/ecmwf-opendata): High resolution forecasts, ensemble forecast
 - allows lazy loading the data and well integrated with [Dask](https://www.dask.org) and [Dask.distributed](https://distributed.dask.org)
 - allows chunking the input request according to a configurable splitting strategy. Allowed strategies:
diff --git a/ci/environment-integration.yml b/ci/environment-integration.yml
index 810ab03..fcfc5ca 100644
--- a/ci/environment-integration.yml
+++ b/ci/environment-integration.yml
@@ -7,8 +7,8 @@ dependencies:
 - pytest
 - pytest-cov
 # DO NOT EDIT ABOVE THIS LINE, ADD DEPENDENCIES BELOW
-- cdsapi
 - ecmwf-api-client
+- ecmwf-datastores-client
 - ecmwf-opendata
 - pandas-stubs
 - pip
diff --git a/environment.yml b/environment.yml
index 2662f34..0f71e12 100644
--- a/environment.yml
+++ b/environment.yml
@@ -9,8 +9,8 @@ channels:
 # DO NOT EDIT ABOVE THIS LINE, ADD DEPENDENCIES BELOW AS SHOWN IN THE EXAMPLE
 dependencies:
 - attrs
-- cdsapi
 - cfgrib
+- ecmwf-datastores-client
 - ecmwf-opendata
 - pip
 - xarray
diff --git a/pyproject.toml b/pyproject.toml
index 65bce00..8ee2a62 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,8 +12,8 @@ classifiers = [
   "Programming Language :: Python :: 3.11",
   "Topic :: Scientific/Engineering"
 ]
-dependencies = ["cdsapi", "cfgrib", "ecmwf-opendata", "polytope-client", "xarray"]
-description = "Xarray backend to access data via the cdsapi package"
+dependencies = ["cfgrib", "ecmwf-datastores-client", "ecmwf-opendata", "polytope-client", "xarray"]
+description = "Xarray backend to access ECMWF datastores"
 dynamic = ["version"]
 license = {file = "LICENSE"}
 name = "xarray-ecmwf"
@@ -32,7 +32,6 @@ strict = true
 [[tool.mypy.overrides]]
 ignore_missing_imports = true
 module = [
-  "cdsapi",
   "cf2cdm",
   "ecmwf",
   "ecmwf.opendata",
diff --git a/xarray_ecmwf/client_cdsapi.py b/xarray_ecmwf/client_cdsapi.py
index 0ce1cdd..db56c4c 100644
--- a/xarray_ecmwf/client_cdsapi.py
+++ b/xarray_ecmwf/client_cdsapi.py
@@ -3,7 +3,7 @@
 from typing import Any
 
 import attrs
-import cdsapi
+import ecmwf.datastores
 import numpy as np
 import xarray as xr
 
@@ -16,13 +16,13 @@
 
 @attrs.define
 class CdsapiRequestClient:
-    client_kwargs: dict[str, Any] = {"quiet": True, "retry_max": 1}
+    client_kwargs: dict[str, Any] = {"maximum_tries": 1}
 
     def submit_and_wait_on_result(self, request: dict[str, Any]) -> Any:
         request = request.copy()
         dataset = request.pop("dataset")
-        client = cdsapi.Client(**self.client_kwargs)
-        return client.retrieve(dataset, request | {"format": "grib"})
+        client = ecmwf.datastores.Client(**self.client_kwargs)
+        return client.submit_and_wait_on_results(dataset, request | {"format": "grib"})
 
     def get_filename(self, result: Any) -> str:
         return result.location.split("/")[-1]  # type: ignore
diff --git a/xarray_ecmwf/engine_ecmwf.py b/xarray_ecmwf/engine_ecmwf.py
index ef3e89a..a270abc 100644
--- a/xarray_ecmwf/engine_ecmwf.py
+++ b/xarray_ecmwf/engine_ecmwf.py
@@ -108,7 +108,7 @@ def retrieve_once(
         if not os.path.isdir(self.cache_folder):
             os.makedirs(self.cache_folder, exist_ok=True)
 
-        with xr.backends.locks.get_write_lock(f"{HOSTNAME}-grib"):  # type: ignore
+        with xr.backends.locks.get_write_lock(f"{HOSTNAME}-grib"):
             if not os.path.exists(path):
                 robust_save_to_file(self.request_client.download, (result,), path)
             ds = self.open_dataset(path)
@@ -142,7 +142,7 @@ def cached_empty_dataset(self, request: dict[str, Any]) -> Iterator[xr.Dataset]:
         if not os.path.exists(path):
             with self.retrieve(request) as read_ds:
                 # check again as the retrieve may be long
-                with xr.backends.locks.get_write_lock(f"{HOSTNAME}-zarr"):  # type: ignore
+                with xr.backends.locks.get_write_lock(f"{HOSTNAME}-zarr"):
                     if not os.path.exists(path):
                         # NOTE: be sure that read_ds is chunked so compute=False only
                         # writes the metadata. Some open_dataset
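For reference, a minimal sketch of the new call pattern introduced in xarray_ecmwf/client_cdsapi.py above. It only uses names that appear in the diff (ecmwf.datastores.Client, maximum_tries, submit_and_wait_on_results, result.location); the dataset name and request body are illustrative placeholders, and ecmwf-datastores-client credentials are assumed to be configured.

from typing import Any

import ecmwf.datastores

# Illustrative request: "dataset" is popped out and the remaining keys are passed
# to the client, mirroring CdsapiRequestClient.submit_and_wait_on_result above.
request: dict[str, Any] = {
    "dataset": "reanalysis-era5-single-levels",  # hypothetical example dataset
    "variable": "2m_temperature",
    "year": "2020",
    "month": "01",
    "day": "01",
    "time": "00:00",
}

dataset = request.pop("dataset")
client = ecmwf.datastores.Client(maximum_tries=1)
result = client.submit_and_wait_on_results(dataset, request | {"format": "grib"})
print(result.location.split("/")[-1])  # remote filename, as in get_filename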