
icclim time bounds issues #2

@DamienIrving

Description


icclim outputs an xarray Dataset with a time bounds axis. For some reason I get the following error when reading the CWD index data calculated from AGCD data (i.e. /g/data/xv83/dbi599/indices/cwd_year_AGCD_v1_r005_1900-2021.nc), but not for any other metric or dataset (not even other AGCD data/metrics).

---------------------------------------------------------------------------
OverflowError                             Traceback (most recent call last)
File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/coding/times.py:261, in decode_cf_datetime(num_dates, units, calendar, use_cftime)
    260 try:
--> 261     dates = _decode_datetime_with_pandas(flat_num_dates, units, calendar)
    262 except (KeyError, OutOfBoundsDatetime, OverflowError):

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/coding/times.py:217, in _decode_datetime_with_pandas(flat_num_dates, units, calendar)
    216 warnings.filterwarnings("ignore", "invalid value encountered", RuntimeWarning)
--> 217 pd.to_timedelta(flat_num_dates.min(), delta) + ref_date
    218 pd.to_timedelta(flat_num_dates.max(), delta) + ref_date

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/pandas/core/tools/timedeltas.py:148, in to_timedelta(arg, unit, errors)
    147 # ...so it must be a scalar value. Return scalar.
--> 148 return _coerce_scalar_to_timedelta_type(arg, unit=unit, errors=errors)

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/pandas/core/tools/timedeltas.py:156, in _coerce_scalar_to_timedelta_type(r, unit, errors)
    155 try:
--> 156     result = Timedelta(r, unit)
    157 except ValueError:

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.pyx:1357, in pandas._libs.tslibs.timedeltas.Timedelta.__new__()

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.pyx:288, in pandas._libs.tslibs.timedeltas.convert_to_timedelta64()

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/pandas/_libs/tslibs/conversion.pyx:125, in pandas._libs.tslibs.conversion.cast_from_unit()

OverflowError: Python int too large to convert to C long

During handling of the above exception, another exception occurred:

OverflowError                             Traceback (most recent call last)
File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/coding/times.py:174, in _decode_cf_datetime_dtype(data, units, calendar, use_cftime)
    173 try:
--> 174     result = decode_cf_datetime(example_value, units, calendar, use_cftime)
    175 except Exception:

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/coding/times.py:263, in decode_cf_datetime(num_dates, units, calendar, use_cftime)
    262 except (KeyError, OutOfBoundsDatetime, OverflowError):
--> 263     dates = _decode_datetime_with_cftime(
    264         flat_num_dates.astype(float), units, calendar
    265     )
    267     if (
    268         dates[np.nanargmin(num_dates)].year < 1678
    269         or dates[np.nanargmax(num_dates)].year >= 2262
    270     ):

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/coding/times.py:195, in _decode_datetime_with_cftime(num_dates, units, calendar)
    193     raise ModuleNotFoundError("No module named 'cftime'")
    194 return np.asarray(
--> 195     cftime.num2date(num_dates, units, calendar, only_use_cftime_datetimes=True)
    196 )

File src/cftime/_cftime.pyx:584, in cftime._cftime.num2date()

File src/cftime/_cftime.pyx:383, in cftime._cftime.cast_to_int()

OverflowError: time values outside range of 64 bit signed integers

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
Input In [24], in <cell line: 1>()
----> 1 cwd_annual_mean['AGCD'] = read_data(
      2     cwd_files['AGCD'], regrid=False, time_bounds=[start_date, end_date]
      3 )

Input In [8], in read_data(infile, regrid, time_bounds)
      1 def read_data(infile, regrid=False, time_bounds=None):
      2     """Read data and calculate annual mean.
      3     
      4     Parameters
   (...)
      9     
     10     """
---> 12     ds = xr.open_dataset(infile, decode_timedelta=False)
     13     if time_bounds:
     14         start_date, end_date = time_bounds

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/backends/api.py:495, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, backend_kwargs, *args, **kwargs)
    483 decoders = _resolve_decoders_kwargs(
    484     decode_cf,
    485     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)
    491     decode_coords=decode_coords,
    492 )
    494 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 495 backend_ds = backend.open_dataset(
    496     filename_or_obj,
    497     drop_variables=drop_variables,
    498     **decoders,
    499     **kwargs,
    500 )
    501 ds = _dataset_from_backend_dataset(
    502     backend_ds,
    503     filename_or_obj,
   (...)
    510     **kwargs,
    511 )
    512 return ds

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:567, in NetCDF4BackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, format, clobber, diskless, persist, lock, autoclose)
    565 store_entrypoint = StoreBackendEntrypoint()
    566 with close_on_error(store):
--> 567     ds = store_entrypoint.open_dataset(
    568         store,
    569         mask_and_scale=mask_and_scale,
    570         decode_times=decode_times,
    571         concat_characters=concat_characters,
    572         decode_coords=decode_coords,
    573         drop_variables=drop_variables,
    574         use_cftime=use_cftime,
    575         decode_timedelta=decode_timedelta,
    576     )
    577 return ds

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/backends/store.py:27, in StoreBackendEntrypoint.open_dataset(self, store, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta)
     24 vars, attrs = store.load()
     25 encoding = store.get_encoding()
---> 27 vars, attrs, coord_names = conventions.decode_cf_variables(
     28     vars,
     29     attrs,
     30     mask_and_scale=mask_and_scale,
     31     decode_times=decode_times,
     32     concat_characters=concat_characters,
     33     decode_coords=decode_coords,
     34     drop_variables=drop_variables,
     35     use_cftime=use_cftime,
     36     decode_timedelta=decode_timedelta,
     37 )
     39 ds = Dataset(vars, attrs=attrs)
     40 ds = ds.set_coords(coord_names.intersection(vars))

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/conventions.py:516, in decode_cf_variables(variables, attributes, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)
    509     continue
    510 stack_char_dim = (
    511     concat_characters
    512     and v.dtype == "S1"
    513     and v.ndim > 0
    514     and stackable(v.dims[-1])
    515 )
--> 516 new_vars[k] = decode_cf_variable(
    517     k,
    518     v,
    519     concat_characters=concat_characters,
    520     mask_and_scale=mask_and_scale,
    521     decode_times=decode_times,
    522     stack_char_dim=stack_char_dim,
    523     use_cftime=use_cftime,
    524     decode_timedelta=decode_timedelta,
    525 )
    526 if decode_coords in [True, "coordinates", "all"]:
    527     var_attrs = new_vars[k].attrs

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/conventions.py:364, in decode_cf_variable(name, var, concat_characters, mask_and_scale, decode_times, decode_endianness, stack_char_dim, use_cftime, decode_timedelta)
    362     var = times.CFTimedeltaCoder().decode(var, name=name)
    363 if decode_times:
--> 364     var = times.CFDatetimeCoder(use_cftime=use_cftime).decode(var, name=name)
    366 dimensions, data, attributes, encoding = variables.unpack_for_decoding(var)
    367 # TODO(shoyer): convert everything below to use coders

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/coding/times.py:673, in CFDatetimeCoder.decode(self, variable, name)
    671 units = pop_to(attrs, encoding, "units")
    672 calendar = pop_to(attrs, encoding, "calendar")
--> 673 dtype = _decode_cf_datetime_dtype(data, units, calendar, self.use_cftime)
    674 transform = partial(
    675     decode_cf_datetime,
    676     units=units,
    677     calendar=calendar,
    678     use_cftime=self.use_cftime,
    679 )
    680 data = lazy_elemwise_func(data, transform, dtype)

File /g/data/xv83/dbi599/miniconda3/envs/model-eval/lib/python3.10/site-packages/xarray/coding/times.py:184, in _decode_cf_datetime_dtype(data, units, calendar, use_cftime)
    176     calendar_msg = (
    177         "the default calendar" if calendar is None else f"calendar {calendar!r}"
    178     )
    179     msg = (
    180         f"unable to decode time units {units!r} with {calendar_msg!r}. Try "
    181         "opening your dataset with decode_times=False or installing cftime "
    182         "if it is not installed."
    183     )
--> 184     raise ValueError(msg)
    185 else:
    186     dtype = getattr(result, "dtype", np.dtype("object"))

ValueError: unable to decode time units 'days since 1900-01-01 00:00:00' with "calendar 'proleptic_gregorian'". Try opening your dataset with decode_times=False or installing cftime if it is not installed.
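One way to see what is going on is to open the file without decoding times and inspect the raw values and attributes of the bounds variable, then skip that variable when reading the data for real. This is only a diagnostic/workaround sketch, not a fix in icclim itself, and it assumes the bounds variable is named time_bounds (check the file for the actual name):

import xarray as xr

# Path taken from the file named above in this issue.
infile = "/g/data/xv83/dbi599/indices/cwd_year_AGCD_v1_r005_1900-2021.nc"

# Open without decoding times so the raw (encoded) integer/float values are visible.
ds_raw = xr.open_dataset(infile, decode_times=False)

# The bounds variable shares the units/calendar of the time axis, so a fill value
# or otherwise out-of-range entry in it will break decoding for the whole dataset.
print(ds_raw["time_bounds"].attrs)
print(ds_raw["time_bounds"].min().item(), ds_raw["time_bounds"].max().item())

# Possible workaround: drop the offending bounds variable and let xarray
# decode the time axis on its own.
ds = xr.open_dataset(infile, drop_variables=["time_bounds"], decode_timedelta=False)

If the raw bounds values turn out to be fill values or far outside the expected range, dropping (or re-encoding) that variable should let the time axis decode normally with the 'days since 1900-01-01 00:00:00' units reported in the error.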
