Skip to content

Commit 97fcfd4

Browse files
committed
Testing improvements
1 parent 8497cc9 commit 97fcfd4

File tree

3 files changed

+16
-12
lines changed

3 files changed

+16
-12
lines changed

flox/aggregations.py

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ def _normalize_dtype(dtype, array_dtype, fill_value=None):
             dtype = np.floating
         else:
             dtype = array_dtype
-    elif dtype is np.floating:
+    if dtype is np.floating:
         # mean, std, var always result in floating
         # but we preserve the array's dtype if it is floating
         if array_dtype.kind in "fcmM":

tests/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -92,6 +92,9 @@ def assert_equal(a, b):
     elif has_xarray and isinstance(a, xr_types) or isinstance(b, xr_types):
         xr.testing.assert_identical(a, b)
     elif has_dask and isinstance(a, dask_array_type) or isinstance(b, dask_array_type):
+        # sometimes it's nice to see values and shapes
+        # rather than being dropped into some file in dask
+        np.testing.assert_allclose(a, b)
         # does some validation of the dask graph
         da.utils.assert_eq(a, b, equal_nan=True)
     else:

tests/test_core.py

Lines changed: 12 additions & 11 deletions
@@ -307,29 +307,30 @@ def test_numpy_reduce_axis_subset(engine):
     # TODO: add NaNs
     by = labels2d
     array = np.ones_like(by)
-    result, _ = groupby_reduce(array, by, "count", axis=1, engine=engine)
+    kwargs = dict(func="count", engine=engine)
+    result, _ = groupby_reduce(array, by, **kwargs, axis=1)
     assert_equal(result, [[2, 3], [2, 3]])

     by = np.broadcast_to(labels2d, (3, *labels2d.shape))
     array = np.ones_like(by)
-    result, _ = groupby_reduce(array, by, "count", axis=1, engine=engine)
+    result, _ = groupby_reduce(array, by, **kwargs, axis=1)
     subarr = np.array([[1, 1], [1, 1], [0, 2], [1, 1], [1, 1]])
     expected = np.tile(subarr, (3, 1, 1))
     assert_equal(result, expected)

-    result, _ = groupby_reduce(array, by, "count", axis=2, engine=engine)
+    result, _ = groupby_reduce(array, by, **kwargs, axis=2)
     subarr = np.array([[2, 3], [2, 3]])
     expected = np.tile(subarr, (3, 1, 1))
     assert_equal(result, expected)

-    result, _ = groupby_reduce(array, by, "count", axis=(1, 2), engine=engine)
+    result, _ = groupby_reduce(array, by, **kwargs, axis=(1, 2))
     expected = np.array([[4, 6], [4, 6], [4, 6]])
     assert_equal(result, expected)

-    result, _ = groupby_reduce(array, by, "count", axis=(2, 1), engine=engine)
+    result, _ = groupby_reduce(array, by, **kwargs, axis=(2, 1))
     assert_equal(result, expected)

-    result, _ = groupby_reduce(array, by[0, ...], "count", axis=(1, 2), engine=engine)
+    result, _ = groupby_reduce(array, by[0, ...], **kwargs, axis=(1, 2))
     expected = np.array([[4, 6], [4, 6], [4, 6]])
     assert_equal(result, expected)

@@ -343,7 +344,7 @@ def test_dask_reduce_axis_subset():
     result, _ = groupby_reduce(
         da.from_array(array, chunks=(2, 3)),
         da.from_array(by, chunks=(2, 2)),
-        "count",
+        func="count",
         axis=1,
         expected_groups=[0, 2],
     )
@@ -357,7 +358,7 @@ def test_dask_reduce_axis_subset():
     result, _ = groupby_reduce(
         da.from_array(array, chunks=(1, 2, 3)),
         da.from_array(by, chunks=(2, 2, 2)),
-        "count",
+        func="count",
         axis=1,
         expected_groups=[0, 2],
         fill_value=123,
@@ -370,7 +371,7 @@ def test_dask_reduce_axis_subset():
     result, _ = groupby_reduce(
         da.from_array(array, chunks=(1, 2, 3)),
         da.from_array(by, chunks=(2, 2, 2)),
-        "count",
+        func="count",
         axis=2,
         expected_groups=[0, 2],
     )
@@ -380,7 +381,7 @@ def test_dask_reduce_axis_subset():
         groupby_reduce(
             da.from_array(array, chunks=(1, 3, 2)),
             da.from_array(by, chunks=(2, 2, 2)),
-            "count",
+            func="count",
             axis=2,
         )

@@ -448,7 +449,7 @@ def _maybe_chunk(arr):
     result, _ = groupby_reduce(
         _maybe_chunk(array),
         _maybe_chunk(by),
-        "count",
+        func="count",
         expected_groups=groups,
         axis=axis,
         fill_value=0,

0 commit comments

Comments
 (0)