@@ -31,6 +31,7 @@ def test_alignment_error():
3131 groupby_reduce (da , labels , func = "mean" )
3232
3333
34+ @pytest .mark .parametrize ("backend" , ["numpy" , "numba" ])
3435@pytest .mark .parametrize ("dtype" , (float , int ))
3536@pytest .mark .parametrize ("chunk, split_out" , [(False , 1 ), (True , 1 ), (True , 2 ), (True , 3 )])
3637@pytest .mark .parametrize ("expected_groups" , [None , [0 , 1 , 2 ], np .array ([0 , 1 , 2 ])])
@@ -59,7 +60,9 @@ def test_alignment_error():
5960 # (np.ones((12,)), np.array([labels, labels])), # form 4
6061 ],
6162)
62- def test_groupby_reduce (array , by , expected , func , expected_groups , chunk , split_out , dtype ):
63+ def test_groupby_reduce (
64+ array , by , expected , func , expected_groups , chunk , split_out , dtype , backend
65+ ):
6366 array = array .astype (dtype )
6467 if chunk :
6568 if expected_groups is None :
@@ -81,10 +84,12 @@ def test_groupby_reduce(array, by, expected, func, expected_groups, chunk, split
8184 expected_groups = expected_groups ,
8285 fill_value = 123 ,
8386 split_out = split_out ,
87+ backend = backend ,
8488 )
8589 assert_equal (expected , result )
8690
8791
92+ @pytest .mark .parametrize ("backend" , ["numpy" , "numba" ])
8893@pytest .mark .parametrize ("size" , ((12 ,), (12 , 5 )))
8994@pytest .mark .parametrize (
9095 "func" ,
@@ -109,7 +114,7 @@ def test_groupby_reduce(array, by, expected, func, expected_groups, chunk, split
109114 pytest .param ("nanargmin" , marks = (pytest .mark .xfail ,)),
110115 ),
111116)
112- def test_groupby_reduce_all (size , func ):
117+ def test_groupby_reduce_all (size , func , backend ):
113118
114119 array = np .random .randn (* size )
115120 by = np .ones (size [- 1 ])
@@ -123,13 +128,15 @@ def test_groupby_reduce_all(size, func):
123128 expected = getattr (np , func )(array , axis = - 1 )
124129 expected = np .expand_dims (expected , - 1 )
125130
126- actual , _ = groupby_reduce (array , by , func = func )
131+ actual , _ = groupby_reduce (array , by , func = func , backend = backend )
127132 if "arg" in func :
128133 assert actual .dtype .kind == "i"
129134 assert_equal (actual , expected )
130135
131136 for method in ["mapreduce" , "cohorts" ]:
132- actual , _ = groupby_reduce (da .from_array (array , chunks = 3 ), by , func = func , method = method )
137+ actual , _ = groupby_reduce (
138+ da .from_array (array , chunks = 3 ), by , func = func , method = method , backend = backend
139+ )
133140 if "arg" in func :
134141 assert actual .dtype .kind == "i"
135142 assert_equal (actual , expected )
@@ -336,14 +343,15 @@ def test_dask_reduce_axis_subset():
336343 )
337344
338345
346+ @pytest .mark .parametrize ("backend" , ["numpy" , "numba" ])
339347@pytest .mark .parametrize (
340348 "axis" , [None , (0 , 1 , 2 ), (0 , 1 ), (0 , 2 ), (1 , 2 ), 0 , 1 , 2 , (0 ,), (1 ,), (2 ,)]
341349)
342- def test_groupby_reduce_axis_subset_against_numpy (axis ):
350+ def test_groupby_reduce_axis_subset_against_numpy (axis , backend ):
343351 # tests against the numpy output to make sure dask compute matches
344352 by = np .broadcast_to (labels2d , (3 , * labels2d .shape ))
345353 array = np .ones_like (by )
346- kwargs = dict (func = "count" , axis = axis , expected_groups = [0 , 2 ], fill_value = 123 )
354+ kwargs = dict (func = "count" , axis = axis , expected_groups = [0 , 2 ], fill_value = 123 , backend = backend )
347355 with raise_if_dask_computes ():
348356 actual , _ = groupby_reduce (
349357 da .from_array (array , chunks = (- 1 , 2 , 3 )),
@@ -354,6 +362,7 @@ def test_groupby_reduce_axis_subset_against_numpy(axis):
354362 assert_equal (actual , expected )
355363
356364
365+ @pytest .mark .parametrize ("backend" , ["numpy" , "numba" ])
357366@pytest .mark .parametrize ("chunks" , [None , (2 , 2 , 3 )])
358367@pytest .mark .parametrize (
359368 "axis, groups, expected_shape" ,
@@ -363,7 +372,7 @@ def test_groupby_reduce_axis_subset_against_numpy(axis):
363372 (None , [0 ], (1 ,)), # global reduction; 0 shaped group axis; 1 group
364373 ],
365374)
366- def test_groupby_reduce_nans (chunks , axis , groups , expected_shape ):
375+ def test_groupby_reduce_nans (chunks , axis , groups , expected_shape , backend ):
367376 def _maybe_chunk (arr ):
368377 if chunks :
369378 return da .from_array (arr , chunks = chunks )
@@ -383,6 +392,7 @@ def _maybe_chunk(arr):
383392 expected_groups = groups ,
384393 axis = axis ,
385394 fill_value = 0 ,
395+ backend = backend ,
386396 )
387397 assert_equal (result , np .zeros (expected_shape , dtype = np .int64 ))
388398
@@ -394,7 +404,8 @@ def _maybe_chunk(arr):
394404 # by = np.broadcast_to(labels2d, (3, *labels2d.shape))
395405
396406
397- def test_groupby_all_nan_blocks ():
407+ @pytest .mark .parametrize ("backend" , ["numpy" , "numba" ])
408+ def test_groupby_all_nan_blocks (backend ):
398409 labels = np .array ([0 , 0 , 2 , 2 , 2 , 1 , 1 , 2 , 2 , 1 , 1 , 0 ])
399410 nan_labels = labels .astype (float ) # copy
400411 nan_labels [:5 ] = np .nan
@@ -410,6 +421,7 @@ def test_groupby_all_nan_blocks():
410421 da .from_array (by , chunks = (1 , 3 )),
411422 func = "sum" ,
412423 expected_groups = None ,
424+ backend = backend ,
413425 )
414426 assert_equal (actual , expected )
415427