Fix upstream-dev CI, silence warnings (#341)
* Fix upstream-dev CI

Closes #337

* silence warnings

* fix mypy

* Trigger upstream workflow
dcherian authored Mar 15, 2024
1 parent f4d5baf commit 382dc14
Showing 3 changed files with 8 additions and 5 deletions.
1 change: 1 addition & 0 deletions .github/workflows/upstream-dev-ci.yaml
@@ -10,6 +10,7 @@ on:
     paths:
       - ".github/workflows/upstream-dev-ci.yaml"
       - "ci/upstream-dev-env.yml"
+      - "flox/*"
   schedule:
     - cron: "0 0 * * *" # Daily “At 00:00” UTC
   workflow_dispatch: # allows you to trigger the workflow run manually
2 changes: 1 addition & 1 deletion flox/xarray.py
@@ -259,7 +259,7 @@ def xarray_reduce(
     for var in maybe_drop:
         maybe_midx = ds._indexes.get(var, None)
         if isinstance(maybe_midx, PandasMultiIndex):
-            idx_coord_names = set(maybe_midx.index.names + [maybe_midx.dim])
+            idx_coord_names = set(tuple(maybe_midx.index.names) + (maybe_midx.dim,))
             idx_other_names = idx_coord_names - set(maybe_drop)
             more_drop.update(idx_other_names)
     maybe_drop.update(more_drop)
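Note on the flox/xarray.py change: pandas' `Index.names` is a list-like `FrozenList`, and the old `names + [dim]` list concatenation is brittle if upstream ever hands back a plain tuple (tuple + list raises TypeError), which also trips mypy. Normalizing both sides to tuples sidesteps that. A minimal sketch of the pattern, with hypothetical names:

```python
import pandas as pd

midx = pd.MultiIndex.from_arrays([[1, 2], ["a", "b"]], names=["lat", "lon"])
dim = "space"  # hypothetical stacked-dimension name

# The old pattern relied on list-style concatenation with FrozenList:
#   idx_coord_names = set(midx.names + [dim])
# That breaks (tuple + list is a TypeError) if `.names` is a plain tuple.

# New pattern: normalize to a tuple first, then concatenate tuples.
idx_coord_names = set(tuple(midx.names) + (dim,))
print(idx_coord_names)  # {'lat', 'lon', 'space'}
```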
10 changes: 6 additions & 4 deletions tests/test_core.py
@@ -880,7 +880,7 @@ def test_find_cohorts_missing_groups():

 @pytest.mark.parametrize("chunksize", [12, 13, 14, 24, 36, 48, 72, 71])
 def test_verify_complex_cohorts(chunksize: int) -> None:
-    time = pd.Series(pd.date_range("2016-01-01", "2018-12-31 23:59", freq="H"))
+    time = pd.Series(pd.date_range("2016-01-01", "2018-12-31 23:59", freq="h"))
     chunks = (chunksize,) * (len(time) // chunksize)
     by = np.array(time.dt.dayofyear.values)
@@ -1063,7 +1063,7 @@ def test_empty_bins(func, engine):


 def test_datetime_binning():
-    time_bins = pd.date_range(start="2010-08-01", end="2010-08-15", freq="24H")
+    time_bins = pd.date_range(start="2010-08-01", end="2010-08-15", freq="24h")
     by = pd.date_range("2010-08-01", "2010-08-15", freq="15min")

     (actual,) = _convert_expected_groups_to_index((time_bins,), isbin=(True,), sort=False)
@@ -1125,7 +1125,7 @@ def test_group_by_datetime(engine, method):
     if method == "blockwise":
         return None

-    edges = pd.date_range("1999-12-31", "2000-12-31", freq="M").to_series().to_numpy()
+    edges = pd.date_range("1999-12-31", "2000-12-31", freq="ME").to_series().to_numpy()
     actual, _ = groupby_reduce(daskarray, t.to_numpy(), isbin=True, expected_groups=edges, **kwargs)
     expected = data.resample("M").mean().to_numpy()
     assert_equal(expected, actual)
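Likewise, pandas 2.2 renames the month-end alias for `date_range` from "M" to "ME"; the generated bin edges are unchanged, only the spelling moves. A small sketch, assuming pandas >= 2.2:

```python
import pandas as pd

# "ME" (month end) replaces the deprecated "M" alias; the edges are identical.
edges = pd.date_range("1999-12-31", "2000-12-31", freq="ME")
print(edges[:2])  # DatetimeIndex(['1999-12-31', '2000-01-31'], ..., freq='ME')
```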
@@ -1520,7 +1520,7 @@ def test_validate_reindex() -> None:
 @requires_dask
 def test_1d_blockwise_sort_optimization():
     # Make sure for resampling problems sorting isn't done.
-    time = pd.Series(pd.date_range("2020-09-01", "2020-12-31 23:59", freq="3H"))
+    time = pd.Series(pd.date_range("2020-09-01", "2020-12-31 23:59", freq="3h"))
     array = dask.array.ones((len(time),), chunks=(224,))

     actual, _ = groupby_reduce(array, time.dt.dayofyear.values, method="blockwise", func="count")
@@ -1682,6 +1682,8 @@ def test_multiple_quantiles(q, chunk, func, by_ndim):
     actual, _ = groupby_reduce(array, labels, func=func, finalize_kwargs=dict(q=q), axis=axis)
     sorted_array = array[..., [0, 1, 2, 4, 3, 5, 6]]
     f = partial(getattr(np, func), q=q, axis=axis, keepdims=True)
+    if chunk:
+        sorted_array = sorted_array.compute()
     expected = np.concatenate((f(sorted_array[..., :4]), f(sorted_array[..., 4:])), axis=-1)
     if by_ndim == 2:
         expected = expected.squeeze(axis=-2)
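The added `compute()` materializes the dask-backed `sorted_array` before the numpy quantile function runs in the `chunk=True` parameterization; presumably this is one of the silenced warnings, since letting a numpy reduction coerce a chunked array implicitly can warn and recompute. A sketch of the pattern under that assumption:

```python
import dask.array as da
import numpy as np

x = da.random.random((2, 8), chunks=(1, 4))

# Materialize once up front instead of letting np.quantile coerce
# (and compute) the dask array implicitly:
x_np = x.compute()
expected = np.quantile(x_np, q=[0.5, 0.9], axis=-1, keepdims=True)
print(expected.shape)  # (2, 2, 1)
```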
