Commit: Multi
b8raoult committed May 12, 2021
1 parent 36bc8b0 commit a7d3cba
Showing 5 changed files with 19 additions and 8 deletions.
4 changes: 4 additions & 0 deletions climetlab/readers/__init__.py
@@ -37,6 +37,8 @@ def multi_merge(cls, readers):
 
 
 class MultiReaders:
+    backend_kwargs = {}
+
     def __init__(self, readers):
         self.readers = readers
 
@@ -58,6 +60,8 @@ def preprocess(ds):
             assert options == opts, f"{options} != {opts}"
 
         options.update(kwargs)
+        options.setdefault("backend_kwargs", {})
+        options["backend_kwargs"].update(self.backend_kwargs)
 
         return xr.open_mfdataset(
             [r.path for r in self.readers],
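Note: the two lines added to to_xarray fold the new class-level backend_kwargs defaults into whatever the caller supplies, so engine-specific defaults and user options are merged rather than one clobbering the other. A minimal sketch of that merge, with illustrative values that are not from the commit:

# Illustrative values only; the merge mirrors the three lines added above.
options = {"concat_dim": "time"}                # options collected from the readers
kwargs = {"backend_kwargs": {"indexpath": ""}}  # hypothetical caller-supplied kwargs
class_defaults = {"squeeze": False}             # e.g. MultiGribReaders.backend_kwargs

options.update(kwargs)
options.setdefault("backend_kwargs", {})
options["backend_kwargs"].update(class_defaults)

# options is now:
# {"concat_dim": "time", "backend_kwargs": {"indexpath": "", "squeeze": False}}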
13 changes: 8 additions & 5 deletions climetlab/readers/grib.py
@@ -220,6 +220,7 @@ def __iter__(self):
 
 class MultiGribReaders(MultiReaders):
     engine = "cfgrib"
+    backend_kwargs = {"squeeze": False}
 
 
 class GRIBReader(Reader):
@@ -249,12 +250,14 @@ def __getitem__(self, n):
     def __len__(self):
         return len(self._items())
 
-    def to_xarray(self):
-        import xarray as xr
+    def to_xarray(self, **kwargs):
+        # So we use the same code
+        return MultiGribReaders([self]).to_xarray(**kwargs)
+        # import xarray as xr
 
-        params = self.source.cfgrib_options()
-        ds = xr.open_dataset(self.path, engine="cfgrib", **params)
-        return self.source.post_xarray_open_dataset_hook(ds)
+        # params = self.source.cfgrib_options()
+        # ds = xr.open_dataset(self.path, engine="cfgrib", **params)
+        # return self.source.post_xarray_open_dataset_hook(ds)
 
     # @dict_args
     # def sel(self, **kwargs):
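Note: GRIBReader.to_xarray now wraps itself in MultiGribReaders([self]), so the one-file and many-file cases go through a single xarray open path, and MultiGribReaders passes squeeze=False to cfgrib, presumably so size-1 dimensions are kept and datasets from several files can still be concatenated. A simplified sketch of the pattern, with the surrounding CliMetLab details elided or assumed:

import xarray as xr


class MultiReaders:
    engine = None
    backend_kwargs = {}  # subclasses override this with engine-specific defaults

    def __init__(self, readers):
        self.readers = readers

    def to_xarray(self, **kwargs):
        options = dict(kwargs)
        options.setdefault("backend_kwargs", {})
        options["backend_kwargs"].update(self.backend_kwargs)
        return xr.open_mfdataset(
            [r.path for r in self.readers],
            engine=self.engine,
            **options,
        )


class MultiGribReaders(MultiReaders):
    engine = "cfgrib"
    backend_kwargs = {"squeeze": False}  # keep size-1 dimensions so files concatenate


class GRIBReader:
    def __init__(self, path):
        self.path = path

    def to_xarray(self, **kwargs):
        # A single file becomes a one-element multi-read, so both cases share one code path.
        return MultiGribReaders([self]).to_xarray(**kwargs)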
6 changes: 4 additions & 2 deletions climetlab/readers/netcdf.py
@@ -312,8 +312,10 @@ def _get_fields(self, ds): # noqa C901
 
         return fields
 
-    def to_xarray(self):
-        return xr.open_dataset(self.path, engine="netcdf4")
+    def to_xarray(self, **kwargs):
+        # So we use the same code
+        return MultiNetcdfReaders([self]).to_xarray(**kwargs)
+        # return xr.open_dataset(self.path, engine="netcdf4")
 
     @classmethod
     def multi_merge(cls, readers):
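Note: the NetCDF reader gets the same delegation, so keyword arguments passed to to_xarray reach xarray for single files as well. A hedged usage sketch, assuming the standard climetlab entry points and an illustrative file path:

import climetlab as cml

# A single NetCDF file: to_xarray() now routes through MultiNetcdfReaders([self]),
# so any keyword arguments are forwarded to xarray.open_mfdataset.
source = cml.load_source("file", "era5-2021-03-01.nc")  # illustrative path
ds = source.to_xarray()
print(ds)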
2 changes: 1 addition & 1 deletion tests/test_grib.py
@@ -34,7 +34,7 @@ def test_sel():
     s.sel(shortName="2t")
 
 
-@pytest.mark.skipif(("GITHUB_WORKFLOW" in os.environ) or True, reason="Not yet ready")
+# @pytest.mark.skipif(("GITHUB_WORKFLOW" in os.environ) or True, reason="Not yet ready")
 def test_multi():
     s1 = load_source(
         "cds",
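Note: with the skip marker commented out, test_multi runs again and exercises the multi-GRIB path. A hedged sketch of the pattern it covers, mirroring the NetCDF test below and using illustrative local files instead of the CDS requests:

from climetlab import load_source

# Illustrative local GRIB files in place of the CDS requests used by the real test.
s1 = load_source("file", "part1.grib")
s2 = load_source("file", "part2.grib")

source = load_source("multi", s1, s2)
ds = source.to_xarray()  # handled by MultiGribReaders, which now sets squeeze=False
print(ds)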
2 changes: 2 additions & 0 deletions tests/test_netcdf.py
@@ -26,6 +26,7 @@ def test_multi():
         date="2021-03-01",
         format="netcdf",
     )
+    print(s1.to_xarray())
     s2 = load_source(
         "cds",
         "reanalysis-era5-single-levels",
@@ -34,6 +35,7 @@
         date="2021-03-02",
         format="netcdf",
     )
+    print(s2.to_xarray())
 
     source = load_source("multi", s1, s2)
     for s in source:
