Commit 17e7bbc

Merge pull request #213 from andersy005/patch-preprocess
Patch _open_asset() function
2 parents (af1d90e + 3059a69) · commit 17e7bbc

File tree

1 file changed: +16 −13 lines changed

intake_esm/merge_util.py

Lines changed: 16 additions & 13 deletions
@@ -99,8 +99,9 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
                 zarr_kwargs=zarr_kwargs,
                 cdf_kwargs=cdf_kwargs,
                 preprocess=preprocess,
+                varname=varname,
             )
-            ds.attrs['intake_esm_varname'] = varname
+
             return ds

         else:
@@ -138,8 +139,8 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
                         del encoding[v][enc_attrs]

             if agg_type == 'join_new':
-                logger.info(
-                    f'Joining {len(dsets)} dataset(s) along new {agg_column} dimension with options={agg_options}'
+                logger.debug(
+                    f'Joining {len(dsets)} dataset(s) along new {agg_column} dimension with options={agg_options}.\ndsets={dsets}'
                 )
                 varname = dsets[0].attrs['intake_esm_varname']
                 ds = join_new(
@@ -151,14 +152,14 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
                 )

             elif agg_type == 'join_existing':
-                logger.info(
-                    f'Joining {len(dsets)} dataset(s) along existing dimension with options={agg_options}'
+                logger.debug(
+                    f'Joining {len(dsets)} dataset(s) along existing dimension with options={agg_options}.\ndsets={dsets}'
                 )
                 ds = join_existing(dsets, options=agg_options)

             elif agg_type == 'union':
-                logger.info(
-                    f'Merging {len(dsets)} dataset(s) into a single Dataset with options={agg_options}'
+                logger.debug(
+                    f'Merging {len(dsets)} dataset(s) into a single Dataset with options={agg_options}.\ndsets={dsets}'
                 )
                 ds = union(dsets, options=agg_options)

@@ -172,7 +173,7 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
     return apply_aggregation(v)


-def _open_asset(path, data_format, zarr_kwargs, cdf_kwargs, preprocess):
+def _open_asset(path, data_format, zarr_kwargs, cdf_kwargs, preprocess, varname):
     protocol = None
     root = path
     if isinstance(path, fsspec.mapping.FSMap):
@@ -188,25 +189,27 @@ def _open_asset(path, data_format, zarr_kwargs, cdf_kwargs, preprocess):
         root = path.root

     if data_format == 'zarr':
-        logger.info(f'Opening zarr store: {root} - protocol: {protocol}')
+        logger.debug(f'Opening zarr store: {root} - protocol: {protocol}')
         try:
             ds = xr.open_zarr(path, **zarr_kwargs)
         except Exception as e:
-            logger.error(f'Failed to open zarr store.')
+            logger.error(f'Failed to open zarr store with zarr_kwargs={zarr_kwargs}')
             raise e

     else:
-        logger.info(f'Opening netCDF/HDF dataset: {root} - protocol: {protocol}')
+        logger.debug(f'Opening netCDF/HDF dataset: {root} - protocol: {protocol}')
         try:
             ds = xr.open_dataset(path, **cdf_kwargs)
         except Exception as e:
-            logger.error(f'Failed to open netCDF/HDF dataset.')
+            logger.error(f'Failed to open netCDF/HDF dataset with cdf_kwargs={cdf_kwargs}')
             raise e

+    ds.attrs['intake_esm_varname'] = varname
+
     if preprocess is None:
         return ds
     else:
-        logger.info(f'Applying pre-processing with {preprocess.__name__} function')
+        logger.debug(f'Applying pre-processing with {preprocess.__name__} function')
         return preprocess(ds)
