@@ -99,8 +99,9 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
                 zarr_kwargs=zarr_kwargs,
                 cdf_kwargs=cdf_kwargs,
                 preprocess=preprocess,
+                varname=varname,
             )
-            ds.attrs['intake_esm_varname'] = varname
+
             return ds

         else:
@@ -138,8 +139,8 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
                                     del encoding[v][enc_attrs]

             if agg_type == 'join_new':
-                logger.info(
-                    f'Joining {len(dsets)} dataset(s) along new {agg_column} dimension with options={agg_options}'
+                logger.debug(
+                    f'Joining {len(dsets)} dataset(s) along new {agg_column} dimension with options={agg_options}.\ndsets={dsets}'
                 )
                 varname = dsets[0].attrs['intake_esm_varname']
                 ds = join_new(
@@ -151,14 +152,14 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
                 )

             elif agg_type == 'join_existing':
-                logger.info(
-                    f'Joining {len(dsets)} dataset(s) along existing dimension with options={agg_options}'
+                logger.debug(
+                    f'Joining {len(dsets)} dataset(s) along existing dimension with options={agg_options}.\ndsets={dsets}'
                 )
                 ds = join_existing(dsets, options=agg_options)

             elif agg_type == 'union':
-                logger.info(
-                    f'Merging {len(dsets)} dataset(s) into a single Dataset with options={agg_options}'
+                logger.debug(
+                    f'Merging {len(dsets)} dataset(s) into a single Dataset with options={agg_options}.\ndsets={dsets}'
                 )
                 ds = union(dsets, options=agg_options)

@@ -172,7 +173,7 @@ def apply_aggregation(v, agg_column=None, key=None, level=0):
     return apply_aggregation(v)


-def _open_asset(path, data_format, zarr_kwargs, cdf_kwargs, preprocess):
+def _open_asset(path, data_format, zarr_kwargs, cdf_kwargs, preprocess, varname):
     protocol = None
     root = path
     if isinstance(path, fsspec.mapping.FSMap):
@@ -188,25 +189,27 @@ def _open_asset(path, data_format, zarr_kwargs, cdf_kwargs, preprocess):
         root = path.root

     if data_format == 'zarr':
-        logger.info(f'Opening zarr store: {root} - protocol: {protocol}')
+        logger.debug(f'Opening zarr store: {root} - protocol: {protocol}')
         try:
             ds = xr.open_zarr(path, **zarr_kwargs)
         except Exception as e:
-            logger.error(f'Failed to open zarr store.')
+            logger.error(f'Failed to open zarr store with zarr_kwargs={zarr_kwargs}')
             raise e

     else:
-        logger.info(f'Opening netCDF/HDF dataset: {root} - protocol: {protocol}')
+        logger.debug(f'Opening netCDF/HDF dataset: {root} - protocol: {protocol}')
         try:
             ds = xr.open_dataset(path, **cdf_kwargs)
         except Exception as e:
-            logger.error(f'Failed to open netCDF/HDF dataset.')
+            logger.error(f'Failed to open netCDF/HDF dataset with cdf_kwargs={cdf_kwargs}')
             raise e

+    ds.attrs['intake_esm_varname'] = varname
+
     if preprocess is None:
         return ds
     else:
-        logger.info(f'Applying pre-processing with {preprocess.__name__} function')
+        logger.debug(f'Applying pre-processing with {preprocess.__name__} function')
         return preprocess(ds)

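For context, the net effect of this diff is twofold: the `intake_esm_varname` attribute is now set inside the asset-opening helper (which receives `varname` as a new parameter) rather than by its caller, and the verbose open/join messages move from `logger.info` to `logger.debug`, with the failure messages now echoing the kwargs that were used. The standalone sketch below mirrors that behaviour with plain xarray; it is an illustration only, not the library's code, and the helper name, file name, and variable name are made up.

```python
import logging

import xarray as xr

logger = logging.getLogger(__name__)


def open_asset_sketch(path, cdf_kwargs=None, varname=None):
    """Toy stand-in for the patched helper: open a netCDF file, log at debug
    level, and record the variable name on the dataset's attrs."""
    cdf_kwargs = cdf_kwargs or {}
    logger.debug(f'Opening netCDF/HDF dataset: {path}')  # debug, not info, per the diff
    try:
        ds = xr.open_dataset(path, **cdf_kwargs)
    except Exception:
        # Echo the kwargs on failure, mirroring the updated error message.
        logger.error(f'Failed to open netCDF/HDF dataset with cdf_kwargs={cdf_kwargs}')
        raise
    # The attribute is attached inside the helper, as the patched _open_asset now does.
    ds.attrs['intake_esm_varname'] = varname
    return ds


if __name__ == '__main__':
    # Build a tiny example file so the sketch runs end to end (requires a netCDF backend).
    xr.Dataset({'tas': ('time', [280.0, 281.5])}).to_netcdf('example_tas.nc')
    ds = open_asset_sketch('example_tas.nc', varname='tas')
    print(ds.attrs['intake_esm_varname'])  # -> 'tas'
```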