sotodlib/preprocess/pcore.py (18 changes: 13 additions & 5 deletions)
@@ -267,7 +267,7 @@ def _intersect(new, out):

     return fs_dets, ns_dets, fs_samps, ns_samps

-def _wrap_valid_ranges(new, out, valid_name="valid"):
+def _wrap_valid_ranges(new, out, valid_name="valid", wrap_name=None):
     """Wraps in a new Ranges field into ``out`` that tracks the current number
     of detectors and samples that intersect with ``new``.
     """
@@ -281,9 +281,16 @@ def _wrap_valid_ranges(new, out, valid_name="valid"):
     valid = RangesMatrix(
         [v if i in fs_dets else x for i in range(out.dets.count)]
     )
-    if valid_name in out:
-        out.move(valid_name, None)
-    out.wrap(valid_name, valid, [(0,'dets'),(1,'samps')])
+    if wrap_name:
+        if wrap_name in out:
+            out.move(wrap_name, None)
+        valid_aman = core.AxisManager(out.dets, out.samps)
+        valid_aman.wrap(valid_name, valid, [(0,'dets'),(1,'samps')])
+        out.wrap(wrap_name, valid_aman)
+    else:
+        if valid_name in out:
+            out.move(valid_name, None)
+        out.wrap(valid_name, valid, [(0,'dets'),(1,'samps')])

 def _expand(new, full, wrap_valid=True):
     """new will become a top level axismanager in full once it is matched to
@@ -534,7 +541,8 @@ def run(self, aman, proc_aman=None, select=True, sim=False, update_plot=False):
                 break

         if run_calc:
-            _wrap_valid_ranges(proc_aman, full, valid_name='valid_data')
+            _wrap_valid_ranges(proc_aman, full, valid_name='valid_data',
+                               wrap_name='valid_data')

         # copy updated frequency cutoffs to full
         if "frequency_cutoffs" in full:
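For context, here is a minimal sketch (not part of this PR) of the structure that _wrap_valid_ranges builds when wrap_name is passed: the RangesMatrix goes into a child AxisManager, and that child is wrapped into the parent, so the field is addressed as out.valid_data.valid_data rather than out.valid_data. The detector labels, sample count, and the all-valid RangesMatrix below are illustrative assumptions.

from so3g.proj import Ranges, RangesMatrix
from sotodlib import core

n_dets, n_samps = 3, 1000
out = core.AxisManager(
    core.LabelAxis('dets', ['det%02i' % i for i in range(n_dets)]),
    core.OffsetAxis('samps', n_samps),
)

# Mark every sample of every detector as valid, purely for illustration.
full_range = Ranges(n_samps)
full_range.add_interval(0, n_samps)
valid = RangesMatrix([full_range.copy() for _ in range(n_dets)])

# Mirrors the new wrap_name branch: a child AxisManager carries the Ranges
# field and is wrapped into the parent under wrap_name.
valid_name = wrap_name = 'valid_data'
valid_aman = core.AxisManager(out.dets, out.samps)
valid_aman.wrap(valid_name, valid, [(0, 'dets'), (1, 'samps')])
out.wrap(wrap_name, valid_aman)

print(out.valid_data.valid_data)  # the RangesMatrix, now one level deeper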
sotodlib/preprocess/preprocess_util.py (4 changes: 2 additions & 2 deletions)
@@ -368,7 +368,7 @@ def load_and_preprocess(obs_id, configs, context=None, dets=None, meta=None,
     configs, context = get_preprocess_context(configs, context)
     meta = context.get_meta(obs_id, dets=dets, meta=meta)
     if 'valid_data' in meta.preprocess:
-        keep = has_any_cuts(meta.preprocess.valid_data)
+        keep = has_any_cuts(meta.preprocess.valid_data.valid_data)
         meta.restrict("dets", keep)
     else:
         det_vals = load_preprocess_det_select(obs_id, configs=configs, context=context,
@@ -454,7 +454,7 @@ def multilayer_load_and_preprocess(obs_id, configs_init, configs_proc,

     logger.info("Restricting detectors on all proc pipeline processes")
     if 'valid_data' in meta_proc.preprocess:
-        keep_all = has_any_cuts(meta_proc.preprocess.valid_data)
+        keep_all = has_any_cuts(meta_proc.preprocess.valid_data.valid_data)
     else:
         keep_all = np.ones(meta_proc.dets.count, dtype=bool)
     for process in pipe_proc[:]:
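On the consumer side the loaders now reach one attribute deeper for the same data. A hedged sketch of how a caller ends up restricting detectors under the new layout; obs_id and context stand in for objects the surrounding code already has, and has_any_cuts is assumed to come from sotodlib.core.flagman.

from sotodlib.core.flagman import has_any_cuts

meta = context.get_meta(obs_id)                   # metadata for one observation
ranges = meta.preprocess.valid_data.valid_data    # previously meta.preprocess.valid_data
keep = has_any_cuts(ranges)                       # True for dets with >= 1 valid interval
meta.restrict("dets", keep)                       # drop detectors with no valid data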