Skip to content

Commit 8014735

Browse files
committed
Update kernel_dataset.py
1 parent 5041cd3 commit 8014735

File tree

1 file changed

+30
-2
lines changed

1 file changed

+30
-2
lines changed

mtpy/processing/kernel_dataset.py

Lines changed: 30 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -65,13 +65,13 @@
6565
from typing import Optional, Union
6666

6767
import pandas as pd
68-
6968
from loguru import logger
7069

7170
import mt_metadata.timeseries
7271
from mt_metadata.utils.list_dict import ListDict
73-
import mth5.timeseries.run_ts
7472

73+
import mth5.timeseries.run_ts
74+
from mth5.utils.helpers import initialize_mth5
7575

7676
from mtpy.processing.run_summary import RunSummary
7777
from mtpy.processing import (
@@ -151,6 +151,7 @@ def __init__(
151151
self.remote_station_id = remote_station_id
152152
self._mini_summary_columns = MINI_SUMMARY_COLUMNS
153153
self.survey_metadata = {}
154+
self.initialized = False
154155

155156
def __str__(self):
156157
return str(self.mini_summary.head())
@@ -796,6 +797,33 @@ def update_survey_metadata(
796797
if len(self.survey_metadata.keys()) > 1:
797798
raise NotImplementedError
798799

800+
def initialize_mth5s(self, mode="r"):
    """
    Open the MTH5 file(s) backing this dataset and return them keyed by
    station id, then mark the dataset as initialized.

    A future version of this for multiple station processing may need a
    nested dict with [survey_id][station].

    Parameters
    ----------
    mode : str, default "r"
        File-open mode passed through to ``initialize_mth5`` for the
        *local* station's file only.

    Returns
    -------
    dict
        Keyed by station id:
        ``{local station id: mth5.mth5.MTH5, remote station id: mth5.mth5.MTH5}``.
        The remote entry is present only when ``self.remote_station_id``
        is truthy.
    """
    # Local file honours the caller-supplied mode.
    opened = {
        self.local_station_id: initialize_mth5(self.local_mth5_path, mode=mode)
    }

    # NOTE(review): the remote file is opened with a hardcoded mode="r",
    # ignoring the `mode` argument — presumably intentional, since the
    # remote station's data is only ever read; confirm with maintainers.
    if self.remote_station_id:
        opened[self.remote_station_id] = initialize_mth5(
            self.remote_mth5_path, mode="r"
        )

    self.initialized = True
    return opened
826+
799827
def initialize_dataframe_for_processing(self, mth5_objs: dict) -> None:
800828
"""
801829
Adds extra columns needed for processing to the dataframe.

0 commit comments

Comments (0)