def obtain_new_pjob_id(self, user_id: str, initial_value: bytes = b"", attempts: int = 3) -> str:
    """
    Obtain a new, unique partitioned job id.

    :param user_id: user the partitioned job belongs to
    :param initial_value: initial payload to store at the new node
    :param attempts: maximum number of candidate ids to try before giving up
    :return: the obtained unique pjob id
    :raises PartitionedJobFailure: when no unique id could be obtained within ``attempts`` tries
    """
    # NOTE(review): the `attempts` default was truncated in the diff hunk header — confirm against the full file.
    # A couple of pjob_id attempts: start with current time based name and a suffix to counter collisions (if any)
    base_pjob_id = "pj-" + Clock.utcnow().strftime("%Y%m%d-%H%M%S")
    with self._connect():
        for pjob_id in [base_pjob_id] + [f"{base_pjob_id}-{i}" for i in range(1, attempts)]:
            try:
                # `create` fails with NodeExistsError when the node is already present,
                # which makes this an atomic claim on the candidate id.
                self._client.create(path=self._path(user_id, pjob_id), value=initial_value, makepath=True)
                # We obtained our unique id
                return pjob_id
            except NodeExistsError:
                # TODO: check that NodeExistsError is thrown on existing job_ids
                # TODO: add a sleep() to back off a bit?
                continue

    raise PartitionedJobFailure("Too much attempts to create new pjob_id")
def insert (self , user_id : str , pjob : PartitionedJob ) -> str :
def insert_sjob(
    self,
    user_id: str,
    pjob_id: str,
    sjob_id: str,
    subjob,
    title: Optional[str] = None,
    status: str = STATUS_INSERTED,
):
    """
    Store a sub-job of a partitioned job and initialize its status.

    :param user_id: user the partitioned job belongs to
    :param pjob_id: id of the enclosing partitioned job
    :param sjob_id: id of the sub-job to store
    :param subjob: sub-job object providing `process_graph` and `backend_id`
    :param title: optional human-readable title for the sub-job
    :param status: initial status to record for the sub-job
    """
    # NOTE(review): the leading parameters were cut off by the diff hunk header;
    # they are reconstructed here from body usage (user_id, pjob_id, sjob_id, subjob) — confirm against the full file.
    with self._connect():
        self._client.create(
            path=self._path(user_id, pjob_id, "sjobs", sjob_id),
            value=self.serialize(process_graph=subjob.process_graph, backend_id=subjob.backend_id, title=title),
            makepath=True,
        )
        # Record the initial status in a dedicated node (create=True: node does not exist yet).
        self.set_sjob_status(user_id=user_id, pjob_id=pjob_id, sjob_id=sjob_id, status=status, create=True)
159
def get_pjob_metadata (self , user_id : str , pjob_id : str ) -> dict :
158
160
"""Get metadata of partitioned job, given by storage id."""
0 commit comments