Commit 41c4899

fix some "might be referenced before assignment" warnings
1 parent 5223795 commit 41c4899

1 file changed: +26 -28 lines

openeogeotrellis/backend.py

@@ -2203,40 +2203,38 @@ def as_arg_element(dependency: dict) -> dict:
             batch_job_config_dir=get_backend_config().batch_job_config_dir
         )
 
-        if get_backend_config().provide_s3_profiles_and_tokens:
-            # For now we cannot access the subject of the initial access token but generally it is the user_id
-            token_path = get_backend_config().batch_job_config_dir / "token"
-            s3_profiles_cfg_batch_secret = k8s_render_manifest_template(
-                "batch_job_cfg_secret.yaml.j2",
-                secret_name=batch_job_cfg_secret_name,
-                job_id=job_id,
-                token=IDP_TOKEN_ISSUER.get_job_token(sub_id=user_id, user_id=user_id, job_id=job_id),
-                profile_file_content=S3Config.from_backend_config(job_id, str(token_path))
-            )
-
-        if get_backend_config().fuse_mount_batchjob_s3_bucket:
-            persistentvolume_batch_job_results_dict = k8s_render_manifest_template(
-                "persistentvolume_batch_job_results.yaml.j2",
-                job_name=spark_app_id,
-                job_namespace=pod_namespace,
-                mounter=get_backend_config().fuse_mount_batchjob_s3_mounter,
-                mount_options=get_backend_config().fuse_mount_batchjob_s3_mount_options,
-                storage_class=get_backend_config().fuse_mount_batchjob_s3_storage_class,
-                output_dir=output_dir,
-                swift_bucket=bucket,
-            )
-
-            persistentvolumeclaim_batch_job_results_dict = k8s_render_manifest_template(
-                "persistentvolumeclaim_batch_job_results.yaml.j2",
-                job_name=spark_app_id,
-            )
-
         with self._double_job_registry as dbl_registry:
             try:
                 if get_backend_config().fuse_mount_batchjob_s3_bucket:
+                    persistentvolume_batch_job_results_dict = k8s_render_manifest_template(
+                        "persistentvolume_batch_job_results.yaml.j2",
+                        job_name=spark_app_id,
+                        job_namespace=pod_namespace,
+                        mounter=get_backend_config().fuse_mount_batchjob_s3_mounter,
+                        mount_options=get_backend_config().fuse_mount_batchjob_s3_mount_options,
+                        storage_class=get_backend_config().fuse_mount_batchjob_s3_storage_class,
+                        output_dir=output_dir,
+                        swift_bucket=bucket,
+                    )
+
+                    persistentvolumeclaim_batch_job_results_dict = k8s_render_manifest_template(
+                        "persistentvolumeclaim_batch_job_results.yaml.j2",
+                        job_name=spark_app_id,
+                    )
+
                     api_instance_core.create_persistent_volume(persistentvolume_batch_job_results_dict, pretty=True)
                     api_instance_core.create_namespaced_persistent_volume_claim(pod_namespace, persistentvolumeclaim_batch_job_results_dict, pretty=True)
                 if get_backend_config().provide_s3_profiles_and_tokens:
+                    # For now we cannot access the subject of the initial access token but generally it is the user_id
+                    token_path = get_backend_config().batch_job_config_dir / "token"
+                    s3_profiles_cfg_batch_secret = k8s_render_manifest_template(
+                        "batch_job_cfg_secret.yaml.j2",
+                        secret_name=batch_job_cfg_secret_name,
+                        job_id=job_id,
+                        token=IDP_TOKEN_ISSUER.get_job_token(sub_id=user_id, user_id=user_id, job_id=job_id),
+                        profile_file_content=S3Config.from_backend_config(job_id, str(token_path)),
+                    )
+
                     api_instance_core.create_namespaced_secret(
                         pod_namespace, s3_profiles_cfg_batch_secret, pretty=True
                     )
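
Why this silences the warning: in the old layout, persistentvolume_batch_job_results_dict, persistentvolumeclaim_batch_job_results_dict and s3_profiles_cfg_batch_secret were assigned in if blocks before the with/try, while the Kubernetes API calls that consume them sit in separate if blocks inside it, so static checkers (e.g. PyCharm, pylint) cannot prove the variables are bound at the point of use and report "might be referenced before assignment". Moving each assignment into the same branch as its use makes every path to the call assign the variable first. Below is a minimal, self-contained sketch of that pattern; the function and variable names are illustrative only, not taken from backend.py.

def create_resources_old(flag_enabled: bool) -> None:
    # Assignment guarded by one check ...
    if flag_enabled:
        manifest = {"kind": "PersistentVolume"}

    # ... use guarded by a second, textually separate check: the analyzer
    # cannot prove `manifest` is bound here, hence the warning.
    if flag_enabled:
        submit(manifest)


def create_resources_new(flag_enabled: bool) -> None:
    # Render and submit in the same branch: every path that reaches
    # submit() has assigned `manifest` first, so the warning disappears.
    if flag_enabled:
        manifest = {"kind": "PersistentVolume"}
        submit(manifest)


def submit(manifest: dict) -> None:
    # Stand-in for the Kubernetes API call (e.g. create_persistent_volume).
    print(manifest)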
