Skip to content

Commit bc54f00

Browse files
author
dsamaey
committed
Issue #747 please the linter (ran pre-commit on all)
1 parent b0580c7 commit bc54f00

File tree

9 files changed

+61
-39
lines changed

9 files changed

+61
-39
lines changed

docs/conf.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -13,13 +13,14 @@
1313
# All configuration values have a default; values that are commented out
1414
# serve to show the default.
1515

16+
import datetime
17+
1618
# If extensions (or modules to document with autodoc) are in another directory,
1719
# add these directories to sys.path here. If the directory is relative to the
1820
# documentation root, use os.path.abspath to make it absolute, like shown here.
1921
#
2022
import os
2123
import sys
22-
import datetime
2324

2425
sys.path.insert(0, os.path.abspath('.'))
2526
sys.path.insert(0, os.path.abspath('../'))
@@ -44,6 +45,7 @@
4445
]
4546

4647
import sphinx_autodoc_typehints
48+
4749
# Add any paths that contain templates here, relative to this directory.
4850
templates_path = ['_templates']
4951

docs/cookbook/sampling.md

+5-5
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,10 @@ but rather want to extract a result at specific locations.
66
Examples include extracting training data for model calibration, or computing the result for
77
areas where validation data is available.
88

9-
An important constraint is that most implementations assume that sampling is an operation
10-
on relatively small areas, of for instance up to 512x512 pixels (but often much smaller).
9+
An important constraint is that most implementations assume that sampling is an operation
10+
on relatively small areas, of for instance up to 512x512 pixels (but often much smaller).
1111
When extracting polygons with larger areas, it is recommended to look into running a separate job per 'sample'.
12-
Some more important performance notices are mentioned later in the chapter, please read them carefully
12+
Some more important performance notices are mentioned later in the chapter, please read them carefully
1313
to get best results.
1414

1515
Sampling can be done for points or polygons:
@@ -23,9 +23,9 @@ public url, and to load it in openEO using {py:meth}`openeo.rest.connection.Conn
2323

2424
## Sampling at point locations
2525

26-
To sample point locations, the `openeo.rest.datacube.DataCube.aggregate_spatial` method can be used. The reducer can be a
26+
To sample point locations, the `openeo.rest.datacube.DataCube.aggregate_spatial` method can be used. The reducer can be a
2727
commonly supported reducer like `min`, `max` or `mean` and will receive only one value as input in most cases. Note that
28-
in edge cases, a point can intersect with up to 4 pixels. If this is not desirable, it might be worth trying to align
28+
in edge cases, a point can intersect with up to 4 pixels. If this is not desirable, it might be worth trying to align
2929
points with pixel centers, which does require more advanced knowledge of the pixel grid of your data cube.
3030

3131
More information on `aggregate_spatial` is available [here](_aggregate-spatial-evi).

examples/archive/udf/udf_modify_spatial.py

+5-3
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
1+
import numpy as np
12
import xarray
3+
4+
from openeo.metadata import CollectionMetadata
25
from openeo.udf import XarrayDataCube
36
from openeo.udf.debug import inspect
4-
from openeo.metadata import CollectionMetadata
5-
import numpy as np
7+
68

79
def apply_metadata(input_metadata:CollectionMetadata, context:dict) -> CollectionMetadata:
810

@@ -41,4 +43,4 @@ def apply_datacube(cube: XarrayDataCube, context: dict) -> XarrayDataCube:
4143
predicted_cube = xarray.DataArray(predicted_array, dims=['bands', 'x', 'y'], coords=dict(x=coord_x, y=coord_y))
4244

4345

44-
return XarrayDataCube(predicted_cube)
46+
return XarrayDataCube(predicted_cube)

openeo/extra/job_management/__init__.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -673,20 +673,20 @@ def _cancel_prolonged_job(self, job: BatchJob, row):
673673
try:
674674
# Ensure running start time is valid
675675
job_running_start_time = rfc3339.parse_datetime(row.get("running_start_time"), with_timezone=True)
676-
676+
677677
# Parse the current time into a datetime object with timezone info
678678
current_time = rfc3339.parse_datetime(rfc3339.utcnow(), with_timezone=True)
679679

680680
# Calculate the elapsed time between job start and now
681681
elapsed = current_time - job_running_start_time
682682

683683
if elapsed > self._cancel_running_job_after:
684-
684+
685685
_log.info(
686686
f"Cancelling long-running job {job.job_id} (after {elapsed}, running since {job_running_start_time})"
687687
)
688688
job.stop()
689-
689+
690690
except Exception as e:
691691
_log.error(f"Unexpected error while handling job {job.job_id}: {e}")
692692

openeo/extra/job_management/stac_job_db.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def __init__(
5353

5454
def exists(self) -> bool:
5555
return any(c.id == self.collection_id for c in self.client.get_collections())
56-
56+
5757
def _normalize_df(self, df: pd.DataFrame) -> pd.DataFrame:
5858
"""
5959
Normalize the given dataframe to be compatible with :py:class:`MultiBackendJobManager`

openeo/processes.py

+34-17
Original file line numberDiff line numberDiff line change
@@ -2867,7 +2867,8 @@ def aggregate_spatial(data, geometries, reducer, target_dimension=UNSET, context
28672867
aggregated values (i.e. no-data). The spatial dimensions are replaced by a dimension of type 'geometries'
28682868
and if `target_dimension` is not `null`, a new dimension is added.
28692869
"""
2870-
return _process('aggregate_spatial',
2870+
return _process(
2871+
"aggregate_spatial",
28712872
data=data,
28722873
geometries=geometries,
28732874
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
@@ -2903,7 +2904,8 @@ def aggregate_spatial_window(data, reducer, size, boundary=UNSET, align=UNSET, c
29032904
labels will be set to the coordinate at the center of the window. The other dimension properties (name,
29042905
type and reference system) remain unchanged.
29052906
"""
2906-
return _process('aggregate_spatial_window',
2907+
return _process(
2908+
"aggregate_spatial_window",
29072909
data=data,
29082910
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
29092911
size=size,
@@ -2942,7 +2944,8 @@ def aggregate_temporal(data, intervals, reducer, labels=UNSET, dimension=UNSET,
29422944
system and resolution) remain unchanged, except for the resolution and dimension labels of the given
29432945
temporal dimension.
29442946
"""
2945-
return _process('aggregate_temporal',
2947+
return _process(
2948+
"aggregate_temporal",
29462949
data=data,
29472950
intervals=intervals,
29482951
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
@@ -2996,7 +2999,8 @@ def aggregate_temporal_period(data, period, reducer, dimension=UNSET, context=UN
29962999
the source data cube has just one dimension label `2020-01-05`, the process returns a data cube with just a
29973000
single dimension label (`2020-005`).
29983001
"""
2999-
return _process('aggregate_temporal_period',
3002+
return _process(
3003+
"aggregate_temporal_period",
30003004
data=data,
30013005
period=period,
30023006
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
@@ -3137,7 +3141,8 @@ def apply_dimension(data, process, dimension, target_dimension=UNSET, context=UN
31373141
incrementing integers starting from zero, - the resolution changes, and - the reference system is
31383142
undefined.
31393143
"""
3140-
return _process('apply_dimension',
3144+
return _process(
3145+
"apply_dimension",
31413146
data=data,
31423147
process=build_child_callback(process, parent_parameters=['data', 'context']),
31433148
dimension=dimension,
@@ -3195,7 +3200,8 @@ def apply_neighborhood(data, process, size, overlap=UNSET, context=UNSET) -> Pro
31953200
:return: A raster data cube with the newly computed values and the same dimensions. The dimension
31963201
properties (name, type, labels, reference system and resolution) remain unchanged.
31973202
"""
3198-
return _process('apply_neighborhood',
3203+
return _process(
3204+
"apply_neighborhood",
31993205
data=data,
32003206
process=build_child_callback(process, parent_parameters=['data', 'context']),
32013207
size=size,
@@ -3224,7 +3230,8 @@ def apply_polygon(data, polygons, process, mask_value=UNSET, context=UNSET) -> P
32243230
:return: A data cube with the newly computed values and the same dimensions. The dimension properties
32253231
(name, type, labels, reference system and resolution) remain unchanged.
32263232
"""
3227-
return _process('apply_polygon',
3233+
return _process(
3234+
"apply_polygon",
32283235
data=data,
32293236
polygons=polygons,
32303237
process=build_child_callback(process, parent_parameters=['data', 'context']),
@@ -3318,7 +3325,8 @@ def ard_normalized_radar_backscatter(data, elevation_model=UNSET, contributing_a
33183325
DEM-based local incidence angles in degrees. The data returned is CARD4L compliant with corresponding
33193326
metadata.
33203327
"""
3321-
return _process('ard_normalized_radar_backscatter',
3328+
return _process(
3329+
"ard_normalized_radar_backscatter",
33223330
data=data,
33233331
elevation_model=elevation_model,
33243332
contributing_area=contributing_area,
@@ -3373,7 +3381,8 @@ def ard_surface_reflectance(data, atmospheric_correction_method, cloud_detection
33733381
(optional): Contains coefficients used for terrain illumination correction are provided for each pixel.
33743382
The data returned is CARD4L compliant with corresponding metadata.
33753383
"""
3376-
return _process('ard_surface_reflectance',
3384+
return _process(
3385+
"ard_surface_reflectance",
33773386
data=data,
33783387
atmospheric_correction_method=atmospheric_correction_method,
33793388
cloud_detection_method=cloud_detection_method,
@@ -3413,7 +3422,8 @@ def array_apply(data, process, context=UNSET) -> ProcessBuilder:
34133422
:return: An array with the newly computed values. The number of elements are the same as for the original
34143423
array.
34153424
"""
3416-
return _process('array_apply',
3425+
return _process(
3426+
"array_apply",
34173427
data=data,
34183428
process=build_child_callback(process, parent_parameters=['x', 'index', 'label', 'context']),
34193429
context=context
@@ -3503,7 +3513,8 @@ def array_filter(data, condition, context=UNSET) -> ProcessBuilder:
35033513
:return: An array filtered by the specified condition. The number of elements are less than or equal
35043514
compared to the original array.
35053515
"""
3506-
return _process('array_filter',
3516+
return _process(
3517+
"array_filter",
35073518
data=data,
35083519
condition=build_child_callback(condition, parent_parameters=['x', 'index', 'label', 'context']),
35093520
context=context
@@ -4056,7 +4067,8 @@ def filter_labels(data, condition, dimension, context=UNSET) -> ProcessBuilder:
40564067
system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension
40574068
labels.
40584069
"""
4059-
return _process('filter_labels',
4070+
return _process(
4071+
"filter_labels",
40604072
data=data,
40614073
condition=build_child_callback(condition, parent_parameters=['value', 'context']),
40624074
dimension=dimension,
@@ -4156,7 +4168,8 @@ def fit_curve(data, parameters, function, ignore_nodata=UNSET) -> ProcessBuilder
41564168
41574169
:return: An array with the optimal values for the parameters.
41584170
"""
4159-
return _process('fit_curve',
4171+
return _process(
4172+
"fit_curve",
41604173
data=data,
41614174
parameters=parameters,
41624175
function=build_child_callback(function, parent_parameters=['x', 'parameters']),
@@ -4704,7 +4717,8 @@ def merge_cubes(cube1, cube2, overlap_resolver=UNSET, context=UNSET) -> ProcessB
47044717
:return: The merged data cube. See the process description for details regarding the dimensions and
47054718
dimension properties (name, type, labels, reference system and resolution).
47064719
"""
4707-
return _process('merge_cubes',
4720+
return _process(
4721+
"merge_cubes",
47084722
cube1=cube1,
47094723
cube2=cube2,
47104724
overlap_resolver=(build_child_callback(overlap_resolver, parent_parameters=['x', 'y', 'context']) if overlap_resolver not in [None, UNSET] else overlap_resolver),
@@ -4903,7 +4917,8 @@ def predict_curve(parameters, function, dimension, labels=UNSET) -> ProcessBuild
49034917
:return: A data cube with the predicted values with the provided dimension `dimension` having as many
49044918
labels as provided through `labels`.
49054919
"""
4906-
return _process('predict_curve',
4920+
return _process(
4921+
"predict_curve",
49074922
parameters=parameters,
49084923
function=build_child_callback(function, parent_parameters=['x', 'parameters']),
49094924
dimension=dimension,
@@ -4994,7 +5009,8 @@ def reduce_dimension(data, reducer, dimension, context=UNSET) -> ProcessBuilder:
49945009
dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution)
49955010
for all other dimensions remain unchanged.
49965011
"""
4997-
return _process('reduce_dimension',
5012+
return _process(
5013+
"reduce_dimension",
49985014
data=data,
49995015
reducer=build_child_callback(reducer, parent_parameters=['data', 'context']),
50005016
dimension=dimension,
@@ -5225,7 +5241,8 @@ def sar_backscatter(data, coefficient=UNSET, elevation_model=UNSET, mask=UNSET,
52255241
:return: Backscatter values corresponding to the chosen parametrization. The values are given in linear
52265242
scale.
52275243
"""
5228-
return _process('sar_backscatter',
5244+
return _process(
5245+
"sar_backscatter",
52295246
data=data,
52305247
coefficient=coefficient,
52315248
elevation_model=elevation_model,

openeo/rest/job.py

+2-6
Original file line numberDiff line numberDiff line change
@@ -3,20 +3,16 @@
33
import datetime
44
import json
55
import logging
6+
import shutil
67
import time
78
import typing
89
from pathlib import Path
910
from typing import Dict, List, Optional, Union
1011

1112
import requests
12-
import shutil
1313

1414
from openeo.internal.documentation import openeo_endpoint
15-
from openeo.internal.jupyter import (
16-
VisualDict,
17-
render_component,
18-
render_error,
19-
)
15+
from openeo.internal.jupyter import VisualDict, render_component, render_error
2016
from openeo.internal.warnings import deprecated, legacy_alias
2117
from openeo.rest import (
2218
DEFAULT_DOWNLOAD_CHUNK_SIZE,

tests/internal/processes/test_generator.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -284,7 +284,7 @@ def apply_dimension(data, dimension, process):
284284
285285
:return: Data cube
286286
"""
287-
return _process('apply_dimension',
287+
return _process('apply_dimension',
288288
data=data,
289289
dimension=dimension,
290290
process=build_child_callback(process, parent_parameters=['data'])
@@ -332,7 +332,7 @@ def apply(data, process=UNSET):
332332
333333
:return: Data cube
334334
"""
335-
return _process('apply',
335+
return _process('apply',
336336
data=data,
337337
process=(build_child_callback(process, parent_parameters=['data']) if process not in [None, UNSET] else process)
338338
)'''

tests/rest/test_job.py

+6-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,12 @@
1212

1313
import openeo
1414
import openeo.rest.job
15-
from openeo.rest import JobFailedException, OpenEoApiPlainError, OpenEoClientException, DEFAULT_DOWNLOAD_CHUNK_SIZE
15+
from openeo.rest import (
16+
DEFAULT_DOWNLOAD_CHUNK_SIZE,
17+
JobFailedException,
18+
OpenEoApiPlainError,
19+
OpenEoClientException,
20+
)
1621
from openeo.rest.job import BatchJob, ResultAsset
1722
from openeo.rest.models.general import Link
1823
from openeo.rest.models.logs import LogEntry

0 commit comments

Comments (0)