Commit aa929d7

fixup! Issue #346 Some more ProcessArgs porting
1 parent f5e17b8 commit aa929d7

File tree: 3 files changed (+82 -67 lines)


openeo_driver/ProcessGraphDeserializer.py (+65 -65)

@@ -754,7 +754,7 @@ def load_disk_data(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     """
     Deprecated, use load_uploaded_files or load_stac
     """
-    _log.warning("Deprecated: usage of load_disk_data")
+    _log.warning("DEPRECATED: load_disk_data usage")
     kwargs = dict(
         glob_pattern=args.get_required("glob_pattern", expected_type=str),
         format=args.get_required("format", expected_type=str),
@@ -1002,7 +1002,9 @@ def apply_polygon(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     process = args.get_deep("process", "process_graph", expected_type=dict)
     if "polygons" in args and "geometries" not in args:
         # TODO remove this deprecated "polygons" parameter handling when not used anymore
-        _log.warning("In process 'apply_polygon': parameter 'polygons' is deprecated, use 'geometries' instead.")
+        _log.warning(
+            "DEPRECATED: In process 'apply_polygon': parameter 'polygons' is deprecated, use 'geometries' instead."
+        )
         geometries = args.get_required("polygons")
     else:
         geometries = args.get_required("geometries")
@@ -1528,16 +1530,30 @@ def resample_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
 
 
 @process
-def resample_cube_spatial(args: dict, env: EvalEnv) -> DriverDataCube:
-    image_collection = extract_arg(args, 'data')
-    target_image_collection = extract_arg(args, 'target')
-    method = args.get('method', 'near')
-    if not isinstance(image_collection, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="resample_cube_spatial",
-            reason=f"Invalid data type {type(image_collection)!r} expected raster-cube."
-        )
-    return image_collection.resample_cube_spatial(target=target_image_collection, method=method)
+def resample_cube_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
+    target: DriverDataCube = args.get_required("target", expected_type=DriverDataCube)
+    method = args.get_enum(
+        "method",
+        options=[
+            "average",
+            "bilinear",
+            "cubic",
+            "cubicspline",
+            "lanczos",
+            "max",
+            "med",
+            "min",
+            "mode",
+            "near",
+            "q1",
+            "q3",
+            "rms",
+            "sum",
+        ],
+        default="near",
+    )
+    return cube.resample_cube_spatial(target=target, method=method)
 
 
 @process
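
Note on the pattern above (my illustration, not part of the diff): `args.get_required(..., expected_type=...)` is what replaces the hand-rolled isinstance check and its manually constructed ProcessParameterInvalidException. A minimal sketch of the assumed behavior:

    # Illustration only: type checking via expected_type.
    from openeo_driver.datacube import DriverDataCube
    from openeo_driver.errors import ProcessParameterInvalidException
    from openeo_driver.processes import ProcessArgs

    args = ProcessArgs({"data": "not-a-cube"}, process_id="resample_cube_spatial")
    try:
        args.get_required("data", expected_type=DriverDataCube)
    except ProcessParameterInvalidException:
        # Same openEO error the removed isinstance block used to raise by hand.
        pass
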
@@ -1636,25 +1652,22 @@ def run_udf(args: dict, env: EvalEnv):
 
 
 @process
-def linear_scale_range(args: dict, env: EvalEnv) -> DriverDataCube:
-    image_collection = extract_arg(args, 'x')
-
-    inputMin = extract_arg(args, "inputMin")
-    inputMax = extract_arg(args, "inputMax")
-    outputMax = args.get("outputMax", 1.0)
-    outputMin = args.get("outputMin", 0.0)
-    if not isinstance(image_collection, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="linear_scale_range",
-            reason=f"Invalid data type {type(image_collection)!r} expected raster-cube."
-        )
-
-    return image_collection.linear_scale_range(inputMin, inputMax, outputMin, outputMax)
+def linear_scale_range(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
+    # TODO: eliminate this top-level linear_scale_range process implementation (should be used as `apply` callback)
+    _log.warning("DEPRECATED: linear_scale_range usage directly on cube is deprecated/non-standard.")
+    cube: DriverDataCube = args.get_required("x", expected_type=DriverDataCube)
+    # Note: non-standard camelCase parameter names (https://github.com/Open-EO/openeo-processes/issues/302)
+    input_min = args.get_required("inputMin")
+    input_max = args.get_required("inputMax")
+    output_min = args.get_optional("outputMin", default=0.0)
+    output_max = args.get_optional("outputMax", default=1.0)
+    # TODO linear_scale_range is defined on GeopysparkDataCube, but not on DriverDataCube
+    return cube.linear_scale_range(input_min, input_max, output_min, output_max)
 
 
 @process
-def constant(args: dict, env: EvalEnv):
-    return args["x"]
+def constant(args: ProcessArgs, env: EvalEnv):
+    return args.get_required("x")
 
 
 def flatten_children_node_types(process_graph: Union[dict, list]):
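
Aside on the `constant` change (my reading, not stated in the diff): swapping the bare dict lookup for `get_required` means a missing "x" surfaces as an openEO "required parameter" error that names both the parameter and the process, instead of a plain KeyError. Sketch:

    from openeo_driver.processes import ProcessArgs

    args = ProcessArgs({}, process_id="constant")
    # Old style: {}["x"] -> bare KeyError with no process context.
    # New style (assumed behavior): raises an openEO-style exception
    # naming parameter "x" and process "constant":
    # args.get_required("x")
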
@@ -1806,10 +1819,11 @@ def apply_process(process_id: str, args: dict, namespace: Union[str, None], env:
     ])
     .returns("GeoJSON-style feature collection", schema={"type": "object", "subtype": "geojson"})
 )
-def read_vector(args: Dict, env: EvalEnv) -> DelayedVector:
+def read_vector(args: ProcessArgs, env: EvalEnv) -> DelayedVector:
     # TODO #114 EP-3981: deprecated in favor of load_uploaded_files/load_external? https://github.com/Open-EO/openeo-processes/issues/322
     # TODO: better argument name than `filename`?
-    path = extract_arg(args, "filename")
+    _log.warning("DEPRECATED: read_vector usage")
+    path = args.get_required("filename")
     _check_geometry_path_assumption(
         path=path, process="read_vector", parameter="filename"
     )
@@ -1856,10 +1870,10 @@ def load_uploaded_files(args: ProcessArgs, env: EvalEnv) -> Union[DriverVectorCu
     .param('data', description="GeoJson object.", schema={"type": "object", "subtype": "geojson"})
     .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"})
 )
-def to_vector_cube(args: Dict, env: EvalEnv):
-    _log.warning("Experimental process `to_vector_cube` is deprecated, use `load_geojson` instead")
+def to_vector_cube(args: ProcessArgs, env: EvalEnv):
+    _log.warning("DEPRECATED: process to_vector_cube is deprecated, use load_geojson instead")
     # TODO: remove this experimental/deprecated process
-    data = extract_arg(args, "data", process_id="to_vector_cube")
+    data = args.get_required("data")
     if isinstance(data, dict) and data.get("type") in {"Polygon", "MultiPolygon", "Feature", "FeatureCollection"}:
         return env.backend_implementation.vector_cube_cls.from_geojson(data)
     raise FeatureUnsupportedException(f"Converting {type(data)} to vector cube is not supported")
@@ -1925,14 +1939,10 @@ def get_geometries(args: Dict, env: EvalEnv) -> Union[DelayedVector, dict]:
     .param('data', description="A raster data cube.", schema={"type": "object", "subtype": "raster-cube"})
     .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"})
 )
-def raster_to_vector(args: Dict, env: EvalEnv):
-    image_collection = extract_arg(args, 'data')
-    if not isinstance(image_collection, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="raster_to_vector",
-            reason=f"Invalid data type {type(image_collection)!r} expected raster-cube."
-        )
-    return image_collection.raster_to_vector()
+def raster_to_vector(args: ProcessArgs, env: EvalEnv):
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
+    # TODO: raster_to_vector is only defined on GeopysparkDataCube, not DriverDataCube
+    return cube.raster_to_vector()
 
 
 @non_standard_process(
@non_standard_process(
@@ -2056,9 +2066,9 @@ def evaluate_process_from_url(process_id: str, namespace: str, args: dict, env:
20562066
.param('seconds', description="Number of seconds to sleep.", schema={"type": "number"}, required=True)
20572067
.returns("Original data", schema={})
20582068
)
2059-
def sleep(args: Dict, env: EvalEnv):
2060-
data = extract_arg(args, "data")
2061-
seconds = extract_arg(args, "seconds")
2069+
def sleep(args: ProcessArgs, env: EvalEnv):
2070+
data = args.get_required("data")
2071+
seconds = args.get_required("seconds", expected_type=(int, float))
20622072
dry_run_tracer: DryRunDataTracer = env.get(ENV_DRY_RUN_TRACER)
20632073
if not dry_run_tracer:
20642074
_log.info("Sleeping {s} seconds".format(s=seconds))
@@ -2165,20 +2175,15 @@ def resolution_merge(args: ProcessArgs, env: EvalEnv):
     .param('data', description="Data to discard.", schema={}, required=False)
     .returns("Nothing", schema={})
 )
-def discard_result(args: Dict, env: EvalEnv):
+def discard_result(args: ProcessArgs, env: EvalEnv):
     # TODO: keep a reference to the discarded result?
     return NullResult()
 
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/mask_scl_dilation.json"))
 @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/mask_scl_dilation.json"))
-def mask_scl_dilation(args: Dict, env: EvalEnv):
-    cube: DriverDataCube = extract_arg(args, 'data')
-    if not isinstance(cube, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="mask_scl_dilation",
-            reason=f"Invalid data type {type(cube)!r} expected raster-cube."
-        )
+def mask_scl_dilation(args: ProcessArgs, env: EvalEnv):
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
     if hasattr(cube, "mask_scl_dilation"):
         the_args = args.copy()
         del the_args["data"]
@@ -2209,13 +2214,8 @@ def to_scl_dilation_mask(args: ProcessArgs, env: EvalEnv):
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/mask_l1c.json"))
 @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/mask_l1c.json"))
-def mask_l1c(args: Dict, env: EvalEnv):
-    cube: DriverDataCube = extract_arg(args, 'data')
-    if not isinstance(cube, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="mask_l1c",
-            reason=f"Invalid data type {type(cube)!r} expected raster-cube."
-        )
+def mask_l1c(args: ProcessArgs, env: EvalEnv):
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
     if hasattr(cube, "mask_l1c"):
         return cube.mask_l1c()
     else:
@@ -2280,8 +2280,8 @@ def array_create(args: ProcessArgs, env: EvalEnv) -> list:
 
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/1.x/proposals/load_result.json"))
-def load_result(args: dict, env: EvalEnv) -> DriverDataCube:
-    job_id = extract_arg(args, "id")
+def load_result(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
+    job_id = args.get_required("id", expected_type=str)
     user = env.get("user")
 
     arguments = {}
@@ -2309,10 +2309,10 @@ def load_result(args: dict, env: EvalEnv) -> DriverDataCube:
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/1.x/proposals/inspect.json"))
 @process_registry_2xx.add_function(spec=read_spec("openeo-processes/2.x/proposals/inspect.json"))
-def inspect(args: dict, env: EvalEnv):
-    data = extract_arg(args, "data")
-    message = args.get("message", "")
-    level = args.get("level", "info")
+def inspect(args: ProcessArgs, env: EvalEnv):
+    data = args.get_required("data")
+    message = args.get_optional("message", default="")
+    level = args.get_optional("level", default="info")
     if message:
         _log.log(level=logging.getLevelName(level.upper()), msg=message)
     data_message = str(data)
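
To make the optional-argument pattern used throughout this file concrete, a short sketch with illustrative values (hypothetical, not from the commit), based on the ported `inspect` handler:

    from openeo_driver.processes import ProcessArgs

    args = ProcessArgs({"data": [1, 2, 3]}, process_id="inspect")
    data = args.get_required("data")                    # present -> [1, 2, 3]
    message = args.get_optional("message", default="")  # absent -> ""
    level = args.get_optional("level", default="info")  # absent -> "info"
    assert (data, message, level) == ([1, 2, 3], "", "info")
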

openeo_driver/processes.py (+8 -2)

@@ -426,13 +426,19 @@ def get_subset(self, names: List[str], aliases: Optional[Dict[str, str]] = None)
                     kwargs[key] = self[alias]
         return kwargs
 
-    def get_enum(self, name: str, options: Collection[ArgumentValue]) -> ArgumentValue:
+    def get_enum(
+        self, name: str, options: Collection[ArgumentValue], default: Optional[ArgumentValue] = None
+    ) -> ArgumentValue:
         """
         Get argument by name and check if it belongs to given set of (enum) values.
 
         Originally: `extract_arg_enum`
         """
-        value = self.get_required(name=name)
+        # TODO: use an "unset" sentinel value instead of None for default?
+        if default is None:
+            value = self.get_required(name=name)
+        else:
+            value = self.get_optional(name=name, default=default)
         if value not in options:
             raise ProcessParameterInvalidException(
                 parameter=name,
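
The contract of the extended signature, as I read this hunk: without a default the argument stays required; with a default, a missing argument falls back to it; and a supplied value must still be one of `options`. A small sketch (parameter names other than "method" are hypothetical):

    from openeo_driver.processes import ProcessArgs

    args = ProcessArgs({"method": "bilinear"}, process_id="resample_cube_spatial")
    assert args.get_enum("method", options=["near", "bilinear", "cubic"]) == "bilinear"
    assert args.get_enum("align", options=["upper-left", "lower-right"], default="upper-left") == "upper-left"
    # args.get_enum("align", options=["upper-left", "lower-right"])  # absent, no default -> required-parameter error
    # A present value outside `options` still raises ProcessParameterInvalidException.
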

tests/test_processes.py (+9 -0)

@@ -615,6 +615,15 @@ def test_get_enum(self):
         ):
             _ = args.get_enum("color", options=["R", "G", "B"])
 
+    def test_get_enum_optional(self):
+        args = ProcessArgs({"size": 3, "color": "red"}, process_id="wibble")
+        assert args.get_enum("color", options=["red", "green", "blue"], default="green") == "red"
+        assert args.get_enum("colour", options=["red", "green", "blue"], default="green") == "green"
+
+        assert args.get_enum("size", options=[0, 1, 2, 3], default=0) == 3
+        assert args.get_enum("dim", options=[0, 1, 2, 3], default=0) == 0
+        assert args.get_enum("dim", options=[0, 1, 2, 3], default=2) == 2
+
     def test_validator_generic(self):
         args = ProcessArgs({"size": 11}, process_id="wibble")
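
One edge case hinted at by the TODO in processes.py (my inference, not covered by these tests): since None doubles as the "no default" marker, an explicit default=None behaves exactly like omitting the default, so the argument remains required.

    from openeo_driver.processes import ProcessArgs

    args = ProcessArgs({}, process_id="wibble")
    # default=None is indistinguishable from "no default given", so this would still raise
    # the "required parameter" error rather than returning None:
    # args.get_enum("color", options=["red", "green", None], default=None)
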
