@@ -754,7 +754,7 @@ def load_disk_data(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     """
     Deprecated, use load_uploaded_files or load_stac
     """
-    _log.warning("Deprecated: usage of load_disk_data")
+    _log.warning("DEPRECATED: load_disk_data usage")
     kwargs = dict(
         glob_pattern=args.get_required("glob_pattern", expected_type=str),
         format=args.get_required("format", expected_type=str),
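For readers unfamiliar with the `ProcessArgs` helper this diff migrates to: below is a minimal sketch of what `get_required`-style validation with `expected_type` looks like. The class name and exception type are placeholders for illustration, not the driver's actual implementation.

```python
# Illustrative sketch only: a dict-backed argument container approximating
# the ProcessArgs.get_required(..., expected_type=...) pattern used above.
# The real driver raises openEO-specific exceptions instead of ValueError.
class ProcessArgsSketch(dict):
    def get_required(self, name: str, expected_type=None):
        if name not in self:
            raise ValueError(f"Missing required parameter {name!r}")
        value = self[name]
        if expected_type is not None and not isinstance(value, expected_type):
            raise ValueError(f"Parameter {name!r} has invalid type {type(value)!r}")
        return value


args = ProcessArgsSketch(glob_pattern="/data/*.tif", format="GTiff")
assert args.get_required("glob_pattern", expected_type=str) == "/data/*.tif"
```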
@@ -1002,7 +1002,9 @@ def apply_polygon(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     process = args.get_deep("process", "process_graph", expected_type=dict)
     if "polygons" in args and "geometries" not in args:
         # TODO remove this deprecated "polygons" parameter handling when not used anymore
-        _log.warning("In process 'apply_polygon': parameter 'polygons' is deprecated, use 'geometries' instead.")
+        _log.warning(
+            "DEPRECATED: In process 'apply_polygon': parameter 'polygons' is deprecated, use 'geometries' instead."
+        )
         geometries = args.get_required("polygons")
     else:
         geometries = args.get_required("geometries")
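The `args.get_deep("process", "process_graph", ...)` call in the context above walks a nested dict; here is a hedged sketch of that lookup (the error handling is an assumption, the real method likely raises a process-parameter exception rather than KeyError/TypeError):

```python
# Sketch of a get_deep-style nested lookup: each positional step is a dict key,
# with an optional type check on the final value.
def get_deep(args: dict, *steps, expected_type=None):
    value = args
    for step in steps:
        value = value[step]
    if expected_type is not None and not isinstance(value, expected_type):
        raise TypeError(f"Expected {expected_type}, got {type(value)!r}")
    return value


args = {"process": {"process_graph": {"mean1": {"process_id": "mean"}}}}
assert get_deep(args, "process", "process_graph", expected_type=dict)["mean1"]["process_id"] == "mean"
```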
@@ -1528,16 +1530,30 @@ def resample_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
 
 
 @process
-def resample_cube_spatial(args: dict, env: EvalEnv) -> DriverDataCube:
-    image_collection = extract_arg(args, 'data')
-    target_image_collection = extract_arg(args, 'target')
-    method = args.get('method', 'near')
-    if not isinstance(image_collection, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="resample_cube_spatial",
-            reason=f"Invalid data type {type(image_collection)!r} expected raster-cube."
-        )
-    return image_collection.resample_cube_spatial(target=target_image_collection, method=method)
+def resample_cube_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
+    target: DriverDataCube = args.get_required("target", expected_type=DriverDataCube)
+    method = args.get_enum(
+        "method",
+        options=[
+            "average",
+            "bilinear",
+            "cubic",
+            "cubicspline",
+            "lanczos",
+            "max",
+            "med",
+            "min",
+            "mode",
+            "near",
+            "q1",
+            "q3",
+            "rms",
+            "sum",
+        ],
+        default="near",
+    )
+    return cube.resample_cube_spatial(target=target, method=method)
 
 
 @process
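The new `get_enum` call replaces the unvalidated `args.get('method', 'near')`: it applies the default and rejects values outside the listed resampling methods. A minimal sketch of that behavior follows (standalone function for illustration; the exception type is an assumption):

```python
# Minimal sketch of get_enum-style validation: default fallback plus a
# membership check against the allowed options.
def get_enum(args: dict, name: str, options, default=None):
    value = args.get(name, default)
    if value not in options:
        raise ValueError(f"Invalid value {value!r} for {name!r}, expected one of {sorted(options)}")
    return value


assert get_enum({}, "method", options=["near", "bilinear", "cubic"], default="near") == "near"
assert get_enum({"method": "bilinear"}, "method", options=["near", "bilinear", "cubic"]) == "bilinear"
```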
@@ -1636,25 +1652,22 @@ def run_udf(args: dict, env: EvalEnv):
 
 
 @process
-def linear_scale_range(args: dict, env: EvalEnv) -> DriverDataCube:
-    image_collection = extract_arg(args, 'x')
-
-    inputMin = extract_arg(args, "inputMin")
-    inputMax = extract_arg(args, "inputMax")
-    outputMax = args.get("outputMax", 1.0)
-    outputMin = args.get("outputMin", 0.0)
-    if not isinstance(image_collection, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="linear_scale_range",
-            reason=f"Invalid data type {type(image_collection)!r} expected raster-cube."
-        )
-
-    return image_collection.linear_scale_range(inputMin, inputMax, outputMin, outputMax)
+def linear_scale_range(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
+    # TODO: eliminate this top-level linear_scale_range process implementation (should be used as `apply` callback)
+    _log.warning("DEPRECATED: linear_scale_range usage directly on cube is deprecated/non-standard.")
+    cube: DriverDataCube = args.get_required("x", expected_type=DriverDataCube)
+    # Note: non-standard camelCase parameter names (https://github.com/Open-EO/openeo-processes/issues/302)
+    input_min = args.get_required("inputMin")
+    input_max = args.get_required("inputMax")
+    output_min = args.get_optional("outputMin", default=0.0)
+    output_max = args.get_optional("outputMax", default=1.0)
+    # TODO: linear_scale_range is defined on GeopysparkDataCube, but not on DriverDataCube
+    return cube.linear_scale_range(input_min, input_max, output_min, output_max)
 
 
 @process
-def constant(args: dict, env: EvalEnv):
-    return args["x"]
+def constant(args: ProcessArgs, env: EvalEnv):
+    return args.get_required("x")
 
 
 def flatten_children_node_types(process_graph: Union[dict, list]):
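For reference, the rescaling the cube method is expected to perform maps `x` from `[inputMin, inputMax]` onto `[outputMin, outputMax]`. A scalar sketch of the openEO formula is shown below; per the process spec the input is clipped to the input range first, though the per-pixel cube implementation may handle edge cases differently.

```python
# Scalar sketch of the openEO linear_scale_range formula; the cube method
# applies this per pixel.
def linear_scale_range_scalar(x, input_min, input_max, output_min=0.0, output_max=1.0):
    x = min(max(x, input_min), input_max)  # clip to the input range
    return (x - input_min) / (input_max - input_min) * (output_max - output_min) + output_min


assert linear_scale_range_scalar(5000, 0, 10000) == 0.5
assert linear_scale_range_scalar(20000, 0, 10000, output_max=255) == 255
```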
@@ -1806,10 +1819,11 @@ def apply_process(process_id: str, args: dict, namespace: Union[str, None], env:
     ])
     .returns("GeoJSON-style feature collection", schema={"type": "object", "subtype": "geojson"})
 )
-def read_vector(args: Dict, env: EvalEnv) -> DelayedVector:
+def read_vector(args: ProcessArgs, env: EvalEnv) -> DelayedVector:
     # TODO #114 EP-3981: deprecated in favor of load_uploaded_files/load_external? https://github.com/Open-EO/openeo-processes/issues/322
     # TODO: better argument name than `filename`?
-    path = extract_arg(args, "filename")
+    _log.warning("DEPRECATED: read_vector usage")
+    path = args.get_required("filename")
     _check_geometry_path_assumption(
         path=path, process="read_vector", parameter="filename"
     )
@@ -1856,10 +1870,10 @@ def load_uploaded_files(args: ProcessArgs, env: EvalEnv) -> Union[DriverVectorCu
     .param('data', description="GeoJson object.", schema={"type": "object", "subtype": "geojson"})
     .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"})
 )
-def to_vector_cube(args: Dict, env: EvalEnv):
-    _log.warning("Experimental process `to_vector_cube` is deprecated, use `load_geojson` instead")
+def to_vector_cube(args: ProcessArgs, env: EvalEnv):
+    _log.warning("DEPRECATED: process to_vector_cube is deprecated, use load_geojson instead")
     # TODO: remove this experimental/deprecated process
-    data = extract_arg(args, "data", process_id="to_vector_cube")
+    data = args.get_required("data")
     if isinstance(data, dict) and data.get("type") in {"Polygon", "MultiPolygon", "Feature", "FeatureCollection"}:
         return env.backend_implementation.vector_cube_cls.from_geojson(data)
     raise FeatureUnsupportedException(f"Converting {type(data)} to vector cube is not supported")
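For illustration, here is a GeoJSON object that passes the `data.get("type")` check above and would be handed to `from_geojson` (coordinates hypothetical):

```python
# A FeatureCollection whose top-level "type" is in the accepted set
# {"Polygon", "MultiPolygon", "Feature", "FeatureCollection"}.
geojson = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "properties": {},
        "geometry": {
            "type": "Polygon",
            "coordinates": [[[4.0, 51.0], [4.1, 51.0], [4.1, 51.1], [4.0, 51.0]]],
        },
    }],
}
assert isinstance(geojson, dict)
assert geojson.get("type") in {"Polygon", "MultiPolygon", "Feature", "FeatureCollection"}
```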
@@ -1925,14 +1939,10 @@ def get_geometries(args: Dict, env: EvalEnv) -> Union[DelayedVector, dict]:
     .param('data', description="A raster data cube.", schema={"type": "object", "subtype": "raster-cube"})
     .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"})
 )
-def raster_to_vector(args: Dict, env: EvalEnv):
-    image_collection = extract_arg(args, 'data')
-    if not isinstance(image_collection, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="raster_to_vector",
-            reason=f"Invalid data type {type(image_collection)!r} expected raster-cube."
-        )
-    return image_collection.raster_to_vector()
+def raster_to_vector(args: ProcessArgs, env: EvalEnv):
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
+    # TODO: raster_to_vector is only defined on GeopysparkDataCube, not DriverDataCube
+    return cube.raster_to_vector()
 
 
 @non_standard_process(
@@ -2056,9 +2066,9 @@ def evaluate_process_from_url(process_id: str, namespace: str, args: dict, env:
     .param('seconds', description="Number of seconds to sleep.", schema={"type": "number"}, required=True)
     .returns("Original data", schema={})
 )
-def sleep(args: Dict, env: EvalEnv):
-    data = extract_arg(args, "data")
-    seconds = extract_arg(args, "seconds")
+def sleep(args: ProcessArgs, env: EvalEnv):
+    data = args.get_required("data")
+    seconds = args.get_required("seconds", expected_type=(int, float))
     dry_run_tracer: DryRunDataTracer = env.get(ENV_DRY_RUN_TRACER)
     if not dry_run_tracer:
         _log.info("Sleeping {s} seconds".format(s=seconds))
@@ -2165,20 +2175,15 @@ def resolution_merge(args: ProcessArgs, env: EvalEnv):
     .param('data', description="Data to discard.", schema={}, required=False)
     .returns("Nothing", schema={})
 )
-def discard_result(args: Dict, env: EvalEnv):
+def discard_result(args: ProcessArgs, env: EvalEnv):
     # TODO: keep a reference to the discarded result?
     return NullResult()
 
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/mask_scl_dilation.json"))
 @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/mask_scl_dilation.json"))
-def mask_scl_dilation(args: Dict, env: EvalEnv):
-    cube: DriverDataCube = extract_arg(args, 'data')
-    if not isinstance(cube, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="mask_scl_dilation",
-            reason=f"Invalid data type {type(cube)!r} expected raster-cube."
-        )
+def mask_scl_dilation(args: ProcessArgs, env: EvalEnv):
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
     if hasattr(cube, "mask_scl_dilation"):
         the_args = args.copy()
         del the_args["data"]
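The `hasattr` guard plus `args.copy()`/`del` in the context above forwards every parameter except the cube itself to the backend method when it exists. A hedged sketch of that dispatch pattern (the class name and fallback behavior are hypothetical; the actual forwarding happens past the lines shown in this hunk):

```python
# Sketch of the capability-dispatch pattern: only call mask_scl_dilation when
# the concrete cube class implements it, forwarding the remaining parameters.
class GeopysparkLikeCube:
    def mask_scl_dilation(self, **kwargs):
        return f"masked with {kwargs}"


def apply_scl_mask(cube, args: dict):
    if hasattr(cube, "mask_scl_dilation"):
        the_args = args.copy()
        del the_args["data"]  # everything but the cube becomes keyword arguments
        return cube.mask_scl_dilation(**the_args)
    raise NotImplementedError("backend cube does not support mask_scl_dilation")


print(apply_scl_mask(GeopysparkLikeCube(), {"data": "cube", "scl_band_name": "SCL"}))
```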
@@ -2209,13 +2214,8 @@ def to_scl_dilation_mask(args: ProcessArgs, env: EvalEnv):
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/mask_l1c.json"))
 @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/mask_l1c.json"))
-def mask_l1c(args: Dict, env: EvalEnv):
-    cube: DriverDataCube = extract_arg(args, 'data')
-    if not isinstance(cube, DriverDataCube):
-        raise ProcessParameterInvalidException(
-            parameter="data", process="mask_l1c",
-            reason=f"Invalid data type {type(cube)!r} expected raster-cube."
-        )
+def mask_l1c(args: ProcessArgs, env: EvalEnv):
+    cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
     if hasattr(cube, "mask_l1c"):
         return cube.mask_l1c()
     else:
@@ -2280,8 +2280,8 @@ def array_create(args: ProcessArgs, env: EvalEnv) -> list:
 
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/1.x/proposals/load_result.json"))
-def load_result(args: dict, env: EvalEnv) -> DriverDataCube:
-    job_id = extract_arg(args, "id")
+def load_result(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
+    job_id = args.get_required("id", expected_type=str)
     user = env.get("user")
 
     arguments = {}
@@ -2309,10 +2309,10 @@ def load_result(args: dict, env: EvalEnv) -> DriverDataCube:
 
 @process_registry_100.add_function(spec=read_spec("openeo-processes/1.x/proposals/inspect.json"))
 @process_registry_2xx.add_function(spec=read_spec("openeo-processes/2.x/proposals/inspect.json"))
-def inspect(args: dict, env: EvalEnv):
-    data = extract_arg(args, "data")
-    message = args.get("message", "")
-    level = args.get("level", "info")
+def inspect(args: ProcessArgs, env: EvalEnv):
+    data = args.get_required("data")
+    message = args.get_optional("message", default="")
+    level = args.get_optional("level", default="info")
     if message:
         _log.log(level=logging.getLevelName(level.upper()), msg=message)
     data_message = str(data)
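The `logging.getLevelName(level.upper())` call in the context above works because `getLevelName` maps a known level name string to its numeric value (and, conversely, a numeric level to its name):

```python
import logging

# getLevelName is bidirectional: a known name returns the numeric level,
# and a numeric level returns its name.
assert logging.getLevelName("INFO") == logging.INFO
assert logging.getLevelName(logging.WARNING) == "WARNING"
logging.getLogger(__name__).log(level=logging.getLevelName("info".upper()), msg="inspect message")
```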