@@ -2624,8 +2624,10 @@ def apply_datacube(cube: XarrayDataCube, context: dict) -> XarrayDataCube:
         ),
     ],
 )
-def test_unified_asset_keys(tmp_path, separate_asset_per_band, expected_tiff_files, expected_asset_keys):
-    process_graph = {  # plain old spatiotemporal data cube to GeoTIFF
+def test_unified_asset_keys_spatiotemporal_geotiff(
+    tmp_path, separate_asset_per_band, expected_tiff_files, expected_asset_keys
+):
+    process_graph = {
         "load2": {
             "process_id": "load_collection",
             "arguments": {
@@ -2842,3 +2844,100 @@ def test_unified_asset_keys_sample_by_feature(tmp_path):
 
     for item in items:
         assert set(item["assets"].keys()) == {"openEO"}
+
+
+@pytest.mark.parametrize(
+    ["separate_asset_per_band", "expected_tiff_files", "expected_asset_keys"],
+    [
+        (False, {"openEO.tif"}, {"openEO"}),
+        (
+            True,
+            {
+                "openEO_Flat:0.tif",
+                "openEO_Flat:1.tif",
+                "openEO_Flat:2.tif",
+            },
+            {"openEO_Flat:0", "openEO_Flat:1", "openEO_Flat:2"},
+        ),
+    ],
+)
+def test_unified_asset_keys_spatial_geotiff(
+    tmp_path, separate_asset_per_band, expected_tiff_files, expected_asset_keys
+):
+    process_graph = {
+        "load2": {
+            "process_id": "load_collection",
+            "arguments": {
+                "bands": [
+                    "Flat:0",
+                    "Flat:1",
+                    "Flat:2",
+                ],
+                "id": "TestCollection-LonLat16x16",
+                "spatial_extent": {
+                    "west": 0,
+                    "south": 50,
+                    "east": 5,
+                    "north": 55,
+                },
+                "temporal_extent": ["2025-04-01", "2025-04-21"],
+            },
+        },
+        "reducedimension1": {
+            "process_id": "reduce_dimension",
+            "arguments": {
+                "data": {"from_node": "load2"},
+                "dimension": "t",
+                "reducer": {
+                    "process_graph": {
+                        "first1": {
+                            "process_id": "first",
+                            "arguments": {"data": {"from_parameter": "data"}},
+                            "result": True,
+                        }
+                    }
+                },
+            },
+        },
+        "save1": {
+            "process_id": "save_result",
+            "arguments": {
+                "data": {"from_node": "reducedimension1"},
+                "format": "GTIFF",
+                "options": {"separate_asset_per_band": separate_asset_per_band},
+            },
+            "result": True,
+        },
+    }
+
+    process = {
+        "process_graph": process_graph,
+    }
+
+    job_dir = tmp_path
+    metadata_file = job_dir / "job_metadata.json"
+
+    run_job(
+        process,
+        output_file=job_dir / "out",
+        metadata_file=metadata_file,
+        api_version="2.0.0",
+        job_dir=job_dir,
+        dependencies=[],
+    )
+
+    tiff_files = {file for file in os.listdir(job_dir) if file.endswith(".tif")}
+    assert tiff_files == expected_tiff_files
+
+    with open(metadata_file) as f:
+        job_metadata = json.load(f)
+
+    items = job_metadata["items"]
+    print(f"items={json.dumps(items, indent=2)}")
+
+    assert len(items) == 1
+    # single item ID can be anything (no spatial or temporal references)
+    assert job_metadata["start_datetime"] == "2025-04-01T00:00:00Z"  # top-level rather than on Item
+    assert job_metadata["end_datetime"] == "2025-04-21T00:00:00Z"  # ditto
+
+    assert set(items[0]["assets"].keys()) == expected_asset_keys