-
Notifications
You must be signed in to change notification settings - Fork 24
Tests for the test suite #468
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: draft
Are you sure you want to change the base?
Changes from 39 commits
08bdec4
2db6186
af46f66
29b61af
ac702b8
aa3a632
dac84c1
cad3ee1
46f37df
aa308b8
58d10b8
a9291a8
574abe8
da0c512
4c11c9a
b93b1e6
9262522
c24c1ad
67917fa
895b40c
1967425
3b77850
25384af
75fb59b
2e4c55d
1b475fc
0702c08
fe5f948
4e5ae0d
5248a13
6e21fc0
17f5a6b
81834d6
6508be2
20ef8bc
788955d
65c8534
412b6e8
b88ff6a
1ec59e8
7d9c1ec
6442258
7f4e7b8
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,7 +1,7 @@ | ||
{ | ||
"id": "apply_kernel", | ||
"summary": "Apply a spatial convolution with a kernel", | ||
"description": "Applies a 2D convolution (i.e. a focal operation with a weighted kernel) on the horizontal spatial dimensions (axes `x` and `y`) of a raster data cube.\n\nEach value in the kernel is multiplied with the corresponding pixel value and all products are summed up afterwards. The sum is then multiplied with the factor.\n\nThe process can't handle non-numerical or infinite numerical values in the data cube. Boolean values are converted to integers (`false` = 0, `true` = 1), but all other non-numerical or infinite values are replaced with zeroes by default (see parameter `replace_invalid`).\n\nFor cases requiring more generic focal operations or non-numerical values, see ``apply_neighborhood()``.", | ||
"description": "Applies a 2D convolution (i.e. a focal operation with a weighted kernel) on the horizontal spatial dimensions (axes `x` and `y`) of a raster data cube.\n\nEach value in the kernel is multiplied with the corresponding pixel value and all products are summed up afterwards. The sum is then multiplied with the factor.\n\nThe process can't handle non-numerical or infinite numerical values in the data cube. Boolean values are converted to integers (`false` = 0, `true` = 1), but all other non-numerical, NaN, no-data, or infinite values are replaced with zeroes by default (see parameter `replace_invalid`).\n\nFor cases requiring more generic focal operations or non-numerical values, see ``apply_neighborhood()``.", | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. these changes look unrelated to the scope of this PR, right? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Well, they were spotted when creating the tests. They should've been in #490, but it seems I didn't catch them all. |
||
"categories": [ | ||
"cubes", | ||
"math > image filter" | ||
|
@@ -70,7 +70,7 @@ | |
}, | ||
{ | ||
"name": "replace_invalid", | ||
"description": "This parameter specifies the value to replace non-numerical or infinite numerical values with. By default, those values are replaced with zeroes.", | ||
"description": "This parameter specifies the value to replace non-numerical, NaN, no-data, or infinite numerical values with. By default, those values are replaced with zeroes.", | ||
"schema": { | ||
"type": "number" | ||
}, | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
// Ensure that each test file is valid:
// - it parses as JSON5,
// - it conforms to the test schema,
// - its `id` matches the filename, and
// - its `experimental` flag matches the corresponding process definition.

const fs = require('fs');
const path = require('path');
const JSON5 = require('json5');
const Ajv = require('ajv');

const testsDir = path.join(__dirname, '../tests');

const tests = fs.readdirSync(testsDir).filter(file => file.endsWith('.json5'));

const schemaPath = path.join(testsDir, 'schema', 'schema.json');
const schemaContent = fs.readFileSync(schemaPath, 'utf8');
const schema = JSON.parse(schemaContent);
const validate = new Ajv().compile(schema);

const results = {};
for (const testFile of tests) {
	const testPath = path.join(testsDir, testFile);

	let testData;
	// Ensure we can load the test file as JSON5
	try {
		testData = JSON5.parse(fs.readFileSync(testPath, 'utf8'));
	} catch (error) {
		results[testFile] = `Invalid JSON5: ${error.message}`;
		continue;
	}

	// Ensure the file is valid against the schema.
	// Include the instance path in each message so failures can be located.
	if (!validate(testData)) {
		const errors = validate.errors
			.map(err => `${err.instancePath || '/'} ${err.message}`)
			.join(', ');
		results[testFile] = `Schema validation failed: ${errors}`;
		continue;
	}

	// Make sure the id is the same as the filename without the extension
	const expectedId = path.basename(testFile, '.json5');
	if (testData.id !== expectedId) {
		results[testFile] = `ID mismatch: expected ${expectedId}, got ${testData.id}`;
		continue;
	}

	// Check that `experimental` is set to the same value as in the process itself.
	// Processes live either in the top-level folder or in `proposals/`.
	let processFile = path.join(__dirname, '../', expectedId + '.json');
	if (!fs.existsSync(processFile)) {
		processFile = path.join(__dirname, '../proposals/', expectedId + '.json');
	}
	if (fs.existsSync(processFile)) {
		const processData = JSON.parse(fs.readFileSync(processFile, 'utf8'));
		// Coerce explicitly: a missing flag is equivalent to `false`.
		const expected = Boolean(processData.experimental);
		const actual = Boolean(testData.experimental);
		if (expected !== actual) {
			results[testFile] = `Experimental flag mismatch: expected ${expected}, got ${actual}`;
			continue;
		}
	}
}

if (Object.keys(results).length > 0) {
	console.error('The following test files have issues:');
	for (const [file, error] of Object.entries(results)) {
		console.error(`- ${file}: ${error}`);
	}
	process.exit(1);
}
else {
	console.log('All test files are valid and match the expected schema.');
}
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,44 @@ | ||
// Ensure that each process has a corresponding file in the tests directory.
// It can be empty, but it must exist to ensure people made that decision consciously.

const fs = require('fs');
const path = require('path');

const processesDir = path.join(__dirname, '../');
const proposalsDir = path.join(__dirname, '../proposals');
const testsDir = path.join(__dirname, '../tests');

// Process ids from both the stable folder and the proposals folder.
const processes = [
	...fs.readdirSync(processesDir),
	...fs.readdirSync(proposalsDir),
]
	.filter(file => file.endsWith('.json'))
	.map(file => path.basename(file, '.json'));
const tests = fs.readdirSync(testsDir)
	.filter(file => file.endsWith('.json5'))
	.map(file => path.basename(file, '.json5'));

// Sets give O(1) membership checks and de-duplicate ids that exist in both folders.
const processSet = new Set(processes);
const testSet = new Set(tests);

// Check which tests are missing for the processes
const missingTests = [...processSet].filter(process => !testSet.has(process));

if (missingTests.length > 0) {
	console.error('The following processes are missing tests:');
	missingTests.forEach(process => console.error(`- ${process}`));
}

// Check whether there are tests for non-existing processes
const extraTests = [...testSet].filter(test => !processSet.has(test));
if (extraTests.length > 0) {
	console.error('\nThe following tests exist without a corresponding process:');
	extraTests.forEach(test => console.error(`- ${test}`));
}

// todo: add check that json5 files are valid

if (missingTests.length === 0 && extraTests.length === 0) {
	console.log('All processes have corresponding tests and vice versa.');
	process.exit(0);
}
else {
	process.exit(1);
}
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,119 @@ | ||
# Tests | ||
|
||
This folder contains test cases for the openEO processes. | ||
|
||
## Assumptions | ||
|
||
The test cases assume a couple of things as they are an abstraction and not bound to specific implementations: | ||
- The JSON Schema type `number` explicitly includes the values `+Infinity`, `-Infinity` and `NaN`. | ||
- The input and output values for no-data values are `null` by default unless otherwise specified by a runner. | ||
- Input that is not valid according to the schemas will be rejected upfront and is not covered by the tests. For example, the `absolute` process is only tested against the data types `number` and `null`. There are no tests for a boolean or string input.
m-mohr marked this conversation as resolved.
Show resolved
Hide resolved
|
||
- Numerical data types such as uint8 don't matter, i.e. tests don't check for overflows etc. This suite can't provide such tests as the underlying data type is not known. | ||
- If not otherwise specified for numbers, a precision of 10 decimal places is checked, so return values should be given with at least 11 decimal places.
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I'm not sure I understand the goal of this. If only up to 10 decimals must be checked, why care about the 11th decimal? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. For whatever reasons I sometimes ran into rounding errors or imprecisions, so the 11th digit was there to ensure that's actually happening in the 11th digit, not the 10th.
||
|
||
## Test Files | ||
|
||
To allow for more data types (e.g. infinity and nan for numbers), all the files are encoded in **JSON5** instead of JSON. | ||
|
||
The test files have the following schema: [schema/schema.json](./schema/schema.json) | ||
|
||
### No-data values | ||
|
||
No-data values have a special encoding in tests (see below). | ||
The encoding is replaced with `null` unless otherwise specified by the runners. | ||
|
||
```json5 | ||
{ | ||
"type": "nodata" | ||
} | ||
``` | ||
|
||
### Datetimes | ||
|
||
Datetimes as strings have a varying precision, especially regarding the milliseconds. | ||
Also, timezones are sometimes handled differently.
|
||
Datetimes in return values should be encoded as follows so that the results can be compared better: | ||
m-mohr marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
||
```json5 | ||
{ | ||
"type": "datetime", | ||
"value": "2020-01-01T00:00:00Z" | ||
} | ||
``` | ||
|
||
### External references | ||
|
||
Arguments and return values can point to external files, e.g. | ||
|
||
```json5 | ||
{ | ||
"$ref": "https://host.example/datacube.json" | ||
} | ||
``` | ||
|
||
The test suite can currently only load JSON and JSON5 files. | ||
|
||
### Labeled arrays | ||
|
||
Labeled arrays can't be represented in JSON5 and will be provided as an object instead. | ||
|
||
```json5 | ||
{ | ||
"type": "labeled-array", | ||
"data": [ | ||
{ | ||
"key": "B01", | ||
"value": 1.23 | ||
}, | ||
{ | ||
"key": "B02", | ||
"value": 0.98 | ||
} | ||
// ... | ||
] | ||
} | ||
``` | ||
|
||
### Datacubes | ||
|
||
Datacubes can't be represented in JSON5 and will be provided as an object instead. | ||
Vector datacubes are currently not supported. | ||
|
||
```json5 | ||
{ | ||
"type": "datacube", | ||
"data": [ | ||
// multi-dimensional array | ||
// can be set to `null` if the data values are irrelevant for the test. | ||
], | ||
"nodata": [ | ||
NaN | ||
], | ||
"order": ["bands", "t", "y", "x"], | ||
"dimensions": { | ||
// similar to the STAC datacube extension | ||
// properties: type, axis (if type = spatial), values, and reference_system (optional) | ||
"bands": { | ||
"type": "bands", | ||
"values": ["blue","green","red","nir"] | ||
}, | ||
"t": { | ||
"type": "temporal", | ||
"values": ["2020-06-01T00:00:00Z","2020-06-03T00:00:00Z","2020-06-06T00:00:00Z"] | ||
}, | ||
"y": { | ||
"type": "spatial", | ||
"axis": "y", | ||
"values": [5757495.0,5757485.0,5757475.0,5757465.0], | ||
"reference_system": "EPSG:25832" | ||
}, | ||
"x": { | ||
"type": "spatial", | ||
"axis": "x", | ||
"values": [404835.0,404845.0,404855.0,404865.0,404875.0], | ||
"reference_system": "EPSG:25832" | ||
} | ||
} | ||
} | ||
``` |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,60 @@ | ||
// Test cases for the `absolute` process (level L1).
// JSON5 is used so that `NaN` and `Infinity` can be expressed directly;
// no-data values use the `{"type": "nodata"}` encoding from the test suite README.
{
  "id": "absolute",
  "level": "L1",
  "tests": [
    {
      "arguments": {
        "x": 0
      },
      "returns": 0
    },
    {
      "arguments": {
        "x": 1
      },
      "returns": 1
    },
    {
      "arguments": {
        "x": -1
      },
      "returns": 1
    },
    {
      "arguments": {
        "x": 2.5
      },
      "returns": 2.5
    },
    {
      "arguments": {
        "x": -2.5
      },
      "returns": 2.5
    },
    // NaN propagates through the process unchanged
    {
      "arguments": {
        "x": NaN
      },
      "returns": NaN
    },
    {
      "arguments": {
        "x": Infinity
      },
      "returns": Infinity
    },
    // the absolute value of negative infinity is positive infinity
    {
      "arguments": {
        "x": -Infinity
      },
      "returns": Infinity
    },
    // no-data input yields no-data output
    {
      "arguments": {
        "x": {"type": "nodata"}
      },
      "returns": {"type": "nodata"}
    }
  ]
}
Uh oh!
There was an error while loading. Please reload this page.