diff --git a/README.md b/README.md
index 28c6ef37..45283419 100644
--- a/README.md
+++ b/README.md
@@ -33,11 +33,7 @@ This repository contains a set of files formally describing the openEO Processes
 * [implementation.md](meta/implementation.md) in the `meta` folder provide some additional implementation details for back-ends. For back-end implementors, it's highly recommended to read them.
 * [subtype-schemas.json](meta/subtype-schemas.json) in the `meta` folder defines common data types (`subtype`s) for JSON Schema used in openEO processes.
 * The [`examples`](examples/) folder contains some useful examples that the processes link to. All of these are non-binding additions.
-* The [`tests`](tests/) folder can be used to test the process specification for validity and consistent "style". It also allows rendering the processes in a web browser.
-
-  If you switch to the `tests` folder in CLI and after installing NodeJS and run `npm install`, you can run a couple of commands:
-  * `npm test`: Check the processes for validity and lint them. Processes need to pass tests to be added to this repository.
-  * `npm run render`: Opens a browser with all processes rendered through the docgen.
+* The [`tests`](tests/) folder can be used to test the process specification for validity and consistent "style". It also allows rendering the processes in a web browser. Check the [tests documentation](tests/README.md) for details.
 
 ## Process
 
diff --git a/meta/subtype-schemas.json b/meta/subtype-schemas.json
index b2a349bf..2d6d7ae8 100644
--- a/meta/subtype-schemas.json
+++ b/meta/subtype-schemas.json
@@ -75,6 +75,7 @@
         "type": "object",
         "subtype": "chunk-size",
         "title": "Chunk Size",
+        "description": "The chunk size per dimension. This object maps dimension names (given as keys) to chunk sizes, given either as a physical measure or as a number of pixels. If not given or `null`, no chunking is applied.",
         "required": [
             "dimension",
             "value"
@@ -108,7 +109,7 @@
         "type": "string",
         "subtype": "collection-id",
         "title": "Collection ID",
-        "description": "A collection id from the list of supported collections.",
+        "description": "A collection identifier from the list of supported collections.",
         "pattern": "^[\\w\\-\\.~/]+$"
     },
     "date": {
@@ -128,6 +129,7 @@
         "duration": {
             "type": "string",
             "subtype": "duration",
+            "title": "Duration",
             "description": "[ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations), e.g. `P1D` for one day.",
             "pattern": "^(-?)P(?=\\d|T\\d)(?:(\\d+)Y)?(?:(\\d+)M)?(?:(\\d+)([DW]))?(?:T(?:(\\d+)H)?(?:(\\d+)M)?(?:(\\d+(?:\\.\\d+)?)S)?)?$"
         },
@@ -172,7 +174,7 @@
         "type": "string",
         "subtype": "input-format",
         "title": "Input File Format",
-        "description": "An input format supported by the back-end."
+        "description": "A file format that the back-end supports for importing data."
     },
     "input-format-options": {
         "type": "object",
@@ -191,7 +193,7 @@
         "type": "array",
         "subtype": "kernel",
         "title": "Image Kernel",
-        "description": "Image kernel, a two-dimensional array of numbers.",
+        "description": "A two-dimensional array of numbers to be used as the kernel for the image operation.",
         "items": {
             "type": "array",
             "items": {
@@ -237,7 +239,7 @@
         "type": "string",
         "subtype": "output-format",
         "title": "Output File Format",
-        "description": "An output format supported by the back-end."
+        "description": "A file format that the back-end supports for saving and exporting data."
     },
     "output-format-options": {
         "type": "object",
@@ -390,13 +392,13 @@
         "type": "string",
         "subtype": "udf-runtime",
         "title": "UDF runtime",
-        "description": "The name of a UDF runtime."
+        "description": "The identifier of a UDF runtime you want to run the given UDF source code with."
     },
     "udf-runtime-version": {
         "type": "string",
         "subtype": "udf-runtime-version",
         "title": "UDF Runtime version",
-        "description": "The version of a UDF runtime."
+        "description": "The version of the UDF runtime you want to run the given UDF source code with."
     },
     "uri": {
         "type": "string",
diff --git a/tests/README.md b/tests/README.md
index e2868634..31c79785 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -4,5 +4,43 @@ To run the tests follow these steps:
 
 1. Install [node and npm](https://nodejs.org) - should run with any recent version
 2. Run `npm install` in this folder to install the dependencies
-3. Run the tests with `npm test`.
-4. To show the files nicely formatted in a web browser, run `npm run render`. It starts a server and opens the corresponding page in a web browser.
\ No newline at end of file
+3. Run the tests with `npm test`. This also lints the files and verifies that they follow the best practices.
+4. To show the files nicely formatted in a web browser, run `npm run render`. It starts a server and opens the corresponding page in a web browser.
+
+## Developing processes
+
+All new processes must be added to the `proposals` folder. Each process must be declared to be `experimental`.
+Processes must comply with the best practices, which ensure a certain degree of consistency.
+`npm test` will validate and lint the processes and also ensure that the best practices are applied.
+
+The linting checks that the files are named correctly and that the content is correctly formatted and indented (JSON and embedded CommonMark).
+The best practices ensure, for example, that fields are neither too short nor too long.
+
+A spell check also checks the texts. It may report names and rarely used technical terms as errors.
+If you are sure that these are correct, you can add them to the `.words` file to exclude them from being reported as errors.
+The file must contain one word per line.
+
+New processes should be added via GitHub Pull Requests.
+
+## Subtype schemas
+
+Sometimes it is useful to define a new "data type" on top of the JSON types (number, string, array, object, ...).
+For example, a client could show a select box with all available collections by adding the subtype `collection-id` to the JSON type `string`.
+If you think a new subtype should be added, you need to add it to the `meta/subtype-schemas.json` file.
+It must be a valid JSON Schema. The tests mentioned above will also verify to a certain degree that the subtypes are defined correctly. A sketch of such a definition is given at the end of this document.
+
+## Examples
+
+To get a process out of the proposal state, at least two examples must be provided.
+The examples are located in the `examples` folder and will also be validated to some extent in the tests.
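+
+As a rough sketch for the "Subtype schemas" section above, a new entry under `definitions` in `meta/subtype-schemas.json` could look like the following. The `example-id` subtype is a made-up placeholder for illustration only, not an actual subtype:
+
+```json
+"example-id": {
+    "type": "string",
+    "subtype": "example-id",
+    "title": "Example ID",
+    "description": "A made-up identifier that only illustrates how subtypes are defined on top of the JSON types."
+}
+```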
\ No newline at end of file
diff --git a/tests/processes.test.js b/tests/processes.test.js
index 089328f9..f7c73c0d 100644
--- a/tests/processes.test.js
+++ b/tests/processes.test.js
@@ -1,7 +1,7 @@
 const glob = require('glob');
 const fs = require('fs');
 const path = require('path');
-const { normalizeString, checkDescription, checkSpelling, checkJsonSchema, getAjv, prepareSchema } = require('./testHelpers');
+const { normalizeString, checkDescription, checkSpelling, checkJsonSchema, getAjv, prepareSchema, isObject } = require('./testHelpers');
 
 const anyOfRequired = [
     "quantiles",
@@ -66,7 +66,7 @@ describe.each(processes)("%s", (file, p, fileContent, proposal) => {
         // description
         expect(typeof p.description).toBe('string');
         // lint: Description should be longer than a summary
-        expect(p.description.length).toBeGreaterThan(55);
+        expect(p.description.length).toBeGreaterThan(60);
         checkDescription(p.description, p);
     });
 
@@ -98,7 +98,7 @@ describe.each(processes)("%s", (file, p, fileContent, proposal) => {
     }
 
     test("Return Value", () => {
-        expect(typeof p.returns).toBe('object');
+        expect(isObject(p.returns)).toBeTruthy();
         expect(p.returns).not.toBeNull();
 
         // return value description
@@ -108,14 +108,14 @@ describe.each(processes)("%s", (file, p, fileContent, proposal) => {
         checkDescription(p.returns.description, p);
 
         // return value schema
-        expect(typeof p.returns.schema).toBe('object');
         expect(p.returns.schema).not.toBeNull();
+        expect(typeof p.returns.schema).toBe('object');
         // lint: Description should not be empty
         checkJsonSchema(jsv, p.returns.schema);
     });
 
     test("Exceptions", () => {
-        expect(typeof p.exceptions === 'undefined' || (typeof p.exceptions === 'object' && p.exceptions !== 'null')).toBeTruthy();
+        expect(typeof p.exceptions === 'undefined' || isObject(p.exceptions)).toBeTruthy();
     });
 
     var exceptions = o2a(p.exceptions);
@@ -153,7 +153,7 @@ describe.each(processes)("%s", (file, p, fileContent, proposal) => {
        }
        var paramKeys = Object.keys(parametersObj);
 
-        expect(typeof example).toBe('object');
+        expect(isObject(example)).toBeTruthy();
         expect(example).not.toBeNull();
 
         // example title
@@ -194,8 +194,7 @@ describe.each(processes)("%s", (file, p, fileContent, proposal) => {
 
     if (Array.isArray(p.links)) {
         test.each(p.links)("Links > %#", (link) => {
-            expect(typeof link).toBe('object');
-            expect(link).not.toBeNull();
+            expect(isObject(link)).toBeTruthy();
 
             // link href
             expect(typeof link.href).toBe('string');
@@ -250,8 +249,8 @@ function checkParam(param, p, checkCbParams = true) {
     checkFlags(param);
 
     // Parameter schema
-    expect(typeof param.schema).toBe('object');
     expect(param.schema).not.toBeNull();
+    expect(typeof param.schema).toBe('object');
     checkJsonSchema(jsv, param.schema);
 
     if (!checkCbParams) {
diff --git a/tests/subtype-schemas.test.js b/tests/subtype-schemas.test.js
deleted file mode 100644
index 49633fda..00000000
--- a/tests/subtype-schemas.test.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const fs = require('fs');
-const $RefParser = require("@apidevtools/json-schema-ref-parser");
-const { checkJsonSchema, normalizeString, getAjv } = require('./testHelpers');
-
-test("subtype-schemas.json", async () => {
-    let fileContent = fs.readFileSync('../meta/subtype-schemas.json');
-
-    let schema = JSON.parse(fileContent);
-    expect(schema).not.toBe(null);
-    expect(typeof schema).toBe('object');
-
-    // lint: Check whether the file is correctly JSON formatted
-    expect(normalizeString(JSON.stringify(schema, null, 4))).toEqual(normalizeString(fileContent.toString()));
-
-    // Is JSON Schema valid?
-    checkJsonSchema(await getAjv(), schema);
-
-    // is everything dereferencable?
-    let subtypes = await $RefParser.dereference(schema, { dereference: { circular: "ignore" } });
-    expect(subtypes).not.toBe(null);
-    expect(typeof subtypes).toBe('object');
-});
\ No newline at end of file
diff --git a/tests/subtypes-file.test.js b/tests/subtypes-file.test.js
new file mode 100644
index 00000000..e70f7e8f
--- /dev/null
+++ b/tests/subtypes-file.test.js
@@ -0,0 +1,29 @@
+const fs = require('fs');
+const $RefParser = require("@apidevtools/json-schema-ref-parser");
+const { checkJsonSchema, getAjv, isObject, normalizeString } = require('./testHelpers');
+
+test("File subtype-schemas.json", async () => {
+    let schema;
+    let fileContent;
+    try {
+        fileContent = fs.readFileSync('../meta/subtype-schemas.json');
+        schema = JSON.parse(fileContent);
+    } catch(err) {
+        console.error("The file for subtypes is invalid and can't be read:");
+        console.error(err);
+        expect(err).toBeUndefined();
+    }
+
+    expect(isObject(schema)).toBeTruthy();
+    expect(isObject(schema.definitions)).toBeTruthy();
+
+    // lint: Check whether the file is correctly JSON formatted
+    expect(normalizeString(JSON.stringify(schema, null, 4))).toEqual(normalizeString(fileContent.toString()));
+
+    // Is the JSON Schema valid?
+    checkJsonSchema(await getAjv(), schema);
+
+    // Is everything dereferenceable?
+    let subtypes = await $RefParser.dereference(schema, { dereference: { circular: "ignore" } });
+    expect(isObject(subtypes)).toBeTruthy();
+});
\ No newline at end of file
diff --git a/tests/subtypes-schemas.test.js b/tests/subtypes-schemas.test.js
new file mode 100644
index 00000000..ff1b72bd
--- /dev/null
+++ b/tests/subtypes-schemas.test.js
@@ -0,0 +1,54 @@
+const $RefParser = require("@apidevtools/json-schema-ref-parser");
+const { checkDescription, checkSpelling, isObject } = require('./testHelpers');
+
+// I'd like to run the tests for each subtype individually instead of in a loop,
+// but jest doesn't support that, so you need to figure out yourself which subtype is broken.
+// The console.log in afterAll gives a hint of which subtype was checked last.
+
+// Load and dereference schemas
+let subtypes = {};
+let lastTest = null;
+let testsCompleted = 0;
+beforeAll(async () => {
+    subtypes = await $RefParser.dereference('../meta/subtype-schemas.json', { dereference: { circular: "ignore" } });
+    return subtypes;
+});
+
+afterAll(async () => {
+    if (testsCompleted != Object.keys(subtypes.definitions).length) {
+        console.log('The schema the test has likely failed for: ' + lastTest);
+    }
+});
+
+test("Schemas in subtype-schemas.json", () => {
+    // Each schema must contain at least a type, subtype, title and description
+    for(let name in subtypes.definitions) {
+        let schema = subtypes.definitions[name];
+        lastTest = name;
+
+        // Schema is an object
+        expect(isObject(schema)).toBeTruthy();
+
+        // Type is an array with at least one element, or a string
+        expect((Array.isArray(schema.type) && schema.type.length > 0) || typeof schema.type === 'string').toBeTruthy();
+
+        // Subtype is a string
+        expect(typeof schema.subtype === 'string').toBeTruthy();
+
+        // Check title
+        expect(typeof schema.title === 'string').toBeTruthy();
+        // lint: Title should be short
+        expect(schema.title.length).toBeLessThan(60);
+        // lint: Title should not end with a dot
+        expect(/[^\.]$/.test(schema.title)).toBeTruthy();
+        checkSpelling(schema.title, schema);
+
+        // Check description
+        expect(typeof schema.description).toBe('string');
+        // lint: Description should be longer than the title
+        expect(schema.description.length).toBeGreaterThan(60);
+        checkDescription(schema.description, schema);
+
+        testsCompleted++;
+    }
+});
\ No newline at end of file
diff --git a/tests/testHelpers.js b/tests/testHelpers.js
index 385d0449..3f998088 100644
--- a/tests/testHelpers.js
+++ b/tests/testHelpers.js
@@ -116,6 +116,10 @@ async function getAjv() {
     return jsv;
 }
 
+function isObject(obj) {
+    return (typeof obj === 'object' && obj === Object(obj) && !Array.isArray(obj));
+}
+
 function normalizeString(str) {
     return str.replace(/\r\n|\r|\n/g, "\n").trim();
 }
@@ -214,5 +218,6 @@ module.exports = {
     checkSpelling,
     checkJsonSchema,
     checkSchemaRecursive,
-    prepareSchema
+    prepareSchema,
+    isObject
 };
\ No newline at end of file