- Fixes:
  - helpers.jsonSchemaMethods: -> renamed `flattenSchema` to `nestSchema`.
- Added:
  - helpers.descriptors: -> parseFunctionToJsonSchema
  - helpers.jsonSchemaMethods: -> added `flattenSchema` and `reduceSchema`. `flattenSchema` converts a nested JSON-Schema into a flat, `$ref`-based one; `reduceSchema` merges duplicated definitions.
This commit is contained in:
Martin Karkowski 2022-03-22 10:12:32 +01:00
parent d81eddd7a6
commit d911380511
32 changed files with 449 additions and 62 deletions

View File

@ -16,3 +16,10 @@ Inital commit, which is working with the browser
- Added:
- pub-sub-system: Listeners receive now: topicOfContent (the path of the data that is extracted), topicOfChange (the path of the data that emitted the update), topicOfSubscription (the subscription.),
- nope repl: Added the context `nope`
# 1.0.27
- Fixes:
- helpers.jsonSchemaMethods: -> renamed `flattenSchema` to `nestSchema`.
- Added:
- helpers.descriptors: -> parseFunctionToJsonSchema
- helpers.jsonSchemaMethods: -> added `flattenSchema` and `reduceSchema`. `flattenSchema` converts a nested JSON-Schema into a flat, `$ref`-based one; `reduceSchema` merges duplicated definitions.

View File

@ -1 +1 @@
1.0.26
1.0.27

View File

@ -11,7 +11,7 @@ import { join } from "path";
import * as TJS from "typescript-json-schema";
import { createFile } from "../../helpers/fileMethods";
import {
flattenSchema,
nestSchema,
schemaGetDefinition,
} from "../../helpers/jsonSchemaMethods";
import { deepClone } from "../../helpers/objectMethods";
@ -87,7 +87,7 @@ export type PARAM = {{{simplifiedSubType}}};
try {
// Try to nest the Schema:
_schema = flattenSchema(deepClone(_schema));
_schema = nestSchema(deepClone(_schema));
return schemaGetDefinition(_schema, "#/definitions/PARAM");
} catch (error) {
// Failed to nest the schema. It seems that it is a recursive one.

View File

@ -1,9 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 08:53:00
* @modify date 2020-11-06 08:53:01
* @desc [description]
*/
import { rgetattr } from "./objectMethods";

View File

@ -1,9 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-08-27 21:16:23
* @modify date 2021-09-02 22:18:23
* @desc [description]
*/
/**

135
lib/helpers/descriptors.ts Normal file
View File

@ -0,0 +1,135 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
*/
import { INopeDescriptor } from "../index.browser";
import { isJsonSchema } from "./jsonSchemaMethods";
import { SPLITCHAR } from "./objectMethods";
/**
 * A Helper to parse a function descriptor (`type === "function"`) into
 * JSON-Schema definitions for its inputs and outputs.
 *
 * @param schema The function descriptor to parse.
 * @param toJSONSchema If `true`, an error is raised when a parameter (or the
 *                     output) is itself a function and therefore cannot be
 *                     expressed as plain JSON-Schema.
 * @param workWithRefs If `true`, every parameter schema is stored in
 *                     `definitions` and referenced via `$ref`; otherwise the
 *                     schemas are inlined.
 * @param definitions The definition store which is filled and returned.
 * @param prePathInput Key under which the combined input schema is stored.
 * @param prePathOutput Key under which the output schema is stored.
 * @param splitChar Char used to join the path segments of generated ids.
 * @returns The filled `definitions`, the input parameter `order`, the
 *          generated reference `ids` and the keys of the root schemas
 *          (`inputId`, `outputId`).
 * @throws If `schema` is not a function descriptor, or (with `toJSONSchema`)
 *         if a parameter / the output contains a function.
 */
export function parseFunctionToJsonSchema(
  schema: INopeDescriptor,
  toJSONSchema = true,
  workWithRefs = true,
  definitions: { [index: string]: INopeDescriptor } = {},
  prePathInput: string = "input",
  prePathOutput: string = "output",
  splitChar: string = SPLITCHAR
) {
  if (schema.type !== "function") {
    throw Error("Expecting a function");
  }

  // Order in which the inputs have been defined:
  const order: string[] = [];
  // Every id generated for a `$ref`:
  const ids: string[] = [];

  const inputSchema: INopeDescriptor = {
    type: "object",
    properties: {},
    required: [],
    definitions: {},
  };

  /**
   * A Helper Function, to parse a single Parameter into `schemaToStore`.
   * @param schemaToStore The object-schema receiving the parameter.
   * @param name Name of the parameter.
   * @param optional If `false`, the name is added to `required`.
   * @param schema The schema of the parameter itself.
   * @param preString Prefix used to build the `$ref` id.
   * @param isInput If `true`, the name is tracked in `order`.
   */
  function parseParameter(
    schemaToStore: INopeDescriptor,
    name: string,
    optional: boolean,
    schema: INopeDescriptor,
    preString: string,
    isInput: boolean
  ) {
    if (toJSONSchema && !isJsonSchema(schema)) {
      throw Error("Schema contains functions as paramter");
    }
    if (isInput) order.push(name);
    if (!optional) {
      schemaToStore.required.push(name);
    }
    if (workWithRefs) {
      const ref = preString ? preString + splitChar + name : name;
      // store the id.
      ids.push(ref);
      // We only want to store the Reference.
      schemaToStore.properties[name] = {
        $ref: ref,
      };
      // Now store the element as Reference
      schema["$id"] = ref;
      definitions[ref] = schema;
    } else {
      schemaToStore.properties[name] = schema;
    }
  }

  for (const input of schema.inputs || []) {
    parseParameter(
      inputSchema,
      input.name,
      input.optional,
      input.schema,
      prePathInput,
      true
    );
  }
  // Store the combined input schema.
  definitions[prePathInput] = inputSchema;

  if (Array.isArray(schema.outputs)) {
    const outputSchema: INopeDescriptor = {
      type: "object",
      properties: {},
      required: [],
      definitions: {},
    };
    for (const output of schema.outputs) {
      parseParameter(
        outputSchema,
        output.name,
        output.optional,
        output.schema,
        prePathOutput,
        false
      );
    }
    // BUGFIX: store the output schema under the *output* key - previously it
    // was written to `prePathInput`, overwriting the input schema.
    definitions[prePathOutput] = outputSchema;
  } else if (schema.outputs) {
    if (toJSONSchema && !isJsonSchema(schema.outputs)) {
      throw Error("Output contains a Function => it can not be parsed");
    }
    definitions[prePathOutput] = schema.outputs;
  }

  return {
    definitions,
    order,
    ids,
    inputId: prePathInput,
    outputId: prePathOutput,
  };
}

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 08:53:03
* @modify date 2021-11-11 11:17:29
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 08:53:48
* @modify date 2021-11-11 11:11:09
* @desc [description]
*/

View File

@ -5,6 +5,7 @@
import * as arrays from "./arrayMethods";
import * as async from "./async";
import * as descriptors from "./descriptors";
import * as subject from "./getSubject";
import * as ids from "./idMethods";
import * as json from "./jsonMethods";
@ -19,6 +20,7 @@ import * as strings from "./stringMethods";
export * from "./arrayMethods";
export * from "./async";
export * from "./descriptors";
export * from "./getSubject";
export * from "./idMethods";
export * from "./jsonMethods";
@ -44,4 +46,5 @@ export {
pathes,
runtime,
subject,
descriptors,
};

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 08:53:51
* @modify date 2020-11-06 08:53:51
* @desc [description]
*/

View File

@ -1,20 +1,25 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 08:53:55
* @modify date 2020-11-06 08:53:56
* @desc [description]
*/
import { IJsonSchema } from "../types/IJSONSchema";
import { INopeDescriptor } from "../types/nope";
import { flattenObject, rgetattr, rsetattr, SPLITCHAR } from "./objectMethods";
import {
deepEqual,
flattenObject,
rgetattr,
rsetattr,
SPLITCHAR,
} from "./objectMethods";
import { replaceAll } from "./stringMethods";
/**
* Function to Flatten a JSON-Schema.
* @param schema
*/
export function flattenSchema(schema: IJsonSchema) {
export function nestSchema(schema: IJsonSchema) {
let counter = 10000;
let flattenSchema = flattenObject(schema);
@ -61,6 +66,297 @@ export function schemaGetDefinition(schema: IJsonSchema, reference: string) {
return rgetattr(schema, reference.replace("#/", ""), null, "/");
}
/**
 * A Helper to flatten a schema. Nested "object"/"array" sub-schemas are moved
 * into `definitions` and replaced by `$ref` stubs. This will perhaps help to
 * reduce the amount of data.
 *
 * NOTE: `schema` is modified in place and is itself stored in the returned
 * `definitions` under `prePath` (+ `postPath`).
 *
 * @author M.Karkowski
 * @export
 * @param {IJsonSchema} schema The Schema used as input. This will be flattened.
 * @param {string} [prePath="root"] The Name of the Schema. It is used for the "main" definition.
 * @param {string} [postPath=""] An additional suffix added to every generated name. example "msg"
 * @param {*} [splitChar=SPLITCHAR] The char to split the elements.
 * @param {IJsonSchema} [definitions={ definitions: {} }] A Set of defintions to be used.
 * @return {IJsonSchema} The definitions, now containing the flattened schema.
 */
export function flattenSchema(
  schema: IJsonSchema,
  prePath: string = "root",
  postPath: string = "",
  splitChar = SPLITCHAR,
  definitions: IJsonSchema = { definitions: {} }
): IJsonSchema {
  const _postPath = postPath ? splitChar + postPath : postPath;

  // We only want to move the more complex datatypes into `definitions`.
  const _isComplex = (item: IJsonSchema): boolean =>
    item.type === "object" || item.type === "array";

  // Recursively flatten `item`, store it under `ref` (+ postPath) and return
  // the `$ref` stub that replaces it in the parent schema.
  const _toRef = (item: IJsonSchema, ref: string): IJsonSchema => {
    definitions = flattenSchema(item, ref, postPath, splitChar, definitions);
    return {
      type: item.type,
      $ref: ref + _postPath,
    };
  };

  if (Array.isArray(schema.items)) {
    for (const [idx, item] of schema.items.entries()) {
      if (_isComplex(item)) {
        schema.items[idx] = _toRef(
          item,
          prePath + splitChar + "items" + splitChar + idx.toString()
        );
      }
    }
  } else if (schema.items) {
    if (_isComplex(schema.items)) {
      schema.items = _toRef(schema.items, prePath + splitChar + "items");
    }
  }

  if (
    typeof schema.additionalItems === "object" &&
    _isComplex(schema.additionalItems)
  ) {
    // BUGFIX: the stub was previously assigned to `schema.additionalProperties`
    // instead of `schema.additionalItems`.
    schema.additionalItems = _toRef(
      schema.additionalItems,
      prePath + splitChar + "additionalItems"
    );
  }

  for (const key in schema.properties || {}) {
    const item = schema.properties[key];
    if (_isComplex(item)) {
      schema.properties[key] = _toRef(item, prePath + splitChar + key);
    }
  }

  if (
    typeof schema.additionalProperties === "object" &&
    _isComplex(schema.additionalProperties)
  ) {
    schema.additionalProperties = _toRef(
      schema.additionalProperties,
      prePath + splitChar + "additionalProperties"
    );
  }

  // BUGFIX: the flattened oneOf/allOf/anyOf entries were previously written
  // to `schema.items[idx]` instead of back into the combinator arrays.
  for (const combinator of ["oneOf", "allOf", "anyOf"] as const) {
    const variants = schema[combinator];
    if (variants) {
      for (const [idx, item] of variants.entries()) {
        if (_isComplex(item)) {
          variants[idx] = _toRef(
            item,
            prePath + splitChar + combinator + splitChar + idx.toString()
          );
        }
      }
    }
  }

  // BUGFIX: use `_postPath` so the key matches the generated `$ref`s. The old
  // `prePath + splitChar + postPath` left a trailing splitChar for an empty
  // `postPath`, which no `$ref` ever pointed to.
  definitions.definitions[prePath + _postPath] = schema;
  return definitions;
}
/**
 * Helper to generate a name for the combined schemas.
 *
 * Walks the given names in order and returns the `title` of the first
 * definition that carries one; when none of the definitions is titled,
 * the first name is used as fallback.
 *
 * @author M.Karkowski
 * @param {IJsonSchema} schema The base schema, containing all definitions.
 * @param {string[]} names The names of the defintions, which should be combined.
 * @return {string} The combined name.
 */
function _defaultCombiner(schema: IJsonSchema, names: string[]): string {
  const titled = names
    .map((id) => schema.definitions[id])
    .find((definition) => definition.title);
  return titled !== undefined ? titled.title : names[0];
}
/**
 * Helper Function to reduce the Schema and remove multiple definitions.
 *
 * Definitions that are structurally identical (deep-equal) are merged into a
 * single definition; every reference (definition keys and `$ref` strings) to
 * a removed duplicate is rewritten to the merged name.
 *
 * @param schema The schema whose `definitions` should be de-duplicated.
 * @param getName Combiner that picks the name for a set of equal definitions
 *                (defaults to `_defaultCombiner`).
 * @returns The reduced schema. NOTE: reference-rewriting works on a JSON
 *          round-trip, so a new object may be returned.
 */
export function reduceSchema(
  schema: IJsonSchema,
  getName: (schema: IJsonSchema, names: string[]) => string = _defaultCombiner
) {
  if (!schema.definitions) {
    return schema;
  }

  /**
   * Helper to find equals definitions.
   * @param candidateName Name of the candidate to use as reference
   * @returns Empty when there is no duplicate; otherwise the duplicates
   *          followed by the candidate name itself.
   */
  function _findDuplicates(candidateName: string): string[] {
    const equals: string[] = [];
    const candidate = schema.definitions[candidateName];
    for (const name in schema.definitions) {
      if (candidateName === name) {
        continue;
      }
      if (deepEqual(schema.definitions[name], candidate)) {
        equals.push(name);
      }
    }
    if (equals.length > 0) {
      equals.push(candidateName);
    }
    return equals;
  }

  // We want to check every definition, whether there exists a pair that
  // matches. As long as we have items to test, we look for equal entries;
  // every merge changes the key set, so we restart with the current keys.
  let toTest = Object.keys(schema.definitions);
  while (toTest.length) {
    const candidateName = toTest.pop();
    const equalDefinitionIds = _findDuplicates(candidateName);
    if (equalDefinitionIds.length) {
      const newName = getName(schema, equalDefinitionIds);
      // Keep the first entry and drop every other duplicate.
      // BUGFIX: the old `if (first) { continue; } first = false;` never
      // cleared the flag (the `continue` skipped the assignment), so nothing
      // was ever deleted - and it deleted from `schema.default` instead of
      // `schema.definitions`.
      for (const name of equalDefinitionIds.slice(1)) {
        delete schema.definitions[name];
      }
      // Rewrite every reference (keys and `$ref` strings) to the new name.
      for (const name of equalDefinitionIds) {
        schema = JSON.parse(
          replaceAll(
            JSON.stringify(schema),
            JSON.stringify(name),
            JSON.stringify(newName)
          )
        );
      }
      toTest = Object.keys(schema.definitions);
    }
  }
  return schema;
}
const _isNopeDescriptor: Array<
[keyof INopeDescriptor, (value: any) => boolean]
> = [

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-09-09 16:00:36
* @modify date 2020-09-09 16:00:40
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-11-13 08:17:19
* @modify date 2021-11-13 09:44:51
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-11-23 11:21:57
* @modify date 2021-11-23 11:21:57
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-11-13 08:17:19
* @modify date 2021-11-13 09:44:51
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-11-23 11:04:49
* @modify date 2021-11-23 11:04:49
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-11-13 08:17:19
* @modify date 2021-11-13 09:44:51
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 08:54:00
* @modify date 2021-08-13 10:03:10
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2022-01-10 18:29:07
* @modify date 2022-01-10 18:29:07
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-11-13 08:17:19
* @modify date 2021-11-13 09:44:51
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-11-13 07:31:55
* @modify date 2021-11-13 09:44:45
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-08-25 14:52:52
* @modify date 2021-02-12 14:28:46
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-03-11 17:17:50
* @modify date 2021-08-10 16:55:51
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-08-25 23:01:24
* @modify date 2020-11-07 00:35:49
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 08:54:30
* @modify date 2020-11-06 08:54:31
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-07 17:24:34
* @modify date 2021-08-11 19:59:44
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-06 14:20:32
* @modify date 2021-08-11 19:59:47
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-08-27 20:19:51
* @modify date 2021-08-27 20:19:51
* @desc [description]
*/

View File

@ -1,14 +1,11 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2021-08-27 20:19:51
* @modify date 2021-08-27 20:19:51
* @desc [description]
*/
export { generateNopeBasicPackage } from "./generateNopeBasicPackage";
export { getPackageLoader } from "./getPackageLoader.nodejs";
export { NopePackageLoaderFileAccess as NopePackageLoader } from "./nopePackageLoader.nodejs";
export {
IConfigFile,
IPackageConfig,
@ -18,3 +15,4 @@ export {
loadPackages,
writeDefaultConfig,
} from "./loadPackages";
export { NopePackageLoaderFileAccess as NopePackageLoader } from "./nopePackageLoader.nodejs";

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2020-11-11 14:19:10
* @modify date 2021-02-09 10:11:44
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2018-07-01 09:10:35
* @modify date 2022-01-10 16:24:45
* @desc [description]
*/

View File

@ -1,8 +1,6 @@
/**
* @author Martin Karkowski
* @email m.karkowski@zema.de
* @create date 2018-07-01 09:10:35
* @modify date 2022-01-10 16:24:45
* @desc [description]
*/