[add] output:jsonschema [add] output:typescript

Christian Fraß 2023-05-11 12:59:02 +02:00
parent 444809dcfd
commit 94864eaeec
13 changed files with 2794 additions and 2407 deletions

View file

@ -0,0 +1,64 @@
{
"domains": [
{
"name": "address",
"description": "collection of addresses",
"key_field": {
"name": "id"
},
"data_fields": [
{
"name": "city",
"description": "the name of the city",
"nullable": false,
"type": "string_medium"
},
{
"name": "zip",
"description": "the postal code",
"nullable": false,
"type": "string_medium"
},
{
"name": "street",
"description": "the name of the street and the house number",
"nullable": false,
"type": "string_medium"
}
]
},
{
"name": "person",
"description": "collection of contacts",
"key_field": null,
"data_fields": [
{
"name": "prename",
"description": "first name of the person",
"nullable": false,
"type": "string_medium"
},
{
"name": "surname",
"description": "last name of the person",
"nullable": false,
"type": "string_medium"
},
{
"name": "address_id",
"description": "reference to the associated address dataset",
"nullable": false,
"type": "integer"
},
{
"name": "email_address",
"description": "optional eMail address",
"nullable": true,
"type": "string_medium",
"default": null
}
]
}
]
}

View file

@ -12,18 +12,27 @@
"type": "string" "type": "string"
}, },
"description": { "description": {
"type": ["null", "string"], "type": [
"null",
"string"
],
"default": null "default": null
}, },
"key_field": { "key_field": {
"type": ["null","object"], "type": [
"null",
"object"
],
"additionalProperties": false, "additionalProperties": false,
"properties": { "properties": {
"name": { "name": {
"type": "string" "type": "string"
}, },
"description": { "description": {
"type": ["null", "string"], "type": [
"null",
"string"
],
"default": null "default": null
} }
}, },
@ -42,7 +51,10 @@
"type": "string" "type": "string"
}, },
"description": { "description": {
"type": ["null", "string"], "type": [
"null",
"string"
],
"default": null "default": null
}, },
"type": { "type": {
@ -61,7 +73,13 @@
"default": true "default": true
}, },
"default": { "default": {
"type": ["null", "boolean", "integer", "float", "string"], "type": [
"null",
"boolean",
"integer",
"float",
"string"
],
"default": null "default": null
} }
}, },
@ -88,10 +106,8 @@
"parameters": { "parameters": {
"type": "object", "type": "object",
"additionalProperties": "string", "additionalProperties": "string",
"properties": { "properties": {},
}, "required": []
"required": [
]
} }
}, },
"required": [ "required": [

View file

@ -1,23 +1,35 @@
/** /**
* @author fenris * @author fenris
*/ */
declare type int = number; type int = number;
/** /**
* @author fenris * @author fenris
*/ */
declare type float = number; type float = number;
/** /**
* @author fenris * @author fenris
*/ */
declare type type_time = { type type_time = {
hours: int; hours: int;
minutes: int; minutes: int;
seconds: int; seconds: int;
}; };
declare var process: any;
declare var require: any;
declare class Buffer {
constructor(x: string, modifier?: string);
toString(modifier?: string): string;
}
declare namespace lib_plankton.base {
/**
* @author fenris
*/
function environment(): string;
}
/** /**
* @author fenris * @author fenris
*/ */
declare type type_pseudopointer<type_value> = { type type_pseudopointer<type_value> = {
value: type_value; value: type_value;
}; };
/** /**
@ -40,19 +52,6 @@ declare function pseudopointer_read<type_value>(pseudopointer: type_pseudopointe
* @author fenris * @author fenris
*/ */
declare function pseudopointer_write<type_value>(pseudopointer: type_pseudopointer<type_value>, value: type_value): void; declare function pseudopointer_write<type_value>(pseudopointer: type_pseudopointer<type_value>, value: type_value): void;
declare var process: any;
declare var require: any;
declare class Buffer {
constructor(x: string, modifier?: string);
toString(modifier?: string): string;
}
declare var java: any;
declare module lib_base {
/**
* @author fenris
*/
function environment(): string;
}
/** /**
* @author fenris * @author fenris
*/ */
@ -380,395 +379,9 @@ declare namespace lib_plankton.base {
* @author fenris * @author fenris
*/ */
function get_current_timestamp(rounded?: boolean): int; function get_current_timestamp(rounded?: boolean): int;
}
declare var plain_text_to_html: (text: string) => string;
/**
* @desc makes a valid
*/
declare var format_sentence: (str: string, rtl?: boolean, caseSense?: boolean) => string;
declare var fill_string_template: (template_string: string, object: any, fabric: Function, delimiter: string, default_string: string, sloppy: boolean) => string;
declare var make_string_template: (_template: string, _fabrics?: Object) => (object: {
[key: string]: string;
}) => string;
declare var make_eml_header: (object: {
[key: string]: string;
}) => string;
declare var make_eml_body: Object;
declare namespace lib_plankton.string {
/** /**
* @author neuc,frac
*/ */
function empty(str: string): boolean; function object_merge(core: Record<string, any>, mantle: Record<string, any>): Record<string, any>;
/**
* @desc returns a unique string
* @param {string} prefix an optional prefix for the generated string
* @return {string}
* @author fenris
*/
function generate(prefix?: string): string;
/**
* @desc splits a string, but returns an empty list, if the string is empty
* @param {string} chain
* @param {string} separator
* @return {Array<string>}
* @author fenris
*/
function split(chain: string, separator?: string): Array<string>;
/**
* @author neu3no
*/
function explode(str: string, needle: string, max: int): Array<string>;
/**
* @desc concats a given word with itself n times
* @param {string} word
* @param {int}
* @return {string}
* @author fenris
*/
function repeat(word: string, count: int): string;
/**
* @desc lengthens a string by repeatedly appending or prepending another string
* @param {string} word the string to pad
* @param {int} length the length, which the result shall have
* @param {string} symbol the string, which will be added (multiple times)
* @param {boolean} [prepend]; whether to prepend (~true) or append (~false); default: false
* @return {string} the padded string
* @author fenris
*/
function pad(word: string, length: int, symbol?: string, mode?: string): string;
/**
* @desc checks if a given string conttains a certain substring
* @param {string} string
* @param {string} part
* @return {boolean}
* @author fenris
*/
function contains(chain: string, part: string): boolean;
/**
* @desc checks if a given string starts with a certain substring
* @param {string} string
* @param {string} part
* @return {boolean}
* @author fenris
*/
function startsWith(chain: string, part: string): boolean;
/**
* @desc checks if a given string ends with a certain substring
* @param {string} string
* @param {string} part
* @return {boolean}
* @author fenris
*/
function endsWith(chain: string, part: string): boolean;
/**
* @desc count the occourrences of a string in a string
* @param string haystack_string the string wich should be examined
* @param string needle_string the string which should be counted
* @author neuc
*/
function count_occourrences(haystack_string: string, needle_string: string, check_escape: boolean): int;
/**
* @author fenris
*/
function replace(str: string, replacements: Array<{
from: string;
to: string;
}>, options?: {
debug?: boolean;
}): string;
/**
* @desc replaces occurences of "${name}" in a string by the corresponding values of an argument object
* @author fenris
*/
function coin(str: string, args: {
[id: string]: string;
}, options?: {
legacy?: boolean;
open?: string;
close?: string;
debug?: boolean;
}): string;
/**
* @author fenris
*/
function cut(str: string, length: int, delimiter?: string): string;
}
/**
* @deprecated
*/
declare namespace lib_string {
const empty: typeof lib_plankton.string.empty;
const generate: typeof lib_plankton.string.generate;
const split: typeof lib_plankton.string.split;
const explode: typeof lib_plankton.string.repeat;
const repeat: typeof lib_plankton.string.repeat;
const pad: typeof lib_plankton.string.pad;
const contains: typeof lib_plankton.string.contains;
const startsWith: typeof lib_plankton.string.startsWith;
const endsWith: typeof lib_plankton.string.endsWith;
const count_occourrences: typeof lib_plankton.string.count_occourrences;
const coin: typeof lib_plankton.string.coin;
const stance: typeof lib_plankton.string.coin;
const cut: typeof lib_plankton.string.cut;
}
declare module lib_string {
/**
* an implementation of c sprintf
* @param {string} string format string
* @param {array} args arguments which should be filled into
* @returns {string}
*/
var sprintf: (input: string, args?: Array<any>, original?: any) => string;
/**
* an implementation of c printf
* @param {string} string format string
* @param {array} args arguments which should be filled into
* @returns {string}
*/
function printf(format: any, args: any): void;
}
declare var sprintf: (input: string, args?: Array<any>, original?: any) => string;
declare var printf: typeof lib_string.printf;
declare var eml_log: any;
declare var track_exports: any;
declare var make_logger: (prefix: any, current_loglevel: any) => (obj: any, lvl: any) => void;
declare module lib_code {
/**
* @author fenris
*/
interface interface_code<type_from, type_to> {
/**
* @author fenris
*/
encode(x: type_from): type_to;
/**
* @author fenris
*/
decode(x: type_to): type_from;
}
}
declare module lib_code {
/**
* @author fenris
*/
type type_code<type_from, type_to> = {
/**
* @author fenris
*/
encode: (x: type_from) => type_to;
/**
* @author fenris
*/
decode: (x: type_to) => type_from;
};
}
declare module lib_code {
/**
* @author fenris
*/
function inverse_encode<type_from, type_to>(decode: (to: type_to) => type_from, to: type_to): type_from;
/**
* @author fenris
*/
function inverse_decode<type_from, type_to>(encode: (from: type_from) => type_to, from: type_from): type_to;
}
declare module lib_code {
/**
* @author fenris
*/
class class_code_inverse<type_from, type_to> implements interface_code<type_to, type_from> {
/**
* @author fenris
*/
protected subject: interface_code<type_from, type_to>;
/**
* @author fenris
*/
constructor(subject: interface_code<type_from, type_to>);
/**
* @implementation
* @author fenris
*/
encode(to: type_to): type_from;
/**
* @implementation
* @author fenris
*/
decode(from: type_from): type_to;
}
}
declare module lib_code {
/**
* @author fenris
*/
function pair_encode<type_from, type_between, type_to>(encode_first: (from: type_from) => type_between, encode_second: (between: type_between) => type_to, from: type_from): type_to;
/**
* @author fenris
*/
function pair_decode<type_from, type_between, type_to>(decode_first: (between: type_between) => type_from, decode_second: (to: type_to) => type_between, to: type_to): type_from;
}
declare module lib_code {
/**
* @author fenris
*/
class class_code_pair<type_from, type_between, type_to> implements interface_code<type_from, type_to> {
/**
* @author fenris
*/
protected first: interface_code<type_from, type_between>;
/**
* @author fenris
*/
protected second: interface_code<type_between, type_to>;
/**
* @author fenris
*/
constructor(first: interface_code<type_from, type_between>, second: interface_code<type_between, type_to>);
/**
* @implementation
* @author fenris
*/
encode(from: type_from): type_to;
/**
* @implementation
* @author fenris
*/
decode(to: type_to): type_from;
}
}
declare module lib_code {
/**
* @author fenris
*/
function chain_encode(encode_links: Array<(from: any) => any>, from: any): any;
/**
* @author fenris
*/
function chain_decode(decode_links: Array<(to: any) => any>, to: any): any;
}
declare module lib_code {
/**
* @author fenris
*/
class class_code_chain implements interface_code<any, any> {
/**
* @author fenris
*/
protected links: Array<interface_code<any, any>>;
/**
* @author fenris
*/
constructor(links: Array<interface_code<any, any>>);
/**
* @implementation
* @author fenris
*/
encode(from: any): any;
/**
* @implementation
* @author fenris
*/
decode(to: any): any;
}
}
declare module lib_code {
/**
* @author Christian Fraß <frass@greenscale.de>
*/
type type_flatten_from = Array<{
[name: string]: any;
}>;
/**
* @author Christian Fraß <frass@greenscale.de>
*/
type type_flatten_to = {
keys: Array<string>;
data: Array<Array<any>>;
};
/**
* @author Christian Fraß <frass@greenscale.de>
*/
function flatten_encode(from: type_flatten_from, keys?: Array<string>): type_flatten_to;
/**
* @author Christian Fraß <frass@greenscale.de>
*/
function flatten_decode(to: type_flatten_to): type_flatten_from;
}
declare module lib_code {
/**
* @author fenris
*/
class class_code_flatten implements interface_code<type_flatten_from, type_flatten_to> {
/**
* @author fenris
*/
constructor();
/**
* @implementation
* @author fenris
*/
encode(x: type_flatten_from): type_flatten_to;
/**
* @implementation
* @author fenris
*/
decode(x: type_flatten_to): type_flatten_from;
}
}
declare module lib_json {
/**
* @author fenris
*/
function encode(x: any, formatted?: boolean): string;
/**
* @author fenris
*/
function decode(x: string): any;
}
declare module lib_json {
/**
* @author fenris
*/
class class_json implements lib_code.interface_code<any, string> {
/**
* @author fenris
*/
constructor();
/**
* @implementation
* @author fenris
*/
encode(x: any): string;
/**
* @implementation
* @author fenris
*/
decode(x: string): any;
}
}
declare namespace lib_plankton.file {
/**
* @author fenris
*/
function read(path: string): Promise<string>;
/**
* @author fenris
*/
function read_buffer(path: string): Promise<Buffer>;
/**
* @author fenris
*/
function read_stdin(): Promise<string>;
/**
* @author fenris
*/
function write(path: string, content: string, options?: {
encoding?: string;
}): Promise<void>;
/**
* @author fenris
*/
function write_buffer(path: string, content: Buffer, options?: {}): Promise<void>;
} }
declare namespace lib_plankton.log { declare namespace lib_plankton.log {
/** /**
@ -798,7 +411,7 @@ declare namespace lib_plankton.log {
* @deprecated * @deprecated
* @todo remove * @todo remove
*/ */
declare module lib_log { declare namespace lib_plankton.log {
function level_push(level: int): void; function level_push(level: int): void;
function level_pop(): void; function level_pop(): void;
function indent_push(indent: int): void; function indent_push(indent: int): void;
@ -926,7 +539,393 @@ declare namespace lib_plankton.log {
*/ */
function error(incident: string, details?: Record<string, any>): void; function error(incident: string, details?: Record<string, any>): void;
} }
declare namespace lib_args { declare var plain_text_to_html: (text: string) => string;
/**
* @desc makes a valid
*/
declare var format_sentence: (str: string, rtl?: boolean, caseSense?: boolean) => string;
declare var fill_string_template: (template_string: string, object: any, fabric: Function, delimiter: string, default_string: string, sloppy: boolean) => string;
declare var make_string_template: (_template: string, _fabrics?: Object) => (object: {
[key: string]: string;
}) => string;
declare var make_eml_header: (object: {
[key: string]: string;
}) => string;
declare var make_eml_body: Object;
declare namespace lib_plankton.string {
/**
* @author neuc,frac
*/
function empty(str: string): boolean;
/**
* @desc returns a unique string
* @param {string} prefix an optional prefix for the generated string
* @return {string}
* @author fenris
*/
function generate(prefix?: string): string;
/**
* @desc splits a string, but returns an empty list, if the string is empty
* @param {string} chain
* @param {string} separator
* @return {Array<string>}
* @author fenris
*/
function split(chain: string, separator?: string): Array<string>;
/**
* @author neu3no
*/
function explode(str: string, needle: string, max: int): Array<string>;
/**
* @desc concats a given word with itself n times
* @param {string} word
* @param {int}
* @return {string}
* @author fenris
*/
function repeat(word: string, count: int): string;
/**
* @desc lengthens a string by repeatedly appending or prepending another string
* @param {string} word the string to pad
* @param {int} length the length, which the result shall have
* @param {string} symbol the string, which will be added (multiple times)
* @param {boolean} [prepend]; whether to prepend (~true) or append (~false); default: false
* @return {string} the padded string
* @author fenris
*/
function pad(word: string, length: int, symbol?: string, mode?: string): string;
/**
* @desc checks if a given string conttains a certain substring
* @param {string} string
* @param {string} part
* @return {boolean}
* @author fenris
*/
function contains(chain: string, part: string): boolean;
/**
* @desc checks if a given string starts with a certain substring
* @param {string} string
* @param {string} part
* @return {boolean}
* @author fenris
*/
function startsWith(chain: string, part: string): boolean;
/**
* @desc checks if a given string ends with a certain substring
* @param {string} string
* @param {string} part
* @return {boolean}
* @author fenris
*/
function endsWith(chain: string, part: string): boolean;
/**
* @desc count the occourrences of a string in a string
* @param string haystack_string the string wich should be examined
* @param string needle_string the string which should be counted
* @author neuc
*/
function count_occourrences(haystack_string: string, needle_string: string, check_escape: boolean): int;
/**
* @author fenris
*/
function replace(str: string, replacements: Array<{
from: string;
to: string;
}>, options?: {}): string;
/**
* @desc replaces occurences of "${name}" in a string by the corresponding values of an argument object
* @author fenris
*/
function coin(str: string, args: {
[id: string]: string;
}, options?: {
legacy?: boolean;
open?: string;
close?: string;
}): string;
/**
* @author fenris
*/
function cut(str: string, length: int, delimiter?: string): string;
}
/**
* @deprecated
*/
declare namespace lib_string {
const empty: typeof lib_plankton.string.empty;
const generate: typeof lib_plankton.string.generate;
const split: typeof lib_plankton.string.split;
const explode: typeof lib_plankton.string.repeat;
const repeat: typeof lib_plankton.string.repeat;
const pad: typeof lib_plankton.string.pad;
const contains: typeof lib_plankton.string.contains;
const startsWith: typeof lib_plankton.string.startsWith;
const endsWith: typeof lib_plankton.string.endsWith;
const count_occourrences: typeof lib_plankton.string.count_occourrences;
const coin: typeof lib_plankton.string.coin;
const stance: typeof lib_plankton.string.coin;
const cut: typeof lib_plankton.string.cut;
}
declare namespace lib_plankton.string {
/**
* an implementation of c sprintf
* @param {string} string format string
* @param {array} args arguments which should be filled into
* @returns {string}
*/
var sprintf: (input: string, args?: Array<any>, original?: any) => string;
/**
* an implementation of c printf
* @param {string} string format string
* @param {array} args arguments which should be filled into
* @returns {string}
*/
function printf(format: any, args: any): void;
}
declare var sprintf: (input: string, args?: Array<any>, original?: any) => string;
declare var printf: typeof lib_plankton.string.printf;
declare var eml_log: any;
declare var track_exports: any;
declare var make_logger: (prefix: any, current_loglevel: any) => (obj: any, lvl: any) => void;
declare namespace lib_plankton.code {
/**
* @author fenris
*/
interface interface_code<type_from, type_to> {
/**
* @author fenris
*/
encode(x: type_from): type_to;
/**
* @author fenris
*/
decode(x: type_to): type_from;
}
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
type type_code<type_from, type_to> = {
/**
* @author fenris
*/
encode: (x: type_from) => type_to;
/**
* @author fenris
*/
decode: (x: type_to) => type_from;
};
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
function inverse_encode<type_from, type_to>(decode: (to: type_to) => type_from, to: type_to): type_from;
/**
* @author fenris
*/
function inverse_decode<type_from, type_to>(encode: (from: type_from) => type_to, from: type_from): type_to;
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
class class_code_inverse<type_from, type_to> implements interface_code<type_to, type_from> {
/**
* @author fenris
*/
protected subject: interface_code<type_from, type_to>;
/**
* @author fenris
*/
constructor(subject: interface_code<type_from, type_to>);
/**
* @implementation
* @author fenris
*/
encode(to: type_to): type_from;
/**
* @implementation
* @author fenris
*/
decode(from: type_from): type_to;
}
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
function pair_encode<type_from, type_between, type_to>(encode_first: (from: type_from) => type_between, encode_second: (between: type_between) => type_to, from: type_from): type_to;
/**
* @author fenris
*/
function pair_decode<type_from, type_between, type_to>(decode_first: (between: type_between) => type_from, decode_second: (to: type_to) => type_between, to: type_to): type_from;
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
class class_code_pair<type_from, type_between, type_to> implements interface_code<type_from, type_to> {
/**
* @author fenris
*/
protected first: interface_code<type_from, type_between>;
/**
* @author fenris
*/
protected second: interface_code<type_between, type_to>;
/**
* @author fenris
*/
constructor(first: interface_code<type_from, type_between>, second: interface_code<type_between, type_to>);
/**
* @implementation
* @author fenris
*/
encode(from: type_from): type_to;
/**
* @implementation
* @author fenris
*/
decode(to: type_to): type_from;
}
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
function chain_encode(encode_links: Array<(from: any) => any>, from: any): any;
/**
* @author fenris
*/
function chain_decode(decode_links: Array<(to: any) => any>, to: any): any;
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
class class_code_chain implements interface_code<any, any> {
/**
* @author fenris
*/
protected links: Array<interface_code<any, any>>;
/**
* @author fenris
*/
constructor(links: Array<interface_code<any, any>>);
/**
* @implementation
* @author fenris
*/
encode(from: any): any;
/**
* @implementation
* @author fenris
*/
decode(to: any): any;
}
}
declare namespace lib_plankton.code {
/**
* @author Christian Fraß <frass@greenscale.de>
*/
type type_flatten_from = Array<{
[name: string]: any;
}>;
/**
* @author Christian Fraß <frass@greenscale.de>
*/
type type_flatten_to = {
keys: Array<string>;
data: Array<Array<any>>;
};
/**
* @author Christian Fraß <frass@greenscale.de>
*/
function flatten_encode(from: type_flatten_from, keys?: Array<string>): type_flatten_to;
/**
* @author Christian Fraß <frass@greenscale.de>
*/
function flatten_decode(to: type_flatten_to): type_flatten_from;
}
declare namespace lib_plankton.code {
/**
* @author fenris
*/
class class_code_flatten implements interface_code<type_flatten_from, type_flatten_to> {
/**
* @author fenris
*/
constructor();
/**
* @implementation
* @author fenris
*/
encode(x: type_flatten_from): type_flatten_to;
/**
* @implementation
* @author fenris
*/
decode(x: type_flatten_to): type_flatten_from;
}
}
declare namespace lib_plankton.json {
/**
* @author fenris
*/
function encode(x: any, formatted?: boolean): string;
/**
* @author fenris
*/
function decode(x: string): any;
}
declare namespace lib_plankton.json {
/**
* @author fenris
*/
class class_json implements lib_plankton.code.interface_code<any, string> {
/**
* @author fenris
*/
constructor();
/**
* @implementation
* @author fenris
*/
encode(x: any): string;
/**
* @implementation
* @author fenris
*/
decode(x: string): any;
}
}
declare namespace lib_plankton.file {
/**
* @author fenris
*/
function read(path: string): Promise<string>;
/**
* @author fenris
*/
function read_buffer(path: string): Promise<Buffer>;
/**
* @author fenris
*/
function read_stdin(): Promise<string>;
/**
* @author fenris
*/
function write(path: string, content: string, options?: {
encoding?: string;
}): Promise<void>;
/**
* @author fenris
*/
function write_buffer(path: string, content: Buffer, options?: {}): Promise<void>;
}
declare namespace lib_plankton.args {
/** /**
*/ */
enum enum_environment { enum enum_environment {
@ -954,7 +953,7 @@ declare namespace lib_args {
accumulate = "accumulate" accumulate = "accumulate"
} }
} }
declare module lib_args { declare namespace lib_plankton.args {
/** /**
* @author fenris * @author fenris
*/ */
@ -1091,7 +1090,7 @@ declare module lib_args {
generate_help(): string; generate_help(): string;
} }
} }
declare module lib_args { declare namespace lib_plankton.args {
/** /**
* @author fenris * @author fenris
*/ */
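The declarations above also cover lib_plankton.string.coin, the templating helper that all of the output renderers below rely on. A minimal usage sketch (values made up for illustration; the default placeholder delimiters appear to be "{{" and "}}", given how the renderers call it):

	// hypothetical call, mirroring the pattern used by the renderers below
	const column : string = lib_plankton.string.coin(
		"`{{name}}` {{parameters}}",
		{
			"name": "city",
			"parameters": "VARCHAR(255) NULL",
		}
	);
	// expected result: "`city` VARCHAR(255) NULL"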

File diff suppressed because it is too large

View file

@ -1,7 +1,8 @@
/** /**
* @todo generate generic * @todo generate generic
*/ */
function schema( function input_schema(
) : any ) : any
{ {
return { return {
@ -118,3 +119,67 @@ function schema(
] ]
} }
} }
/**
*/
function input_normalize(
input_raw : any
) : type_input
{
// validate
if (! input_raw.hasOwnProperty("domains")) {
throw (new Error("input node is missing mandatory field 'domains'"));
}
else {
// sanitize
return {
"domains": (
input_raw["domains"]
.map(
domain_raw => ({
"name": domain_raw["name"],
"description": (domain_raw["description"] ?? null),
"key_field": (
(domain_raw.hasOwnProperty("key_field") && (domain_raw["key_field"] !== null))
? {
"name": domain_raw["key_field"]["name"],
"description": (domain_raw["key_field"]["description"] ?? null),
}
: null
),
"data_fields": (
(domain_raw.hasOwnProperty("data_fields") && (domain_raw["data_fields"] !== null))
? (
domain_raw["data_fields"]
.map(
data_field_raw => ({
"name": data_field_raw["name"],
"description": (data_field_raw["description"] ?? null),
"type": data_field_raw["type"],
"nullable": (data_field_raw["nullable"] ?? true),
"default": data_field_raw["default"],
})
)
)
: []
),
"constraints": (
(domain_raw.hasOwnProperty("constraints") && (domain_raw["constraints"] !== null))
? (
domain_raw["constraints"]
.map(
constraint_raw => ({
"kind": constraint_raw["kind"],
"parameters": (constraint_raw["parameters"] ?? {}),
})
)
)
: []
),
})
)
),
};
}
}
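As a rough illustration of how input_normalize fills in omitted optional fields, a hand-traced example (hypothetical raw input; not from an actual run):

	// hypothetical raw input with most optional fields left out
	const raw : any = {
		"domains": [
			{
				"name": "note",
				"data_fields": [
					{"name": "text", "type": "string_long"}
				]
			}
		]
	};
	const normalized : type_input = input_normalize(raw);
	// traced result:
	// "description" and "key_field" become null, "constraints" becomes [],
	// "nullable" defaults to true, and "default" ends up as undefined
	// (which the renderers below detect via (data_field.default === undefined)).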

View file

@ -7,15 +7,17 @@ async function main(
const outputs : Record<string, type_output> = { const outputs : Record<string, type_output> = {
"sqlite": output_sqlite, "sqlite": output_sqlite,
"mysql": output_mysql, "mysql": output_mysql,
"typescript": output_typescript,
"jsonschema": output_jsonschema,
}; };
const arg_handler = new lib_args.class_handler( const arg_handler = new lib_plankton.args.class_handler(
{ {
"format": new lib_args.class_argument({ "format": new lib_plankton.args.class_argument({
"name": "format", "name": "format",
"type": lib_args.enum_type.string, "type": lib_plankton.args.enum_type.string,
"kind": lib_args.enum_kind.volatile, "kind": lib_plankton.args.enum_kind.volatile,
"mode": lib_args.enum_mode.replace, "mode": lib_plankton.args.enum_mode.replace,
"default": "sqlite", "default": "sqlite",
"parameters": { "parameters": {
"indicators_long": ["format"], "indicators_long": ["format"],
@ -23,11 +25,11 @@ async function main(
}, },
"info": "output format", "info": "output format",
}), }),
"schema": new lib_args.class_argument({ "schema": new lib_plankton.args.class_argument({
"name": "schema", "name": "schema",
"type": lib_args.enum_type.boolean, "type": lib_plankton.args.enum_type.boolean,
"kind": lib_args.enum_kind.volatile, "kind": lib_plankton.args.enum_kind.volatile,
"mode": lib_args.enum_mode.replace, "mode": lib_plankton.args.enum_mode.replace,
"default": false, "default": false,
"parameters": { "parameters": {
"indicators_long": ["schema"], "indicators_long": ["schema"],
@ -35,11 +37,11 @@ async function main(
}, },
"info": "print sindri JSON schema to stdout and exit", "info": "print sindri JSON schema to stdout and exit",
}), }),
"help": new lib_args.class_argument({ "help": new lib_plankton.args.class_argument({
"name": "help", "name": "help",
"type": lib_args.enum_type.boolean, "type": lib_plankton.args.enum_type.boolean,
"kind": lib_args.enum_kind.volatile, "kind": lib_plankton.args.enum_kind.volatile,
"mode": lib_args.enum_mode.replace, "mode": lib_plankton.args.enum_mode.replace,
"default": false, "default": false,
"parameters": { "parameters": {
"indicators_long": ["help"], "indicators_long": ["help"],
@ -49,7 +51,7 @@ async function main(
}), }),
} }
); );
const args : Record<string, any> = arg_handler.read(lib_args.enum_environment.cli, args_raw.join(" ")); const args : Record<string, any> = arg_handler.read(lib_plankton.args.enum_environment.cli, args_raw.join(" "));
if (args["help"]) { if (args["help"]) {
process.stdout.write( process.stdout.write(
@ -66,14 +68,13 @@ async function main(
else { else {
if (args["schema"]) { if (args["schema"]) {
process.stdout.write( process.stdout.write(
JSON.stringify(schema(), undefined, "\t") JSON.stringify(input_schema(), undefined, "\t")
); );
} }
else { else {
const input_content : string = await lib_plankton.file.read_stdin(); const input_content : string = await lib_plankton.file.read_stdin();
const input_data : type_input_data = lib_json.decode(input_content); const input_data_raw : any = lib_plankton.json.decode(input_content);
const input_data : type_input = input_normalize(input_data_raw);
// TODO: sanitize & normalize input_data
if (! outputs.hasOwnProperty(args["format"])) { if (! outputs.hasOwnProperty(args["format"])) {
throw (new Error("unhandled output format: " + args["format"])); throw (new Error("unhandled output format: " + args["format"]));
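Taken together, the flow of main() after this change is roughly the following condensed sketch (the final write of the rendered result lies outside the hunks shown here and is assumed):

	// condensed sketch of main(args_raw) after this commit (inside the async function)
	const input_content : string = await lib_plankton.file.read_stdin();
	const input_data_raw : any = lib_plankton.json.decode(input_content);
	const input_data : type_input = input_normalize(input_data_raw);
	if (! outputs.hasOwnProperty(args["format"])) {
		throw (new Error("unhandled output format: " + args["format"]));
	}
	else {
		// assumed: the rendered result is written to stdout
		process.stdout.write(outputs[args["format"]].render(input_data));
	}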

View file

@ -0,0 +1,82 @@
const output_jsonschema : type_output = {
"render": function (input_data) {
return lib_plankton.json.encode(
Object.fromEntries(
input_data.domains.map(
domain => ([
domain.name,
{
"type": ["array"],
"items": {
"type": ["object"],
"additionalProperties": false,
"properties": Object.fromEntries(
[]
.concat(
(domain.key_field === null)
? []
: [
[
domain.key_field.name,
{
"type": ["integer"],
"description": (domain.key_field.description ?? undefined),
}
]
]
)
.concat(
domain.data_fields
.map(
data_field => ([
data_field.name,
{
"type": (
[]
.concat(
data_field.nullable
? ["null"]
: []
)
.concat(
[
{
"boolean": "boolean",
"integer": "integer",
"float": "number",
"string_short": "string",
"string_medium": "string",
"string_long": "string",
}[data_field.type]
]
)
),
"description": (data_field.description ?? undefined),
}
])
)
)
),
"required": (
[]
.concat(
(domain.key_field === null)
? []
: [domain.key_field.name]
)
.concat(
domain.data_fields
.map(
data_field => data_field.name
)
)
)
},
}
])
)
),
true
);
},
};
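Hand-tracing output_jsonschema.render against the example input added in this commit: the rendered document is an object keyed by domain name; its "person" entry should look roughly like this (shown as a TypeScript literal, not the output of an actual run):

	// expected excerpt for the "person" domain (key_field is null, so no id property)
	const person_schema = {
		"type": ["array"],
		"items": {
			"type": ["object"],
			"additionalProperties": false,
			"properties": {
				"prename": {"type": ["string"], "description": "first name of the person"},
				"surname": {"type": ["string"], "description": "last name of the person"},
				"address_id": {"type": ["integer"], "description": "reference to the associated address dataset"},
				"email_address": {"type": ["null", "string"], "description": "optional eMail address"}
			},
			"required": ["prename", "surname", "address_id", "email_address"]
		}
	};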

View file

@ -1,23 +1,48 @@
function mysql_value_encode(
value : any
) : string
{
if (value === null) {
return "NULL";
}
else {
switch (typeof(value)) {
case "boolean": {
return (value ? "TRUE" : "FALSE");
break;
}
case "number": {
return value.toString();
break;
}
case "string": {
return ("'" + value + "'");
break;
}
default: {
throw (new Error("unhandled"));
break;
}
}
}
}
const output_mysql : type_output = { const output_mysql : type_output = {
"render": function (input_data) { "render": function (input_data) {
return ( return (
input_data["domains"] input_data.domains
.map( .map(
(domain) => lib_plankton.string.coin( (domain) => lib_plankton.string.coin(
"CREATE TABLE\n\t`{{name}}`(\n{{entries}}\n\t){{comment}}\n;", "CREATE TABLE\n\t`{{name}}`(\n{{entries}}\n\t){{comment}}\n;",
{ {
"name": domain["name"], "name": domain.name,
"comment": ( "comment": (
( (domain.description === null)
! domain.hasOwnProperty("description")
||
(domain["description"] === null)
)
? "" ? ""
: lib_plankton.string.coin( : lib_plankton.string.coin(
" COMMENT '{{comment}}'", " COMMENT '{{comment}}'",
{ {
"comment": domain["description"], "comment": domain.description,
} }
) )
), ),
@ -26,12 +51,12 @@ const output_mysql : type_output = {
[] []
// key field // key field
.concat( .concat(
(domain["key_field"] === null) (domain.key_field === null)
? [] ? []
: lib_plankton.string.coin( : lib_plankton.string.coin(
"`{{name}}` {{parameters}}", "`{{name}}` {{parameters}}",
{ {
"name": domain["key_field"]["name"], "name": domain.key_field.name,
"parameters": ( "parameters": (
[ [
"INTEGER", "INTEGER",
@ -39,17 +64,13 @@ const output_mysql : type_output = {
"AUTO INCREMENT", "AUTO INCREMENT",
] ]
.concat( .concat(
( (domain.key_field.description === null)
! domain["key_field"].hasOwnProperty("description")
||
(domain["key_field"]["description"] === null)
)
? [] ? []
: [ : [
lib_plankton.string.coin( lib_plankton.string.coin(
"COMMENT '{{comment}}'", "COMMENT '{{comment}}'",
{ {
"comment": domain["key_field"]["description"], "comment": domain.key_field.description,
} }
), ),
] ]
@ -61,12 +82,12 @@ const output_mysql : type_output = {
) )
// data fields // data fields
.concat( .concat(
domain["data_fields"] domain.data_fields
.map( .map(
(data_field) => lib_plankton.string.coin( (data_field) => lib_plankton.string.coin(
"`{{name}}` {{parameters}}", "`{{name}}` {{parameters}}",
{ {
"name": data_field["name"], "name": data_field.name,
"parameters": ( "parameters": (
( (
// type // type
@ -77,27 +98,36 @@ const output_mysql : type_output = {
"string_short": "VARCHAR(63)", "string_short": "VARCHAR(63)",
"string_medium": "VARCHAR(255)", "string_medium": "VARCHAR(255)",
"string_long": "TEXT", "string_long": "TEXT",
}[data_field["type"]], }[data_field.type],
] ]
// nullability // nullability
.concat( .concat(
data_field["nullable"] data_field.nullable
? ["NULL"] ? ["NULL"]
: [] : []
) )
// default
.concat(
(data_field.default === undefined)
? []
: [
lib_plankton.string.coin(
"DEFAULT {{value}}",
{
"value": mysql_value_encode(data_field.default),
}
),
]
)
// comment // comment
.concat( .concat(
( (data_field.description === null)
! data_field.hasOwnProperty("description")
||
(data_field["description"] === null)
)
? [] ? []
: [ : [
lib_plankton.string.coin( lib_plankton.string.coin(
"COMMENT '{{comment}}'", "COMMENT '{{comment}}'",
{ {
"comment": data_field["description"], "comment": data_field.description,
} }
), ),
] ]
@ -114,9 +144,9 @@ const output_mysql : type_output = {
domain["constraints"] domain["constraints"]
.map( .map(
(constraint) => { (constraint) => {
switch (constraint["kind"]) { switch (constraint.kind) {
default: { default: {
throw (new Error("unhandled constraint kind: " + constraint["kind"])); throw (new Error("unhandled constraint kind: " + constraint.kind));
break; break;
} }
case "foreign_key": { case "foreign_key": {
@ -124,13 +154,13 @@ const output_mysql : type_output = {
"FOREIGN KEY ({{fields}}) REFERENCES `{{reference_name}}`({{reference_fields}})", "FOREIGN KEY ({{fields}}) REFERENCES `{{reference_name}}`({{reference_fields}})",
{ {
"fields": ( "fields": (
constraint["parameters"]["fields"] constraint.parameters["fields"]
.map(x => ('`' + x + '`')) .map(x => ('`' + x + '`'))
.join(",") .join(",")
), ),
"reference_name": constraint["parameters"]["reference"]["name"], "reference_name": constraint.parameters["reference"]["name"],
"reference_fields": ( "reference_fields": (
constraint["parameters"]["reference"]["fields"] constraint.parameters["reference"]["fields"]
.map(x => ('`' + x + '`')) .map(x => ('`' + x + '`'))
.join(",") .join(",")
), ),
@ -143,7 +173,7 @@ const output_mysql : type_output = {
"UNIQUE ({{fields}})", "UNIQUE ({{fields}})",
{ {
"fields": ( "fields": (
constraint["parameters"]["fields"] constraint.parameters["fields"]
.map(x => ('`' + x + '`')) .map(x => ('`' + x + '`'))
.join(",") .join(",")
), ),
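A few hand-traced samples of the new default handling, using values from the example input added in this commit (not from an actual run):

	// what mysql_value_encode yields for the value kinds it handles
	mysql_value_encode(null);    // -> "NULL"
	mysql_value_encode(true);    // -> "TRUE"
	mysql_value_encode(42);      // -> "42"
	mysql_value_encode("foo");   // -> "'foo'" (note: the string is quoted but not escaped)
	// resulting column definition for the example's "email_address" field
	// (string_medium, nullable, "default": null, with a description):
	// `email_address` VARCHAR(255) NULL DEFAULT NULL COMMENT 'optional eMail address'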

View file

@ -1,3 +1,32 @@
function sqlite_value_encode(
value : any
) : string
{
if (value === null) {
return "NULL";
}
else {
switch (typeof(value)) {
case "boolean": {
return (value ? "TRUE" : "FALSE");
break;
}
case "number": {
return value.toString();
break;
}
case "string": {
return ("'" + value + "'");
break;
}
default: {
throw (new Error("unhandled"));
break;
}
}
}
}
const output_sqlite : type_output = { const output_sqlite : type_output = {
"render": function (input_data) { "render": function (input_data) {
return ( return (
@ -6,18 +35,18 @@ const output_sqlite : type_output = {
(domain) => lib_plankton.string.coin( (domain) => lib_plankton.string.coin(
"CREATE TABLE\n\t`{{name}}`(\n{{entries}}\n\t)\n;", "CREATE TABLE\n\t`{{name}}`(\n{{entries}}\n\t)\n;",
{ {
"name": domain["name"], "name": domain.name,
"entries": ( "entries": (
( (
[] []
// key field // key field
.concat( .concat(
(domain["key_field"] === null) (domain.key_field === null)
? [] ? []
: lib_plankton.string.coin( : lib_plankton.string.coin(
"`{{name}}` {{parameters}}", "`{{name}}` {{parameters}}",
{ {
"name": domain["key_field"]["name"], "name": domain.key_field.name,
"parameters": ( "parameters": (
[ [
"INTEGER", "INTEGER",
@ -31,12 +60,12 @@ const output_sqlite : type_output = {
) )
// data fields // data fields
.concat( .concat(
domain["data_fields"] domain.data_fields
.map( .map(
(data_field) => lib_plankton.string.coin( (data_field) => lib_plankton.string.coin(
"`{{name}}` {{parameters}}", "`{{name}}` {{parameters}}",
{ {
"name": data_field["name"], "name": data_field.name,
"parameters": ( "parameters": (
( (
// type // type
@ -47,14 +76,27 @@ const output_sqlite : type_output = {
"string_short": "VARCHAR(63)", "string_short": "VARCHAR(63)",
"string_medium": "VARCHAR(255)", "string_medium": "VARCHAR(255)",
"string_long": "TEXT", "string_long": "TEXT",
}[data_field["type"]], }[data_field.type],
] ]
// nullability // nullability
.concat( .concat(
data_field["nullable"] data_field.nullable
? ["NULL"] ? ["NULL"]
: [] : []
) )
// default
.concat(
(data_field.default === undefined)
? []
: [
lib_plankton.string.coin(
"DEFAULT {{value}}",
{
"value": sqlite_value_encode(data_field.default),
}
),
]
)
) )
.join(" ") .join(" ")
) )
@ -64,12 +106,12 @@ const output_sqlite : type_output = {
) )
// constraints // constraints
.concat( .concat(
domain["constraints"] domain.constraints
.map( .map(
(constraint) => { (constraint) => {
switch (constraint["kind"]) { switch (constraint.kind) {
default: { default: {
throw (new Error("unhandled constraint kind: " + constraint["kind"])); throw (new Error("unhandled constraint kind: " + constraint.kind));
break; break;
} }
case "foreign_key": { case "foreign_key": {
@ -77,13 +119,13 @@ const output_sqlite : type_output = {
"FOREIGN KEY ({{fields}}) REFERENCES `{{reference_name}}`({{reference_fields}})", "FOREIGN KEY ({{fields}}) REFERENCES `{{reference_name}}`({{reference_fields}})",
{ {
"fields": ( "fields": (
constraint["parameters"]["fields"] constraint.parameters["fields"]
.map(x => ('`' + x + '`')) .map(x => ('`' + x + '`'))
.join(",") .join(",")
), ),
"reference_name": constraint["parameters"]["reference"]["name"], "reference_name": constraint.parameters["reference"]["name"],
"reference_fields": ( "reference_fields": (
constraint["parameters"]["reference"]["fields"] constraint.parameters["reference"]["fields"]
.map(x => ('`' + x + '`')) .map(x => ('`' + x + '`'))
.join(",") .join(",")
), ),
@ -96,7 +138,7 @@ const output_sqlite : type_output = {
"UNIQUE ({{fields}})", "UNIQUE ({{fields}})",
{ {
"fields": ( "fields": (
constraint["parameters"]["fields"] constraint.parameters["fields"]
.map(x => ('`' + x + '`')) .map(x => ('`' + x + '`'))
.join(",") .join(",")
), ),
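The sqlite renderer picks up the same default handling; hand-traced column lines for two fields of the example input (the sqlite output carries no COMMENT clauses; not from an actual run):

	// "address_id" (integer, not nullable, no "default" key -> data_field.default === undefined, so no clause):
	// `address_id` INTEGER
	// "email_address" (string_medium, nullable, "default": null):
	// `email_address` VARCHAR(255) NULL DEFAULT NULL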

View file

@ -0,0 +1,73 @@
const output_typescript : type_output = {
"render": function (input_data) {
return (
input_data["domains"]
.map(
(domain) => lib_plankton.string.coin(
"type type_{{name}} = {\n{{fields}}\n};\nlet collection_{{name}} : {{collection_type}} = {{collection_value}};\n",
{
"name": domain.name,
"fields": (
domain.data_fields
.map(
(data_field) => lib_plankton.string.coin(
"\t{{name}} : {{type}};{{macro_comment}}",
{
"name": data_field.name,
"type": lib_plankton.string.coin(
(data_field.nullable ? "(null | {{core}})" : "{{core}}"),
{
"core": {
"boolean": "boolean",
"integer": "number",
"float": "number",
"string_short": "string",
"string_medium": "string",
"string_long": "string",
}[data_field["type"]],
}
),
"macro_comment": (
(data_field.description !== null)
? lib_plankton.string.coin(
" // {{comment}}",
{
"comment": data_field.description,
}
)
: ""
),
}
)
)
// .map(x => ("\t" + x))
.join("\n")
),
"collection_type": (
(domain.key_field === null)
? lib_plankton.string.coin(
"Array<type_{{name}}>",
{
"name": domain.name,
}
)
: lib_plankton.string.coin(
"Record<number, type_{{name}}>",
{
"name": domain.name,
}
)
),
"collection_value": (
(domain.key_field === null)
? "[]"
: "{}"
),
}
)
)
.map(x => (x + "\n"))
.join("\n")
);
},
};
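Hand-tracing output_typescript.render against the example input added in this commit should yield roughly the following generated code (descriptions become trailing comments; the key field only affects the collection type; not from an actual run):

	type type_address = {
		city : string; // the name of the city
		zip : string; // the postal code
		street : string; // the name of the street and the house number
	};
	let collection_address : Record<number, type_address> = {};

	type type_person = {
		prename : string; // first name of the person
		surname : string; // last name of the person
		address_id : number; // reference to the associated address dataset
		email_address : (null | string); // optional eMail address
	};
	let collection_person : Array<type_person> = [];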

View file

@ -1,33 +1,33 @@
/** /**
*/ */
type type_input_data = { type type_input = {
domains : Array< domains : Array<
{ {
name : string; name : string;
key_field ?: ( description : (null | string);
key_field : (
null null
| |
{ {
name : string; name : string;
comment ?: (null | string); description ?: (null | string);
} }
); );
data_fields ?: Array< data_fields : Array<
{ {
name : string; name : string;
type : string; description : (null | string);
nullable ?: boolean; type : ("boolean" | "integer" | "float" | "string_short" | "string_medium" | "string_long");
default ?: any; nullable : boolean;
comment ?: (null | string); default : (null | boolean | int | float | string);
} }
>; >;
constraints ?: Array< constraints ?: Array<
{ {
kind : string; kind : ("unique" | "foreign_key");
parameters ?: Record<string, any>; parameters : Record<string, any>;
} }
>; >;
comment ?: (null | string);
} }
>; >;
}; };
@ -36,5 +36,5 @@ type type_input_data = {
/** /**
*/ */
type type_output = { type type_output = {
render : ((input_data : type_input_data) => string); render : ((input_data : type_input) => string);
}; };

View file

@ -2,6 +2,7 @@
modules="" modules=""
modules="${modules} base" modules="${modules} base"
# modules="${modules} call"
modules="${modules} string" modules="${modules} string"
modules="${modules} json" modules="${modules} json"
modules="${modules} file" modules="${modules} file"

View file

@ -18,6 +18,8 @@ lib/plankton/plankton.d.ts \
source/types.ts \ source/types.ts \
source/outputs/sqlite.ts \ source/outputs/sqlite.ts \
source/outputs/mysql.ts \ source/outputs/mysql.ts \
source/outputs/typescript.ts \
source/outputs/jsonschema.ts \
source/conf.ts \ source/conf.ts \
source/main.ts source/main.ts
@ ${cmd_log} "compiling …" @ ${cmd_log} "compiling …"