Christian Fraß 2023-02-20 14:46:28 +01:00
commit e1d2344e69
13 changed files with 5305 additions and 0 deletions

3
.gitignore vendored Normal file

@@ -0,0 +1,3 @@
temp/
build/
.geany

113
doc/dtmdl.schema.json Normal file

@@ -0,0 +1,113 @@
{
"type": "object",
"additionalProperties": false,
"properties": {
"domains": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"name": {
"type": "string"
},
"description": {
"type": ["null", "string"],
"default": null
},
"key_field": {
"type": ["null","object"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string"
},
"description": {
"type": ["null", "string"],
"default": null
}
},
"required": [
"name"
],
"default": null
},
"data_fields": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"name": {
"type": "string"
},
"description": {
"type": ["null", "string"],
"default": null
},
"type": {
"type": "string",
"enum": [
"boolean",
"integer",
"float",
"string_short",
"string_medium",
"string_long"
]
},
"nullable": {
"type": "boolean",
"default": true
},
"default": {
"type": ["null", "boolean", "integer", "float", "string"],
"default": null
}
},
"required": [
"name",
"type"
]
},
"default": []
},
"constraints": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"kind": {
"type": "string",
"enum": [
"unique",
"foreign_key"
]
},
"parameters": {
"type": "object",
"additionalProperties": "string",
"properties": {
},
"required": [
]
}
},
"required": [
"kind"
]
},
"default": []
}
},
"required": [
"name"
]
}
}
},
"required": [
"domains"
]
}
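
For orientation, a small document conforming to this schema might look as follows, written here as a TypeScript constant typed against `type_input_data` from `source/types.ts` below. The domain, field, and constraint values are made up for illustration; the optional keys are spelled out explicitly because `source/main.ts` does not yet normalize defaults (see its TODO).

```typescript
// hypothetical dtmdl document: one "person" domain with a key field,
// two data fields and a uniqueness constraint
const example_model : type_input_data = {
	"domains": [
		{
			"name": "person",
			"description": "people known to the system",
			"key_field": {
				"name": "id",
				"description": "primary key",
			},
			"data_fields": [
				{"name": "name", "type": "string_medium", "nullable": false, "description": "display name"},
				{"name": "age", "type": "integer", "nullable": true},
			],
			"constraints": [
				{"kind": "unique", "parameters": {"fields": ["name"]}},
			],
		},
	],
};
```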

1148
lib/plankton/plankton.d.ts vendored Normal file

File diff suppressed because it is too large

3596
lib/plankton/plankton.js Normal file

File diff suppressed because it is too large

21
readme.md Normal file

@@ -0,0 +1,21 @@
# Sindri
## Purpose
- generation of data model scripts in various output formats (MySQL, SQLite, …) from an abstract description
## Building
### Prerequisites
- TypeScript compiler
- GNU Make
### Instructions
- run `tools/build`

43
source/main.ts Normal file

@@ -0,0 +1,43 @@
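/**
 * entry point: reads an abstract data model description as JSON from stdin and
 * writes the generated script for the selected output format ("--format", default "sqlite") to stdout
 */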
async function main(
args_raw : Array<string>
) : Promise<void>
{
const outputs : Record<string, type_output> = {
"sqlite": output_sqlite,
"mysql": output_mysql,
};
const arg_handler = new lib_args.class_handler(
{
"format": new lib_args.class_argument({
"name": "format",
"type": lib_args.enum_type.string,
"kind": lib_args.enum_kind.volatile,
"mode": lib_args.enum_mode.replace,
"default": "sqlite",
"parameters": {
"indicators_long": ["format"],
"indicators_short": ["f"],
},
"info": "output format",
}),
}
);
const args : Record<string, any> = arg_handler.read(lib_args.enum_environment.cli, args_raw.join(" "));
const input_content : string = await lib_plankton.file.read_stdin();
const input_data : type_input_data = lib_json.decode(input_content);
// TODO: sanitize & normalize input_data
if (! outputs.hasOwnProperty(args["format"])) {
throw (new Error("unhandled output format: " + args["format"]));
}
else {
const output_content : string = outputs[args["format"]].render(input_data);
process.stdout.write(output_content);
// return Promise.resolve<void>(undefined);
}
}
main(process.argv.slice(2));

169
source/outputs/mysql.ts Normal file

@@ -0,0 +1,169 @@
const output_mysql : type_output = {
"render": function (input_data) {
return (
input_data["domains"]
.map(
(domain) => lib_plankton.string.coin(
"CREATE TABLE\n\t`{{name}}`(\n{{entries}}\n\t){{comment}}\n;",
{
"name": domain["name"],
"comment": (
(
! domain.hasOwnProperty("description")
||
(domain["description"] === null)
)
? ""
: lib_plankton.string.coin(
" COMMENT '{{comment}}'",
{
"comment": domain["description"],
}
)
),
"entries": (
(
[]
// key field
.concat(
(domain["key_field"] === null)
? []
: lib_plankton.string.coin(
"`{{name}}` {{parameters}}",
{
"name": domain["key_field"]["name"],
"parameters": (
[
"INTEGER",
"PRIMARY KEY",
"AUTO INCREMENT",
]
.concat(
(
! domain["key_field"].hasOwnProperty("description")
||
(domain["key_field"]["description"] === null)
)
? []
: [
lib_plankton.string.coin(
"COMMENT '{{comment}}'",
{
"comment": domain["key_field"]["description"],
}
),
]
)
.join(" ")
),
}
)
)
// data fields
.concat(
domain["data_fields"]
.map(
(data_field) => lib_plankton.string.coin(
"`{{name}}` {{parameters}}",
{
"name": data_field["name"],
"parameters": (
(
// type
[
{
"boolean": "BOOLEAN",
"integer": "INTEGER",
"string_short": "VARCHAR(63)",
"string_medium": "VARCHAR(255)",
"string_long": "TEXT",
}[data_field["type"]],
]
// nullability
.concat(
data_field["nullable"]
? ["NULL"]
: []
)
// comment
.concat(
(
! data_field.hasOwnProperty("description")
||
(data_field["description"] === null)
)
? []
: [
lib_plankton.string.coin(
"COMMENT '{{comment}}'",
{
"comment": data_field["description"],
}
),
]
)
)
.join(" ")
)
}
)
)
)
// constraints
.concat(
domain["constraints"]
.map(
(constraint) => {
switch (constraint["kind"]) {
default: {
throw (new Error("unhandled constraint kind: " + constraint["kind"]));
break;
}
case "foreign_key": {
return lib_plankton.string.coin(
"FOREIGN KEY ({{fields}}) REFERENCES `{{reference_name}}`({{reference_fields}})",
{
"fields": (
constraint["parameters"]["fields"]
.map(x => ('`' + x + '`'))
.join(",")
),
"reference_name": constraint["parameters"]["reference"]["name"],
"reference_fields": (
constraint["parameters"]["reference"]["fields"]
.map(x => ('`' + x + '`'))
.join(",")
),
}
);
break;
}
case "unique": {
return lib_plankton.string.coin(
"UNIQUE ({{fields}})",
{
"fields": (
constraint["parameters"]["fields"]
.map(x => ('`' + x + '`'))
.join(",")
),
}
);
break;
}
}
}
)
)
)
.map(x => ("\t\t" + x))
.join(",\n")
),
}
)
)
.map(x => (x + "\n"))
.join("\n")
);
},
};
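
As a plausibility check, feeding the hypothetical `example_model` constant sketched after the schema above through this backend should produce DDL of roughly the following shape (a sketch, not captured output):

```typescript
// hypothetical round trip through the MySQL backend
const mysql_script : string = output_mysql.render(example_model);
// expected shape of mysql_script:
//
// CREATE TABLE
// 	`person`(
// 		`id` INTEGER PRIMARY KEY AUTO_INCREMENT COMMENT 'primary key',
// 		`name` VARCHAR(255) NOT NULL COMMENT 'display name',
// 		`age` INTEGER NULL,
// 		UNIQUE (`name`)
// 	) COMMENT 'people known to the system'
// ;
```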

122
source/outputs/sqlite.ts Normal file

@@ -0,0 +1,122 @@
const output_sqlite : type_output = {
"render": function (input_data) {
return (
input_data["domains"]
.map(
(domain) => lib_plankton.string.coin(
"CREATE TABLE\n\t`{{name}}`(\n{{entries}}\n\t)\n;",
{
"name": domain["name"],
"entries": (
(
[]
// key field
.concat(
(domain["key_field"] === null)
? []
: lib_plankton.string.coin(
"`{{name}}` {{parameters}}",
{
"name": domain["key_field"]["name"],
"parameters": (
[
"INTEGER",
"PRIMARY KEY",
"AUTOINCREMENT",
]
.join(" ")
),
}
)
)
// data fields
.concat(
domain["data_fields"]
.map(
(data_field) => lib_plankton.string.coin(
"`{{name}}` {{parameters}}",
{
"name": data_field["name"],
"parameters": (
(
// type
[
{
"boolean": "BOOLEAN",
"integer": "INTEGER",
"string_short": "VARCHAR(63)",
"string_medium": "VARCHAR(255)",
"string_long": "TEXT",
}[data_field["type"]],
]
// nullability
.concat(
data_field["nullable"]
? ["NULL"]
: []
)
)
.join(" ")
)
}
)
)
)
// constraints
.concat(
domain["constraints"]
.map(
(constraint) => {
switch (constraint["kind"]) {
default: {
throw (new Error("unhandled constraint kind: " + constraint["kind"]));
break;
}
case "foreign_key": {
return lib_plankton.string.coin(
"FOREIGN KEY ({{fields}}) REFERENCES `{{reference_name}}`({{reference_fields}})",
{
"fields": (
constraint["parameters"]["fields"]
.map(x => ('`' + x + '`'))
.join(",")
),
"reference_name": constraint["parameters"]["reference"]["name"],
"reference_fields": (
constraint["parameters"]["reference"]["fields"]
.map(x => ('`' + x + '`'))
.join(",")
),
}
);
break;
}
case "unique": {
return lib_plankton.string.coin(
"UNIQUE ({{fields}})",
{
"fields": (
constraint["parameters"]["fields"]
.map(x => ('`' + x + '`'))
.join(",")
),
}
);
break;
}
}
}
)
)
)
.map(x => ("\t\t" + x))
.join(",\n")
),
}
)
)
.map(x => (x + "\n"))
.join("\n")
);
},
};
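
Rendering the same hypothetical `example_model` with this backend differs mainly in the key column (SQLite's `AUTOINCREMENT`) and in the absence of `COMMENT` clauses, which SQLite does not support (again a sketch, not captured output):

```typescript
// hypothetical round trip through the SQLite backend
const sqlite_script : string = output_sqlite.render(example_model);
// expected shape of sqlite_script:
//
// CREATE TABLE
// 	`person`(
// 		`id` INTEGER PRIMARY KEY AUTOINCREMENT,
// 		`name` VARCHAR(255) NOT NULL,
// 		`age` INTEGER NULL,
// 		UNIQUE (`name`)
// 	)
// ;
```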

40
source/types.ts Normal file

@@ -0,0 +1,40 @@
/**
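 * shape of the abstract data model description read from standard input (cf. doc/dtmdl.schema.json)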
*/
type type_input_data = {
domains : Array<
{
name : string;
key_field ?: (
null
|
{
name : string;
description ?: (null | string);
}
);
data_fields ?: Array<
{
name : string;
type : string;
nullable ?: boolean;
default ?: any;
description ?: (null | string);
}
>;
constraints ?: Array<
{
kind : string;
parameters ?: Record<string, any>;
}
>;
description ?: (null | string);
}
>;
};
/**
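 * an output backend: renders a data model description into a script for one target format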
*/
type type_output = {
render : ((input_data : type_input_data) => string);
};
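
`type_output` is the contract every backend implements; `source/main.ts` dispatches on it via its `outputs` record. A hypothetical extra backend (not part of this commit) could look like this:

```typescript
// minimal sketch of an additional backend; registering it in the "outputs"
// record in source/main.ts as "debug": output_debug would expose it via --format=debug
const output_debug : type_output = {
	"render": function (input_data) {
		// list only the domain names, one per line
		return (
			input_data["domains"]
			.map((domain) => domain["name"])
			.join("\n")
		);
	},
};
```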

4
tools/build Executable file

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
make --file=tools/makefile

3
tools/clear Executable file

@@ -0,0 +1,3 @@
#!/usr/bin/env sh
rm --recursive --force temp build

13
tools/get-plankton Executable file

@@ -0,0 +1,13 @@
#!/usr/bin/env sh
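# bundle the plankton library modules needed by this project into lib/plankton (uses the external ptk tool)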
modules=""
modules="${modules} base"
modules="${modules} string"
modules="${modules} json"
modules="${modules} file"
modules="${modules} args"
mkdir -p lib/plankton
cd lib/plankton
ptk bundle node ${modules}
cd -

30
tools/makefile Normal file

@@ -0,0 +1,30 @@
## commands
cmd_create_directory := mkdir --parents
cmd_typescript_compile := tsc
cmd_concatenate := cat
cmd_echo := echo -e
cmd_log := echo -e "--"
## rules
.PHONY: all
all: build/datamodel.js
temp/datamodel-unlinked.js: \
lib/plankton/plankton.d.ts \
source/types.ts \
source/outputs/sqlite.ts \
source/outputs/mysql.ts \
source/main.ts
@ ${cmd_log} "compiling …"
@ ${cmd_create_directory} temp
@ ${cmd_typescript_compile} $^ --lib es2020 --target es6 --outFile $@
build/datamodel.js: lib/plankton/plankton.js temp/datamodel-unlinked.js
@ ${cmd_log} "linking …"
@ ${cmd_create_directory} build
@ ${cmd_echo} "#!/usr/bin/env node\n" > temp/head.js
@ ${cmd_concatenate} temp/head.js $^ > $@