[mod] Datenbank statt Zustands-Datei nutzen

This commit is contained in:
Christian Fraß 2023-04-28 17:30:51 +02:00
parent b011be560a
commit e3d85dd1a8
19 changed files with 323 additions and 286 deletions

View file

@ -5,7 +5,7 @@
"attentive_interval": 1,
"reminding_interval": 20
},
"threshold": 1,
"threshold": 3,
"notifications": [
{
"kind": "console",
@ -15,6 +15,6 @@
]
},
"includes": [
"http_request.hmdl.json"
"file_state.hmdl.json"
]
}

View file

@ -4,14 +4,16 @@
"conditions.concerning": "bedenklich",
"conditions.critical": "kritisch",
"help.title": "Heimdall — Werkzeug zur System-Überwachung",
"help.args.conf_path": "Pfad zur Konfigurations-Datei",
"help.args.state_path": "Pfad zur Zustands-Datei, welche Daten über vorherige Prüfungen enthält; Standard-Wert: Pfad im temporären Verzeichnis des Systems mit eindeutigem Namen in Bezug auf den Pfad zur Konfigurations-Datei",
"help.args.order_path": "Pfad zur Auftrags-Datei",
"help.args.state_path": "Pfad zur Zustands-Datei, welche Daten über vorherige Prüfungen enthält; Standard-Wert: Pfad im temporären Verzeichnis des Systems mit eindeutigem Namen in Bezug auf den Pfad zur Auftrags-Datei",
"help.args.database_path": "Pfad zur Datenbank-Datei, welche die Prüfungs-Ergebnisse enthält; Standard-Wert: Pfad im temporären Verzeichnis des Systems mit eindeutigem Namen in Bezug auf den Pfad zur Auftrags-Datei",
"help.args.mutex_path": "Pfad zur Datei zur Verhinderung paralleler Ausführung",
"help.args.time_to_live": "wie lange (in Sekunden) Ergebnis-Einträge in der Datenbank gehalten werden sollen",
"help.args.send_ok_notifications": "ob '{{condition_name}}'-Zustände gemeldet werden sollen",
"help.args.language": "welche Sprache verwendet werden soll (statt der in den Umgebungs-Variablen gesetzten)",
"help.args.erase_state": "ob der Zustand bei Start gelöscht werden soll; das hat zur Folge, dass alle Prüfungen unmittelbar durchgeführt werden",
"help.args.show_schema": "nur das hmdl-JSON-Schema zur Standard-Ausgabe schreiben und beenden",
"help.args.expose_full_conf": "nur die erweiterte Konfiguration zur Standard-Ausgabe schreiben und beenden (nützlich für Fehlersuche)",
"help.args.expose_full_order": "nur den erweiterten Auftrag zur Standard-Ausgabe schreiben und beenden (nützlich für Fehlersuche)",
"checks.file_state.exists": "Datei existiert (soll aber nicht)",
"checks.file_state.missing": "Datei existiert nicht (soll aber)",
"checks.file_state.timestamp_implausible": "Datei ist scheinbar aus der Zukunft",
@ -27,5 +29,6 @@
"checks.http_request.body_misses_part": "Rumpf enthält nicht den erwarteten Teil '{{part}}'",
"misc.state_file_path": "Pfad zur Zustands-Datei",
"misc.check_procedure_failed": "Prüfungs-Prozedur fehlgeschlagen",
"misc.still_running": "läuft bereits/noch"
"misc.still_running": "läuft bereits/noch",
"misc.cleanup_info": "{{count}} alte Ergebnis-Datensätze gelöscht"
}

View file

@ -4,14 +4,16 @@
"conditions.concerning": "concerning",
"conditions.critical": "critical",
"help.title": "Heimdall — Monitoring Tool",
"help.args.conf_path": "path to the configuration file",
"help.args.state_path": "path to the state file, which contains information about the recent checks; default: file in temporary directory, unique for the conf-path input",
"help.args.order_path": "path to the order file",
"help.args.state_path": "path to the state file, which contains information about the recent checks; default: file in temporary directory, unique for the order-path input",
"help.args.database_path": "path to the database file, containing check results; default: file in temporary directory, unique for the order-path input",
"help.args.mutex_path": "path to file for preventing parallel execution",
"help.args.time_to_live": "how long (in seconds) result entries are supposed to be kept in database",
"help.args.send_ok_notifications": "whether an '{{condition_name}}' condition shall be reported",
"help.args.language": "language to use (instead of the language, set in the environment variables)",
"help.args.erase_state": "whether the state shall be deleted on start; this will cause that all checks are executed immediately",
"help.args.show_schema": "print the hmdl JSON schema to stdout and exit",
"help.args.expose_full_conf": "only print the extended configuration to stdout and exit (useful for debugging)",
"help.args.expose_full_order": "only print the extended order to stdout and exit (useful for debugging)",
"checks.file_state.exists": "file exists (but shall not)",
"checks.file_state.missing": "file does not exist (but shall)",
"checks.file_state.timestamp_implausible": "file is apparently from the future",
@ -27,5 +29,6 @@
"checks.http_request.body_misses_part": "body does not contain the expected part '{{part}}'",
"misc.state_file_path": "state file path",
"misc.check_procedure_failed": "check procedure failed",
"misc.still_running": "already/still running"
"misc.still_running": "already/still running",
"misc.cleanup_info": "removed {{count}} old result entries"
}

View file

@ -4,7 +4,7 @@ class interface_notification_channel(object):
raise NotImplementedError
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
raise NotImplementedError

View file

@ -17,7 +17,7 @@ class implementation_notification_channel_console(interface_notification_channel
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
return dict_merge(
{
},

View file

@ -61,7 +61,7 @@ class implementation_notification_channel_email(interface_notification_channel):
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
return dict_merge(
{
},

View file

@ -20,7 +20,7 @@ class implementation_notification_channel_libnotify(interface_notification_chann
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
return dict_merge(
{
"icon": "/usr/local/share/icons/heimdall.png",

View file

@ -4,7 +4,7 @@ class interface_check_kind(object):
raise NotImplementedError
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
raise NotImplementedError

View file

@ -43,7 +43,7 @@ class implementation_check_kind_file_state(interface_check_kind):
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
if ("path" not in node):
raise ValueError("missing mandatory field 'path'")
else:
@ -62,27 +62,18 @@ class implementation_check_kind_file_state(interface_check_kind):
[implementation]
'''
def run(self, parameters):
exists = _os.path.exists(parameters["path"])
if (parameters["exist"]):
if (parameters["exist"] and not exists):
return {
"condition": (
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
),
"info": {
"path": parameters["path"],
"faults": [
translation_get("checks.file_state.missing"),
],
"data": {
},
}
}
else:
faults = []
data = {}
exists = _os.path.exists(parameters["path"])
if (not parameters["exist"]):
if (exists):
faults.append(translation_get("checks.file_state.exists"))
else:
pass
else:
if (not exists):
faults.append(translation_get("checks.file_state.missing"))
else:
stat = _os.stat(parameters["path"])
## age
if True:
@ -123,13 +114,13 @@ class implementation_check_kind_file_state(interface_check_kind):
data,
{
"size_value_in_bytes": size,
"size_threshold_in_bytes": parameters["size_threshold_in_bytes"],
"size_threshold_in_bytes": parameters["size_threshold"],
}
)
return {
"condition": (
enum_condition.ok
if (len(faults) == 0) else
if (len(faults) <= 0) else
(
enum_condition.critical
if parameters["strict"] else
@ -142,27 +133,4 @@ class implementation_check_kind_file_state(interface_check_kind):
"data": data,
}
}
else:
if (not exists):
return {
"condition": (
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
),
"info": {
"path": parameters["path"],
"faults": [
translation_get("checks.file_state.exists")
],
"data": {
},
}
}
else:
return {
"condition": enum_condition.ok,
"info": {
}
}

View file

@ -46,7 +46,7 @@ class implementation_check_kind_generic_remote(interface_check_kind):
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
if (not "host" in node):
raise ValueError("mandatory parameter \"host\" missing")
else:
@ -108,12 +108,20 @@ class implementation_check_kind_generic_remote(interface_check_kind):
"avail": int(stuff[3]),
"perc": int(stuff[4][:-1]),
}
faults = []
if (data["perc"] > parameters["threshold"]):
faults.append(translation_get("checks.generic_remote.overflow"))
else:
pass
return {
"condition": (
enum_condition.ok
if (len(faults) <= 0) else
(
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
)
),
"info": {
"data": {
@ -124,14 +132,7 @@ class implementation_check_kind_generic_remote(interface_check_kind):
"available": format_bytes(data["avail"]),
"percentage": (str(data["perc"]) + "%"),
},
"faults": [
translation_get("checks.generic_remote.overflow")
],
"faults": faults
}
}
else:
return {
"condition": enum_condition.ok,
"info": {}
}

View file

@ -84,7 +84,7 @@ class implementation_check_kind_http_request(interface_check_kind):
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
node_ = dict_merge(
{
"request": {
@ -219,7 +219,7 @@ class implementation_check_kind_http_request(interface_check_kind):
"request": parameters["request"],
"response": {
"status_code": response.status_code,
# "headers": dict(map(lambda pair: pair, response.headers.items())),
"headers": dict(map(lambda pair: pair, response.headers.items())),
# "body": response.text,
},
"faults": faults,

View file

@ -27,7 +27,7 @@ class implementation_check_kind_script(interface_check_kind):
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
return dict_merge(
{
},

View file

@ -41,7 +41,7 @@ class implementation_check_kind_tls_certificate(interface_check_kind):
'''
[implementation]
'''
def normalize_conf_node(self, node):
def normalize_order_node(self, node):
if (not "host" in node):
raise ValueError("missing mandatory field 'host'")
else:
@ -62,60 +62,50 @@ class implementation_check_kind_tls_certificate(interface_check_kind):
[implementation]
'''
def run(self, parameters):
faults = []
data = {}
context = _ssl.create_default_context()
try:
socket = _socket.create_connection((parameters["host"], parameters["port"], ))
socket_wrapped = context.wrap_socket(socket, server_hostname = parameters["host"])
version = socket_wrapped.version()
data = socket_wrapped.getpeercert(False)
stuff = socket_wrapped.getpeercert(False)
except _ssl.SSLCertVerificationError as error:
version = None
data = None
if (data is None):
return {
"condition": (
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
),
"info": {
"host": parameters["host"],
"port": parameters["port"],
"faults": [
translation_get("checks.tls_certificate.not_obtainable"),
],
"data": {
},
}
}
stuff = None
if (stuff is None):
faults.append(translation_get("checks.tls_certificate.not_obtainable"))
else:
# version == "TLSv1.3"
expiry_timestamp = _ssl.cert_time_to_seconds(data["notAfter"])
expiry_timestamp = _ssl.cert_time_to_seconds(stuff["notAfter"])
current_timestamp = get_current_timestamp()
days = _math.ceil((expiry_timestamp - current_timestamp) / (60 * 60 * 24))
if (days <= parameters["expiry_threshold"]):
return {
"condition": (
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
),
"info": {
"host": parameters["host"],
"port": parameters["port"],
"faults": [
translation_get("checks.tls_certificate.expires_soon"),
],
"data": {
data = dict_merge(
data,
{
"expiry_timestamp": expiry_timestamp,
"days": days,
},
}
}
)
if (days <= parameters["expiry_threshold"]):
faults.append(translation_get("checks.tls_certificate.expires_soon"))
else:
pass
return {
"condition": enum_condition.ok,
"condition": (
enum_condition.ok
if (len(faults) <= 0) else
(
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
)
),
"info": {
"host": parameters["host"],
"port": parameters["port"],
"faults": faults,
"data": data,
}
}

View file

@ -89,3 +89,30 @@ def format_bytes(bytes_):
)
)
def sqlite_query_set(database_path, template, arguments):
connection = _sqlite3.connect(database_path)
cursor = connection.cursor()
result = cursor.execute(template, arguments)
connection.commit()
connection.close()
return result
def sqlite_query_put(database_path, template, arguments):
connection = _sqlite3.connect(database_path)
cursor = connection.cursor()
result = cursor.execute(template, arguments)
connection.commit()
connection.close()
return result
def sqlite_query_get(database_path, template, arguments):
connection = _sqlite3.connect(database_path)
cursor = connection.cursor()
result = cursor.execute(template, arguments)
rows = result.fetchall()
connection.close()
return rows

View file

@ -1,25 +1,3 @@
def state_encode(state):
return {
"timestamp": state["timestamp"],
"condition": condition_encode(state["condition"]),
"count": state["count"],
"last_notification_timestamp": state["last_notification_timestamp"],
}
def state_decode(state_encoded):
return {
"timestamp": state_encoded["timestamp"],
"condition": condition_decode(state_encoded["condition"]),
"count": state_encoded["count"],
"last_notification_timestamp": (
state_encoded["last_notification_timestamp"]
if ("last_notification_timestamp" in state_encoded) else
None
),
}
def main():
## setup translation for the first time
translation_initialize("en", env_get_language())
@ -30,28 +8,51 @@ def main():
formatter_class = _argparse.ArgumentDefaultsHelpFormatter
)
argumentparser.add_argument(
"-c",
"--conf-path",
type = str,
default = "monitoring.hmdl.json",
dest = "conf_path",
metavar = "<conf-path>",
help = translation_get("help.args.conf_path"),
dest = "order_path",
metavar = "<order-path>",
help = translation_get("help.args.order_path"),
)
argumentparser.add_argument(
"-f",
"--state-path",
"-x",
"--erase-state",
action = "store_true",
default = False,
dest = "erase_state",
help = translation_get("help.args.erase_state"),
)
argumentparser.add_argument(
"-s",
"--show-schema",
action = "store_true",
default = False,
dest = "show_schema",
help = translation_get("help.args.show_schema"),
)
argumentparser.add_argument(
"-e",
"--expose-full-order",
action = "store_true",
default = False,
dest = "expose_full_order",
help = translation_get("help.args.expose_full_order"),
)
### v conf stuff v
argumentparser.add_argument(
"-d",
"--database-path",
type = str,
default = None,
dest = "state_path",
metavar = "<state-path>",
help = translation_get("help.args.state_path"),
dest = "database_path",
metavar = "<database-path>",
help = translation_get("help.args.database_path"),
)
argumentparser.add_argument(
"-m",
"--mutex-path",
type = str,
default = "/tmp/heimdall.mutex",
default = "/tmp/heimdall.lock",
dest = "mutex_path",
metavar = "<mutex-path>",
help = translation_get("help.args.mutex_path"),
@ -75,39 +76,24 @@ def main():
help = translation_get("help.args.language"),
)
argumentparser.add_argument(
"-x",
"--erase-state",
action = "store_true",
default = False,
dest = "erase_state",
help = translation_get("help.args.erase_state"),
)
argumentparser.add_argument(
"-s",
"--show-schema",
action = "store_true",
default = False,
dest = "show_schema",
help = translation_get("help.args.show_schema"),
)
argumentparser.add_argument(
"-e",
"--expose-full-conf",
action = "store_true",
default = False,
dest = "expose_full_conf",
help = translation_get("help.args.expose_full_conf"),
"-t",
"--time-to-live",
type = int,
default = (60 * 60 * 24 * 7),
dest = "time_to_live",
metavar = "<time-to-live>",
help = translation_get("help.args.time_to_live"),
)
args = argumentparser.parse_args()
## vars
id_ = _hashlib.sha256(_os.path.abspath(args.conf_path).encode("ascii")).hexdigest()[:8]
state_path = (
args.state_path
if (args.state_path is not None) else
id_ = _hashlib.sha256(_os.path.abspath(args.order_path).encode("ascii")).hexdigest()[:8]
database_path = (
args.database_path
if (args.database_path is not None) else
_os.path.join(
_tempfile.gettempdir(),
string_coin("monitoring-state-{{id}}.json", {"id": id_})
string_coin("monitoring-state-{{id}}.sqlite", {"id": id_})
)
)
@ -136,7 +122,7 @@ def main():
if (args.show_schema):
_sys.stdout.write(
_json.dumps(
conf_schema_root(
order_schema_root(
check_kind_implementations,
notification_channel_implementations
),
@ -146,15 +132,15 @@ def main():
"\n"
)
else:
### get configuration data
conf = conf_load(
### get order data
order = order_load(
check_kind_implementations,
notification_channel_implementations,
_os.path.abspath(args.conf_path)
_os.path.abspath(args.order_path)
)
if (args.expose_full_conf):
_sys.stdout.write(_json.dumps(conf, indent = "\t") + "\n")
if (args.expose_full_order):
_sys.stdout.write(_json.dumps(order, indent = "\t") + "\n")
_sys.exit(1)
else:
_sys.stderr.write(
@ -162,7 +148,7 @@ def main():
"[info] {{label}}: {{path}}\n",
{
"label": translation_get("misc.state_file_path"),
"path": state_path,
"path": database_path,
}
)
)
@ -180,30 +166,79 @@ def main():
)
_sys.exit(2)
else:
### setup database
sqlite_query_set(
database_path,
"CREATE TABLE IF NOT EXISTS results(check_name TEXT NOT NULL, timestamp INTEGER NOT NULL, condition TEXT NOT NULL, notification_sent BOOLEAN NOT NULL, info TEXT NOT NULL);",
{}
)
### clean database
result = sqlite_query_put(
database_path,
"DELETE FROM results WHERE ((timestamp < :timestamp_min) OR :erase_state);",
{
"timestamp_min": (get_current_timestamp() - args.time_to_live),
"erase_state": args.erase_state,
}
)
_sys.stderr.write(
string_coin(
"[info] {{text}}\n",
{
"text": translation_get(
"misc.cleanup_info",
{
"count": ("%u" % result.rowcount),
}
),
}
)
)
file_write(args.mutex_path, "", {"append": True})
### get state data
if (
(not _os.path.exists(state_path))
or
args.erase_state
):
state_data = {}
file_write(state_path, _json.dumps(state_data, indent = "\t"))
else:
state_data = _json.loads(file_read(state_path))
### iterate through checks
for check_data in conf["checks"]:
for check_data in order["checks"]:
if (not check_data["active"]):
pass
else:
### get old state and examine whether the check shall be executed
old_item_state = (
None
if (check_data["name"] not in state_data) else
state_decode(state_data[check_data["name"]])
rows = sqlite_query_get(
database_path,
"SELECT timestamp, condition, notification_sent FROM results WHERE (check_name = :check_name) ORDER BY timestamp DESC LIMIT :limit;",
{
"check_name": check_data["name"],
"limit": (check_data["threshold"] + 1),
}
)
if (len(rows) <= 0):
old_item_state = None
else:
last_notification_timestamp = None
count = 1
for row in rows[1:]:
if (row[1] == rows[0][1]):
count += 1
else:
break
if (count > check_data["threshold"]):
count = None
else:
pass
for row in rows:
if (row[2]):
last_notification_timestamp = row[0]
break
else:
pass
old_item_state = {
"timestamp": rows[0][0],
"condition": condition_decode(rows[0][1]),
"count": count,
"last_notification_timestamp": last_notification_timestamp,
}
timestamp = get_current_timestamp()
due = (
(old_item_state is None)
@ -237,7 +272,7 @@ def main():
result = {
"condition": enum_condition.unknown,
"info": {
# "cause": translation_get("misc.check_procedure_failed"),
"cause": translation_get("misc.check_procedure_failed"),
"error": str(error),
},
}
@ -311,11 +346,22 @@ def main():
)
),
}
state_data[check_data["name"]] = state_encode(new_item_state)
file_write(state_path, _json.dumps(state_data, indent = "\t"))
sqlite_query_put(
database_path,
"INSERT INTO results(check_name, timestamp, condition, notification_sent, info) VALUES (:check_name, :timestamp, :condition, :notification_sent, :info);",
{
"check_name": check_data["name"],
"timestamp": timestamp,
"condition": condition_encode(result["condition"]),
"notification_sent": shall_send_notification,
"info": _json.dumps(result["info"]),
}
)
### send notifications
if shall_send_notification:
if (not shall_send_notification):
pass
else:
for notification in check_data["notifications"]:
notification_channel_implementations[notification["kind"]].notify(
notification["parameters"],

View file

@ -1,4 +1,4 @@
def conf_schema_active(
def order_schema_active(
):
return {
"description": "whether the check shall be executed",
@ -7,7 +7,7 @@ def conf_schema_active(
}
def conf_schema_threshold(
def order_schema_threshold(
):
return {
"description": "how often a condition has to occur in order to be reported",
@ -17,7 +17,7 @@ def conf_schema_threshold(
}
def conf_schema_annoy(
def order_schema_annoy(
):
return {
"description": "whether notifications about non-ok states shall be kept sending after the threshold has been surpassed",
@ -26,7 +26,7 @@ def conf_schema_annoy(
}
def conf_schema_interval(
def order_schema_interval(
allow_null,
default
):
@ -52,22 +52,22 @@ def conf_schema_interval(
}
def conf_schema_schedule(
def order_schema_schedule(
):
return {
"type": "object",
"additionalProperties": False,
"properties": {
"regular_interval": conf_schema_interval(False, (60 * 60)),
"attentive_interval": conf_schema_interval(False, (60 * 2)),
"reminding_interval": conf_schema_interval(True, (60 * 60 * 24)),
"regular_interval": order_schema_interval(False, (60 * 60)),
"attentive_interval": order_schema_interval(False, (60 * 2)),
"reminding_interval": order_schema_interval(True, (60 * 60 * 24)),
},
"required": [
],
}
def conf_schema_notifications(
def order_schema_notifications(
notification_channel_implementations
):
return {
@ -105,7 +105,7 @@ def conf_schema_notifications(
}
def conf_schema_root(
def order_schema_root(
check_kind_implementations,
notification_channel_implementations
):
@ -118,11 +118,11 @@ def conf_schema_root(
"type": "object",
"additionalProperties": False,
"properties": {
"active": conf_schema_active(),
"threshold": conf_schema_threshold(),
"annoy": conf_schema_annoy(),
"schedule": conf_schema_schedule(),
"notifications": conf_schema_notifications(notification_channel_implementations),
"active": order_schema_active(),
"threshold": order_schema_threshold(),
"annoy": order_schema_annoy(),
"schedule": order_schema_schedule(),
"notifications": order_schema_notifications(notification_channel_implementations),
},
"required": [
],
@ -150,11 +150,11 @@ def conf_schema_root(
"title": {
"type": "string"
},
"active": conf_schema_active(),
"threshold": conf_schema_threshold(),
"annoy": conf_schema_annoy(),
"schedule": conf_schema_schedule(),
"notifications": conf_schema_notifications(notification_channel_implementations),
"active": order_schema_active(),
"threshold": order_schema_threshold(),
"annoy": order_schema_annoy(),
"schedule": order_schema_schedule(),
"notifications": order_schema_notifications(notification_channel_implementations),
},
"required": [
"name",
@ -196,7 +196,7 @@ def conf_schema_root(
}
def conf_normalize_interval(
def order_normalize_interval(
interval_raw
):
if (interval_raw is None):
@ -219,7 +219,7 @@ def conf_normalize_interval(
raise ValueError("invalid type for interval value")
def conf_normalize_schedule(
def order_normalize_schedule(
node
):
node_ = dict_merge(
@ -231,13 +231,13 @@ def conf_normalize_schedule(
node
)
return {
"regular_interval": conf_normalize_interval(node_["regular_interval"]),
"attentive_interval": conf_normalize_interval(node_["attentive_interval"]),
"reminding_interval": conf_normalize_interval(node_["reminding_interval"]),
"regular_interval": order_normalize_interval(node_["regular_interval"]),
"attentive_interval": order_normalize_interval(node_["attentive_interval"]),
"reminding_interval": order_normalize_interval(node_["reminding_interval"]),
}
def conf_normalize_notification(
def order_normalize_notification(
notification_channel_implementations,
node
):
@ -246,11 +246,11 @@ def conf_normalize_notification(
else:
return {
"kind": node["kind"],
"parameters": notification_channel_implementations[node["kind"]].normalize_conf_node(node["parameters"]),
"parameters": notification_channel_implementations[node["kind"]].normalize_order_node(node["parameters"]),
}
def conf_normalize_defaults(
def order_normalize_defaults(
notification_channel_implementations,
node
):
@ -276,14 +276,14 @@ def conf_normalize_defaults(
"schedule": node_["schedule"],
"notifications": list(
map(
lambda x: conf_normalize_notification(notification_channel_implementations, x),
lambda x: order_normalize_notification(notification_channel_implementations, x),
node_["notifications"]
)
),
}
def conf_normalize_check(
def order_normalize_check(
check_kind_implementations,
notification_channel_implementations,
defaults,
@ -321,24 +321,24 @@ def conf_normalize_check(
if ("annoy" in node_):
node__["annoy"] = node_["annoy"]
if ("schedule" in node_):
node__["schedule"] = conf_normalize_schedule(node_["schedule"])
node__["schedule"] = order_normalize_schedule(node_["schedule"])
if ("notifications" in node_):
node__["notifications"] = list(
map(
lambda x: conf_normalize_notification(notification_channel_implementations, x),
lambda x: order_normalize_notification(notification_channel_implementations, x),
node_["notifications"]
)
)
if ("kind" in node_):
node__["kind"] = node_["kind"]
if True:
node__["parameters"] = check_kind_implementations[node_["kind"]].normalize_conf_node(node_["parameters"])
node__["parameters"] = check_kind_implementations[node_["kind"]].normalize_order_node(node_["parameters"])
if ("custom" in node_):
node__["custom"] = node_["custom"]
return node__
def conf_normalize_root(
def order_normalize_root(
check_kind_implementations,
notification_channel_implementations,
node,
@ -377,7 +377,7 @@ def conf_normalize_root(
{}
)
defaults = (
conf_normalize_defaults(
order_normalize_defaults(
notification_channel_implementations,
defaults_raw
)
@ -394,7 +394,7 @@ def conf_normalize_root(
"includes": includes,
"checks": list(
map(
lambda node_: conf_normalize_check(
lambda node_: order_normalize_check(
check_kind_implementations,
notification_channel_implementations,
defaults,
@ -406,7 +406,7 @@ def conf_normalize_root(
}
def conf_load(
def order_load(
check_kind_implementations,
notification_channel_implementations,
path,
@ -422,15 +422,15 @@ def conf_load(
if (path in options["already_included"]):
raise ValueError("circular dependency detected")
else:
conf_raw = _json.loads(file_read(path))
order_raw = _json.loads(file_read(path))
includes = (
conf_raw["includes"]
if ("includes" in conf_raw) else
order_raw["includes"]
if ("includes" in order_raw) else
[]
)
for index in range(len(includes)):
path_ = includes[index]
sub_conf = conf_load(
sub_order = order_load(
check_kind_implementations,
notification_channel_implementations,
(
@ -443,9 +443,9 @@ def conf_load(
"already_included": (options["already_included"] | {path})
}
)
if (not "checks" in conf_raw):
conf_raw["checks"] = []
conf_raw["checks"].extend(
if (not "checks" in order_raw):
order_raw["checks"] = []
order_raw["checks"].extend(
list(
map(
lambda check: dict_merge(
@ -460,15 +460,15 @@ def conf_load(
),
}
),
sub_conf["checks"]
sub_order["checks"]
)
)
)
conf_raw["includes"] = []
return conf_normalize_root(
order_raw["includes"] = []
return order_normalize_root(
check_kind_implementations,
notification_channel_implementations,
conf_raw,
order_raw,
{
"use_implicit_default_values": options["root"],
}

View file

@ -14,3 +14,4 @@ import smtplib as _smtplib
from email.mime.text import MIMEText
import ssl as _ssl
import socket as _socket
import sqlite3 as _sqlite3

View file

@ -1,7 +1,4 @@
- auf DB umstellen?
- Schreib-Operationen vermindern
- Benachrichtigungen versenden, wenn ein Zustand sich wieder normalisiert hat (aber vorher über dem Schwellwert oft nicht OK war)
- längere Statistiken über Metriken führen um auch Anstiege/Abfälle auszuwerten (z.B. "Speicherplatzverbrauch innerhalb einer Woche um 5GB gestiegen")
- Selbst-Test
- Benachrichtigungs-Kanäle:
- Matrix
@ -10,3 +7,4 @@
- Versionierung
- Test-Routinen
- neu schreiben in TypeScript (und plankton dafür nutzen?)
- ein paar der Kommandozeilen-Argumente in Konfiguration auslagern

View file

@ -40,7 +40,7 @@ def main():
_os.path.join(dir_source, "logic", "lib.py"),
_os.path.join(dir_source, "logic", "localization.py"),
_os.path.join(dir_source, "logic", "condition.py"),
_os.path.join(dir_source, "logic", "conf.py"),
_os.path.join(dir_source, "logic", "order.py"),
_os.path.join(dir_source, "logic", "checks", "_interface.py"),
_os.path.join(dir_source, "logic", "checks", "script.py"),
_os.path.join(dir_source, "logic", "checks", "file_state.py"),