[mod] Parameter (fast) aller Checks abwärtskompatibel angepasst

This commit is contained in:
Christian Fraß 2023-06-18 21:11:37 +02:00
parent a944f37ce8
commit bd950c0a57
9 changed files with 815 additions and 425 deletions

View file

@ -539,18 +539,67 @@
"path": {
"type": "string"
},
"strict": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": true
},
"exist": {
"exist_mode": {
"description": "whether the file is supposed to exist or not",
"type": "boolean",
"default": true
},
"exist_critical": {
"description": "whether a violation of the extist state (parameter 'exist_mode') shall be considered as critical (true) or concerning (false)",
"type": "boolean",
"default": true
},
"age_threshold_concerning": {
"description": "in seconds; ignored if 'exist_mode' is set to false",
"type": [
"null",
"integer"
],
"exclusiveMinimum": 0,
"default": null
},
"age_threshold_critical": {
"description": "in seconds; ignored if 'exist_mode' is set to false",
"type": [
"null",
"integer"
],
"exclusiveMinimum": 0,
"default": null
},
"size_threshold_concerning": {
"description": "in bytes; ignored if 'exist_mode' is set to false",
"type": [
"null",
"integer"
],
"exclusiveMinimum": 0,
"default": null
},
"size_threshold_critical": {
"description": "in bytes; ignored if 'exist_mode' is set to false",
"type": [
"null",
"integer"
],
"exclusiveMinimum": 0,
"default": null
},
"strict": {
"deprecated": true,
"description": "",
"type": "boolean",
"default": true
},
"exist": {
"deprecated": true,
"description": "",
"type": "boolean",
"default": true
},
"age_threshold": {
"description": "in seconds; ignored if 'exist' is set to false",
"deprecated": true,
"description": "",
"type": [
"null",
"integer"
@ -559,8 +608,12 @@
"default": null
},
"size_threshold": {
"description": "in bytes; ignored if 'exist' is set to false",
"type": "integer",
"deprecated": true,
"description": "",
"type": [
"null",
"integer"
],
"exclusiveMinimum": 0,
"default": null
}
@ -601,19 +654,42 @@
"type": "integer",
"default": 443
},
"strict": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": true
},
"expiry_threshold": {
"description": "in days; allowed amount of valid days before the certificate expires",
"expiry_threshold_concerning": {
"description": "in days; allowed amount of valid days before the certificate expires; threshold for condition 'concerning'; 'null' means 'report at no value'",
"type": [
"null",
"integer"
],
"default": 7,
"minimum": 0
},
"expiry_threshold_critical": {
"description": "in days; allowed amount of valid days before the certificate expires; threshold for condition 'critical'; 'null' means 'report at no value'",
"type": [
"null",
"integer"
],
"default": 1,
"minimum": 0
},
"expiry_threshold": {
"deprecated": true,
"description": "",
"type": [
"null",
"integer"
],
"minimum": 0,
"default": null
},
"strict": {
"deprecated": true,
"description": "",
"type": [
"null",
"boolean"
],
"default": null
}
},
"required": [
@ -706,10 +782,16 @@
},
"required": []
},
"strict": {
"critical": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": true
},
"strict": {
"deprecated": true,
"description": "alias for 'critical'",
"type": "boolean",
"default": true
}
},
"required": [
@ -774,9 +856,16 @@
"default": 95,
"description": "maximaler F\u00fcllstand in Prozent"
},
"strict": {
"critical": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": false
"default": true
},
"strict": {
"deprecated": true,
"description": "alias for 'critical'",
"type": "boolean",
"default": true
}
},
"required": [

View file

@ -13,10 +13,12 @@
"help.args.language": "welche Sprache verwendet werden soll (statt der in den Umgebungs-Variablen gesetzten)",
"help.args.erase_state": "ob der Zustand bei Start gelöscht werden soll; das hat zur Folge, dass alle Prüfungen unmittelbar durchgeführt werden",
"help.args.show_schema": "nur das hmdl-JSON-Schema zur Standard-Ausgabe schreiben und beenden",
"help.args.expose_full_order": "nur den database_path Auftrag zur Standard-Ausgabe schreiben und beenden (nützlich für Fehlersuche)",
"help.args.expose_full_order": "nur den Pfad zur Datenbank-Datei zur Standard-Ausgabe schreiben und beenden (nützlich für Fehlersuche)",
"help.args.show_version": "nur die Version zur Standard-Ausgabe schreiben und beenden",
"checks.file_state.exists": "Datei existiert (soll aber nicht)",
"checks.file_state.missing": "Datei existiert nicht (soll aber)",
"checks.file_state.timestamp_implausible": "Datei ist scheinbar aus der Zukunft",
"checks.file_state.size_implausible": "Datei hat scheinbar eine negative Größe",
"checks.file_state.too_old": "Datei ist zu alt",
"checks.file_state.too_big": "Datei ist zu groß",
"checks.tls_certificate.not_obtainable": "TLS-Zertifikat nicht abrufbar; evtl. bereits ausgelaufen",
@ -30,5 +32,6 @@
"misc.state_file_path": "Pfad zur Zustands-Datei",
"misc.check_procedure_failed": "Prüfungs-Prozedur fehlgeschlagen",
"misc.still_running": "läuft bereits/noch",
"misc.cleanup_info": "{{count}} alte Ergebnis-Datensätze gelöscht"
"misc.cleanup_info": "{{count}} alte Ergebnis-Datensätze gelöscht",
"misc.order_file_not_found": "Auftrags-Datei nicht gefunden: {{path}}"
}

View file

@ -14,9 +14,11 @@
"help.args.erase_state": "whether the state shall be deleted on start; this will cause that all checks are executed immediatly",
"help.args.show_schema": "print the hmdl JSON schema to stdout and exit",
"help.args.expose_full_order": "only print the extended order to stdout and exit (useful for debugging)",
"help.args.show_version": "only print the version to stdout and exit",
"checks.file_state.exists": "file exists (but shall not)",
"checks.file_state.missing": "file does not exist (but shall)",
"checks.file_state.timestamp_implausible": "file is apparently from the future",
"checks.file_state.size_implausible": "file has apparently a negative size",
"checks.file_state.too_old": "file is too old",
"checks.file_state.too_big": "file is too big",
"checks.tls_certificate.not_obtainable": "TLS certificate not obtainable; maybe already expired",
@ -30,5 +32,6 @@
"misc.state_file_path": "state file path",
"misc.check_procedure_failed": "check procedure failed",
"misc.still_running": "already/still running",
"misc.cleanup_info": "removed {{count}} old result entries"
"misc.cleanup_info": "removed {{count}} old result entries",
"misc.order_file_not_found": "order file not found: {{path}}"
}

View file

@ -11,31 +11,71 @@ class implementation_check_kind_file_state(interface_check_kind):
"path": {
"type": "string"
},
"strict": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": True
},
"exist": {
"exist_mode": {
"description": "whether the file is supposed to exist or not",
"type": "boolean",
"default": True
"default": True,
},
"exist_critical": {
"description": "whether a violation of the extist state (parameter 'exist_mode') shall be considered as critical (true) or concerning (false)",
"type": "boolean",
"default": True,
},
"age_threshold_concerning": {
"description": "in seconds; ignored if 'exist_mode' is set to false",
"type": ["null", "integer"],
"exclusiveMinimum": 0,
"default": None,
},
"age_threshold_critical": {
"description": "in seconds; ignored if 'exist_mode' is set to false",
"type": ["null", "integer"],
"exclusiveMinimum": 0,
"default": None,
},
"size_threshold_concerning": {
"description": "in bytes; ignored if 'exist_mode' is set to false",
"type": ["null", "integer"],
"exclusiveMinimum": 0,
"default": None,
},
"size_threshold_critical": {
"description": "in bytes; ignored if 'exist_mode' is set to false",
"type": ["null", "integer"],
"exclusiveMinimum": 0,
"default": None,
},
# deprecated
"strict": {
"deprecated": True,
"description": "",
"type": "boolean",
"default": True,
},
"exist": {
"deprecated": True,
"description": "",
"type": "boolean",
"default": True,
},
"age_threshold": {
"description": "in seconds; ignored if 'exist' is set to false",
"deprecated": True,
"description": "",
"type": ["null", "integer"],
"exclusiveMinimum": 0,
"default": None,
},
"size_threshold": {
"description": "in bytes; ignored if 'exist' is set to false",
"type": "integer",
"deprecated": True,
"description": "",
"type": ["null", "integer"],
"exclusiveMinimum": 0,
"default": None,
},
},
"required": [
"path"
"path",
]
}
@ -44,89 +84,167 @@ class implementation_check_kind_file_state(interface_check_kind):
[implementation]
'''
def normalize_order_node(self, node):
if ("path" not in node):
raise ValueError("missing mandatory field 'path'")
version = (
"v1"
if (not ("exist_mode" in node)) else
"v2"
)
if (version == "v1"):
if ("path" not in node):
raise ValueError("missing mandatory field 'path'")
else:
node_ = dict_merge(
{
"critical": True,
"exist": True,
"age_threshold": None,
"size_threshold": None,
},
node
)
return {
"exist_mode": node_["exist"],
"exist_critical": node_["strict"],
"age_threshold_concerning": (
None
if node_["strict"] else
node_["age_threshold"]
),
"age_threshold_critical": (
node_["age_threshold"]
if node_["strict"] else
None
),
"size_threshold_concerning": (
None
if node_["strict"] else
node_["age_threshold"]
),
"size_threshold_critical": (
node_["age_threshold"]
if node_["strict"] else
None
),
}
elif (version == "v2"):
if ("path" not in node):
raise ValueError("missing mandatory field 'path'")
else:
node_ = dict_merge(
{
"exist_mode": True,
"exist_critical": True,
"age_threshold_concerning": None,
"age_threshold_critical": None,
"size_threshold_concerning": None,
"size_threshold_critical": None,
},
node
)
return node_
else:
return dict_merge(
{
"strict": True,
"exist": True,
"age_threshold": None,
"size_threshold": None,
},
node
)
raise ValueError("unhandled")
'''
[implementation]
'''
def run(self, parameters):
condition = enum_condition.ok
faults = []
data = {}
exists = _os.path.exists(parameters["path"])
if (not parameters["exist"]):
if (not parameters["exist_mode"]):
if (exists):
condition = (
enum_condition.critical
if parameters["exist_critical"] else
enum_condition.concerning
)
faults.append(translation_get("checks.file_state.exists"))
else:
pass
else:
if (not exists):
condition = (
enum_condition.critical
if parameters["exist_critical"] else
enum_condition.concerning
)
faults.append(translation_get("checks.file_state.missing"))
else:
stat = _os.stat(parameters["path"])
## age
if True:
if (parameters["age_threshold"] is None):
pass
timestamp_this = get_current_timestamp()
timestamp_that = int(stat.st_atime)
age = (timestamp_this - timestamp_that)
if (age < 0):
condition = enum_condition.critical
faults.append(translation_get("checks.file_state.timestamp_implausible"))
else:
timestamp_this = get_current_timestamp()
timestamp_that = int(stat.st_atime)
age = (timestamp_this - timestamp_that)
if (age >= 0):
pass
else:
faults.append(translation_get("checks.file_state.timestamp_implausible"))
if (age <= parameters["age_threshold"]):
pass
else:
if (
(parameters["age_threshold_critical"] is not None)
and
(age > parameters["age_threshold_critical"])
):
condition = enum_condition.critical
faults.append(translation_get("checks.file_state.too_old"))
data = dict_merge(
data,
{
"timestamp_of_checking_instance": timestamp_this,
"timestamp_of_file": timestamp_that,
"age_value_in_seconds": age,
"age_threshold_in_seconds": parameters["age_threshold"],
}
)
else:
if (
(parameters["age_threshold_concerning"] is not None)
and
(age > parameters["age_threshold_concerning"])
):
condition = enum_condition.concerning
faults.append(translation_get("checks.file_state.too_old"))
else:
pass
data = dict_merge(
data,
{
"timestamp_of_checking_instance": timestamp_this,
"timestamp_of_file": timestamp_that,
"age_value_in_seconds": age,
"age_threshold_in_seconds_concerning": parameters["age_threshold_concerning"],
"age_threshold_in_seconds_concerning": parameters["age_threshold_critical"],
}
)
## size
if True:
if (parameters["size_threshold"] is None):
pass
size = stat.st_size
if (size < 0):
condition = enum_condition.critical
faults.append(translation_get("checks.file_state.size_implausible"))
else:
size = stat.st_size
if (size <= parameters["size_threshold"]):
pass
else:
if (
(parameters["size_threshold_critical"] is not None)
and
(size > parameters["size_threshold_critical"])
):
condition = enum_condition.critical
faults.append(translation_get("checks.file_state.too_big"))
data = dict_merge(
data,
{
"size_value_in_bytes": size,
"size_threshold_in_bytes": parameters["size_threshold"],
}
)
else:
if (
(parameters["size_threshold_concerning"] is not None)
and
(size > parameters["size_threshold_concerning"])
):
condition = enum_condition.concerning
faults.append(translation_get("checks.file_state.too_big"))
else:
pass
data = dict_merge(
data,
{
"size_value_in_bytes": size,
"size_threshold_in_bytes": parameters["size_threshold"],
}
)
return {
"condition": (
enum_condition.ok
if (len(faults) <= 0) else
(
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
)
),
"condition": condition,
"info": {
"path": parameters["path"],
"faults": faults,

View file

@ -32,13 +32,20 @@ class implementation_check_kind_generic_remote(interface_check_kind):
"default" : 95,
"description" : "maximaler Füllstand in Prozent"
},
"strict" : {
"type" : "boolean",
"default" : False
}
"critical": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": True
},
"strict": {
"deprecated": True,
"description": "alias for 'critical'",
"type": "boolean",
"default": True
},
},
"required": [
"host"
"host",
]
}
@ -47,20 +54,53 @@ class implementation_check_kind_generic_remote(interface_check_kind):
[implementation]
'''
def normalize_order_node(self, node):
    '''
    Normalizes an order node for the generic_remote check.

    Accepts both the deprecated v1 parameter set (with 'strict') and the
    current v2 parameter set (with 'critical'); v1 input is translated
    into the v2 shape.

    :param node: raw order node (dict) as read from the order file
    :return: normalized parameter dict
    :raises ValueError: if the mandatory parameter 'host' is missing
    '''
    version = (
        "v1"
        if (not ("critical" in node)) else
        "v2"
    )
    if (version == "v1"):
        if (not "host" in node):
            raise ValueError("mandatory parameter \"host\" missing")
        else:
            node_ = dict_merge(
                {
                    "ssh_port": None,
                    "ssh_user": None,
                    "ssh_key": None,
                    "mount_point": "/",
                    "threshold": 95,
                    "strict": False,
                },
                node
            )
            return {
                # fixed: 'host' must be kept in the normalized result
                "host": node_["host"],
                "ssh_port": node_["ssh_port"],
                "ssh_user": node_["ssh_user"],
                "ssh_key": node_["ssh_key"],
                # fixed: wrong key names — were node_["ssh_path"] and
                # node_["ssh_threshold"], which are never defined and
                # would always raise a KeyError
                "mount_point": node_["mount_point"],
                "threshold": node_["threshold"],
                "critical": node_["strict"],
            }
    elif (version == "v2"):
        if (not "host" in node):
            raise ValueError("mandatory parameter \"host\" missing")
        else:
            node_ = dict_merge(
                {
                    "ssh_port": None,
                    "ssh_user": None,
                    "ssh_key": None,
                    "mount_point": "/",
                    "threshold": 95,
                    # NOTE(review): the JSON schema declares default true for
                    # 'critical' — confirm whether False is intended here
                    "critical": False,
                },
                node
            )
            return node_
    else:
        raise ValueError("unhandled")
'''

View file

@ -69,14 +69,20 @@ class implementation_check_kind_http_request(interface_check_kind):
"required": [
]
},
"strict": {
"critical": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": True
},
"strict": {
"deprecated": True,
"description": "alias for 'critical'",
"type": "boolean",
"default": True
},
},
"required": [
"request"
"request",
]
}
@ -85,26 +91,62 @@ class implementation_check_kind_http_request(interface_check_kind):
[implementation]
'''
def normalize_order_node(self, node):
node_ = dict_merge(
{
"request": {
"method": "GET"
},
"timeout": 5.0,
"follow_redirects": False,
"response": {
"status_code": 200
},
"strict": True,
},
node,
True
version = (
"v1"
if (not ("critical" in node)) else
"v2"
)
allowed_methods = set(["GET", "POST"])
if (node_["request"]["method"] not in allowed_methods):
raise ValueError("invalid HTTP request method: %s" % node_["request"]["method"])
if (version == "v1"):
node_ = dict_merge(
{
"request": {
"method": "GET"
},
"timeout": 5.0,
"follow_redirects": False,
"response": {
"status_code": 200
},
"strict": True,
},
node,
True
)
allowed_methods = set(["GET", "POST"])
if (node_["request"]["method"] not in allowed_methods):
raise ValueError("invalid HTTP request method: %s" % node_["request"]["method"])
else:
return {
"request": node_["request"],
"timeout": node_["timeout"],
"follow_redirects": node_["follow_redirects"],
"response": node_["response"],
"critical": node_["strict"],
}
elif (version == "v2"):
node_ = dict_merge(
{
"request": {
"method": "GET"
},
"timeout": 5.0,
"follow_redirects": False,
"response": {
"status_code": 200
},
"critical": True,
},
node,
True
)
allowed_methods = set(["GET", "POST"])
if (node_["request"]["method"] not in allowed_methods):
raise ValueError("invalid HTTP request method: %s" % node_["request"]["method"])
else:
return node_
else:
return node_
raise ValueError("unhandled")
'''

View file

@ -16,10 +16,10 @@ class implementation_check_kind_script(interface_check_kind):
"item": {
"type": "string"
}
}
},
},
"required": [
"path"
"path",
]
}

View file

@ -20,20 +20,34 @@ class implementation_check_kind_tls_certificate(interface_check_kind):
"type": "integer",
"default": 443
},
"strict": {
"description": "whether a violation of this check shall be leveled as critical instead of concerning",
"type": "boolean",
"default": True
},
"expiry_threshold": {
"description": "in days; allowed amount of valid days before the certificate expires",
"expiry_threshold_concerning": {
"description": "in days; allowed amount of valid days before the certificate expires; threshold for condition 'concerning'; 'null' means 'report at no value'",
"type": ["null", "integer"],
"default": 7,
"minimum": 0
}
},
"expiry_threshold_critical": {
"description": "in days; allowed amount of valid days before the certificate expires; threshold for condition 'critical'; 'null' means 'report at no value'",
"type": ["null", "integer"],
"default": 1,
"minimum": 0
},
"expiry_threshold": {
"deprecated": True,
"description": "",
"type": ["null", "integer"],
"minimum": 0,
"default": None,
},
"strict": {
"deprecated": True,
"description": "",
"type": ["null", "boolean"],
"default": None,
},
},
"required": [
"host"
"host",
]
}
@ -42,20 +56,60 @@ class implementation_check_kind_tls_certificate(interface_check_kind):
[implementation]
'''
def normalize_order_node(self, node):
    '''
    Normalizes an order node for the tls_certificate check.

    Accepts both the deprecated v1 parameter set ('expiry_threshold',
    'strict') and the current v2 parameter set
    ('expiry_threshold_concerning', 'expiry_threshold_critical');
    v1 input is translated into the v2 shape.

    :param node: raw order node (dict) as read from the order file
    :return: normalized v2 parameter dict
    :raises ValueError: if the mandatory field 'host' is missing
    '''
    version = (
        "v1"
        if (
            (not ("expiry_threshold_concerning" in node))
            and
            (not ("expiry_threshold_critical" in node))
        ) else
        "v2"
    )
    if (version == "v1"):
        if (not "host" in node):
            raise ValueError("missing mandatory field 'host'")
        else:
            node_ = dict_merge(
                {
                    "port": 443,
                    "expiry_threshold": 7,
                    "strict": True,
                    # "allow_self_signed": False,
                    # "allow_bad_domain": False,
                },
                node
            )
            # v1 semantics: with 'strict' the single threshold acts as the
            # critical one, otherwise as the concerning one
            return {
                # fixed: 'host' must be kept — run() reads parameters["host"]
                "host": node_["host"],
                "port": node_["port"],
                "expiry_threshold_concerning": (
                    None
                    if node_["strict"] else
                    node_["expiry_threshold"]
                ),
                "expiry_threshold_critical": (
                    node_["expiry_threshold"]
                    if node_["strict"] else
                    None
                ),
            }
    elif (version == "v2"):
        if (not "host" in node):
            raise ValueError("missing mandatory field 'host'")
        else:
            node_ = dict_merge(
                {
                    "port": 443,
                    "expiry_threshold_concerning": 7,
                    "expiry_threshold_critical": 1,
                    # "allow_self_signed": False,
                    # "allow_bad_domain": False,
                },
                node
            )
            return node_
    else:
        raise ValueError("unhandled")
'''
@ -65,6 +119,7 @@ class implementation_check_kind_tls_certificate(interface_check_kind):
faults = []
data = {}
context = _ssl.create_default_context()
condition = enum_condition.ok
try:
socket = _socket.create_connection((parameters["host"], parameters["port"], ))
socket_wrapped = context.wrap_socket(socket, server_hostname = parameters["host"])
@ -75,6 +130,7 @@ class implementation_check_kind_tls_certificate(interface_check_kind):
stuff = None
if (stuff is None):
faults.append(translation_get("checks.tls_certificate.not_obtainable"))
condition = enum_condition.critical
else:
# version == "TLSv1.3"
expiry_timestamp = _ssl.cert_time_to_seconds(stuff["notAfter"])
@ -87,20 +143,25 @@ class implementation_check_kind_tls_certificate(interface_check_kind):
"days": days,
},
)
if (days <= parameters["expiry_threshold"]):
if (
(parameters["expiry_threshold_critical"] is not None)
and
(days <= parameters["expiry_threshold_critical"])
):
faults.append(translation_get("checks.tls_certificate.expires_soon"))
condition = enum_condition.critical
else:
pass
if (
(parameters["expiry_threshold_concerning"] is not None)
and
(days <= parameters["expiry_threshold_concerning"])
):
faults.append(translation_get("checks.tls_certificate.expires_soon"))
condition = enum_condition.concerning
else:
pass
return {
"condition": (
enum_condition.ok
if (len(faults) <= 0) else
(
enum_condition.critical
if parameters["strict"] else
enum_condition.concerning
)
),
"condition": condition,
"info": {
"host": parameters["host"],
"port": parameters["port"],

View file

@ -1,4 +1,7 @@
def main():
## const
version = "0.8"
## setup translation for the first time
translation_initialize("en", env_get_language())
@ -8,6 +11,7 @@ def main():
formatter_class = _argparse.ArgumentDefaultsHelpFormatter
)
argumentparser.add_argument(
nargs = "?",
type = str,
default = "monitoring.hmdl.json",
dest = "order_path",
@ -15,12 +19,12 @@ def main():
help = translation_get("help.args.order_path"),
)
argumentparser.add_argument(
"-x",
"--erase-state",
"-v",
"--version",
action = "store_true",
default = False,
dest = "erase_state",
help = translation_get("help.args.erase_state"),
dest = "show_version",
help = translation_get("help.args.show_version"),
)
argumentparser.add_argument(
"-s",
@ -38,6 +42,14 @@ def main():
dest = "expose_full_order",
help = translation_get("help.args.expose_full_order"),
)
argumentparser.add_argument(
"-x",
"--erase-state",
action = "store_true",
default = False,
dest = "erase_state",
help = translation_get("help.args.erase_state"),
)
### v conf stuff v
argumentparser.add_argument(
"-d",
@ -86,302 +98,324 @@ def main():
)
args = argumentparser.parse_args()
## vars
id_ = _hashlib.sha256(_os.path.abspath(args.order_path).encode("ascii")).hexdigest()[:8]
database_path = (
args.database_path
if (args.database_path is not None) else
_os.path.join(
_tempfile.gettempdir(),
string_coin("monitoring-state-{{id}}.sqlite", {"id": id_})
)
)
## exec
### setup translation for the second time
if (args.language is not None):
translation_initialize("en", args.language)
### load check kind implementations
check_kind_implementations = {
"script": implementation_check_kind_script(),
"file_state": implementation_check_kind_file_state(),
"tls_certificate": implementation_check_kind_tls_certificate(),
"http_request": implementation_check_kind_http_request(),
"generic_remote" : implementation_check_kind_generic_remote(),
}
### load notification channel implementations
notification_channel_implementations = {
"console": implementation_notification_channel_console(),
"email": implementation_notification_channel_email(),
"libnotify": implementation_notification_channel_libnotify(),
}
if (args.show_schema):
_sys.stdout.write(
_json.dumps(
order_schema_root(
check_kind_implementations,
notification_channel_implementations
),
indent = "\t"
)
+
"\n"
)
if (args.show_version):
_sys.stdout.write(version + "\n")
else:
### get order data
order = order_load(
check_kind_implementations,
notification_channel_implementations,
_os.path.abspath(args.order_path)
)
### setup translation for the second time
if (args.language is not None):
translation_initialize("en", args.language)
if (args.expose_full_order):
_sys.stdout.write(_json.dumps(order, indent = "\t") + "\n")
_sys.exit(1)
else:
_sys.stderr.write(
string_coin(
"[info] {{label}}: {{path}}\n",
{
"label": translation_get("misc.state_file_path"),
"path": database_path,
}
### load check kind implementations
check_kind_implementations = {
"script": implementation_check_kind_script(),
"file_state": implementation_check_kind_file_state(),
"tls_certificate": implementation_check_kind_tls_certificate(),
"http_request": implementation_check_kind_http_request(),
"generic_remote" : implementation_check_kind_generic_remote(),
}
### load notification channel implementations
notification_channel_implementations = {
"console": implementation_notification_channel_console(),
"email": implementation_notification_channel_email(),
"libnotify": implementation_notification_channel_libnotify(),
}
if (args.show_schema):
_sys.stdout.write(
_json.dumps(
order_schema_root(
check_kind_implementations,
notification_channel_implementations
),
indent = "\t"
)
+
"\n"
)
### mutex check
if (_os.path.exists(args.mutex_path)):
else:
### vars
if (not _os.path.exists(args.order_path)):
_sys.stderr.write(
string_coin(
"[error] {{message}} ({{path}})\n",
"{{message}}\n",
{
"message": translation_get("misc.still_running"),
"path": args.mutex_path,
}
)
)
_sys.exit(2)
else:
### setup database
sqlite_query_set(
database_path,
"CREATE TABLE IF NOT EXISTS results(check_name TEXT NOT NULL, timestamp INTEGER NOT NULL, condition TEXT NOT NULL, notification_sent BOOLEAN NOT NULL, info TEXT NOT NULL);",
{}
)
### clean database
result = sqlite_query_put(
database_path,
"DELETE FROM results WHERE ((timestamp < :timestamp_min) OR :erase_state);",
{
"timestamp_min": (get_current_timestamp() - args.time_to_live),
"erase_state": args.erase_state,
}
)
_sys.stderr.write(
string_coin(
"[info] {{text}}\n",
{
"text": translation_get(
"misc.cleanup_info",
"message": translation_get(
"misc.order_file_not_found",
{
"count": ("%u" % result.rowcount),
"path": args.order_path,
}
),
}
)
)
file_write(args.mutex_path, "", {"append": True})
### iterate through checks
for check_data in order["checks"]:
if (not check_data["active"]):
pass
else:
### get old state and examine whether the check shall be executed
rows = sqlite_query_get(
database_path,
"SELECT timestamp, condition, notification_sent FROM results WHERE (check_name = :check_name) ORDER BY timestamp DESC LIMIT :limit;",
_sys.exit(1)
else:
database_path = (
args.database_path
if (args.database_path is not None) else
_os.path.join(
_tempfile.gettempdir(),
string_coin(
"monitoring-state-{{id}}.sqlite",
{
"check_name": check_data["name"],
"limit": (check_data["threshold"] + 1),
"id": _hashlib.sha256(_os.path.abspath(args.order_path).encode("ascii")).hexdigest()[:8]
}
)
if (len(rows) <= 0):
old_item_state = None
else:
last_notification_timestamp = None
count = 1
for row in rows[1:]:
if (row[1] == rows[0][1]):
count += 1
else:
break
if (count > check_data["threshold"]):
count = None
else:
pass
for row in rows:
if (row[2]):
last_notification_timestamp = row[0]
break
else:
pass
old_item_state = {
"timestamp": rows[0][0],
"condition": condition_decode(rows[0][1]),
"count": count,
"last_notification_timestamp": last_notification_timestamp,
}
)
)
timestamp = get_current_timestamp()
due = (
(old_item_state is None)
or
(old_item_state["condition"] != enum_condition.ok)
or
((timestamp - old_item_state["timestamp"]) >= check_data["schedule"]["regular_interval"])
or
(
(old_item_state["count"] is not None)
and
((timestamp - old_item_state["timestamp"]) >= check_data["schedule"]["attentive_interval"])
### get order data
order = order_load(
check_kind_implementations,
notification_channel_implementations,
_os.path.abspath(args.order_path)
)
if (args.expose_full_order):
_sys.stdout.write(_json.dumps(order, indent = "\t") + "\n")
_sys.exit(1)
else:
_sys.stderr.write(
string_coin(
"[info] {{label}}: {{path}}\n",
{
"label": translation_get("misc.state_file_path"),
"path": database_path,
}
)
)
### mutex check
if (_os.path.exists(args.mutex_path)):
_sys.stderr.write(
string_coin(
"[error] {{message}} ({{path}})\n",
{
"message": translation_get("misc.still_running"),
"path": args.mutex_path,
}
)
)
if (not due):
pass
else:
_sys.stderr.write(
string_coin(
"-- {{check_name}}\n",
_sys.exit(2)
else:
### setup database
sqlite_query_set(
database_path,
"CREATE TABLE IF NOT EXISTS results(check_name TEXT NOT NULL, timestamp INTEGER NOT NULL, condition TEXT NOT NULL, notification_sent BOOLEAN NOT NULL, info TEXT NOT NULL);",
{}
)
### clean database
result = sqlite_query_put(
database_path,
"DELETE FROM results WHERE ((timestamp < :timestamp_min) OR :erase_state);",
{
"timestamp_min": (get_current_timestamp() - args.time_to_live),
"erase_state": args.erase_state,
}
)
_sys.stderr.write(
string_coin(
"[info] {{text}}\n",
{
"text": translation_get(
"misc.cleanup_info",
{
"count": ("%u" % result.rowcount),
}
),
}
)
)
file_write(args.mutex_path, "", {"append": True})
### iterate through checks
for check_data in order["checks"]:
if (not check_data["active"]):
pass
else:
### get old state and examine whether the check shall be executed
rows = sqlite_query_get(
database_path,
"SELECT timestamp, condition, notification_sent FROM results WHERE (check_name = :check_name) ORDER BY timestamp DESC LIMIT :limit;",
{
"check_name": check_data["name"],
"limit": (check_data["threshold"] + 1),
}
)
)
if (len(rows) <= 0):
old_item_state = None
else:
last_notification_timestamp = None
count = 1
for row in rows[1:]:
if (row[1] == rows[0][1]):
count += 1
else:
break
if (count > check_data["threshold"]):
count = None
else:
pass
for row in rows:
if (row[2]):
last_notification_timestamp = row[0]
break
else:
pass
old_item_state = {
"timestamp": rows[0][0],
"condition": condition_decode(rows[0][1]),
"count": count,
"last_notification_timestamp": last_notification_timestamp,
}
### execute check and set new state
try:
result = check_kind_implementations[check_data["kind"]].run(check_data["parameters"])
except Exception as error:
result = {
"condition": enum_condition.unknown,
"info": {
"cause": translation_get("misc.check_procedure_failed"),
"error": str(error),
},
}
count = (
1
if (
timestamp = get_current_timestamp()
due = (
(old_item_state is None)
or
(old_item_state["condition"] != result["condition"])
) else
(
(old_item_state["count"] + 1)
if (
(old_item_state["condition"] != enum_condition.ok)
or
((timestamp - old_item_state["timestamp"]) >= check_data["schedule"]["regular_interval"])
or
(
(old_item_state["count"] is not None)
and
((old_item_state["count"] + 1) <= check_data["threshold"])
) else
None
((timestamp - old_item_state["timestamp"]) >= check_data["schedule"]["attentive_interval"])
)
)
)
shall_send_notification = (
(
(
(count is not None)
and
(count == check_data["threshold"])
if (not due):
pass
else:
_sys.stderr.write(
string_coin(
"-- {{check_name}}\n",
{
"check_name": check_data["name"],
}
)
)
or
(
(count is None)
and
check_data["annoy"]
)
or
(
(count is None)
and
### execute check and set new state
try:
result = check_kind_implementations[check_data["kind"]].run(check_data["parameters"])
except Exception as error:
result = {
"condition": enum_condition.unknown,
"info": {
"cause": translation_get("misc.check_procedure_failed"),
"error": str(error),
},
}
count = (
1
if (
(old_item_state is None)
or
(old_item_state["condition"] != result["condition"])
) else
(
(old_item_state["count"] + 1)
if (
(old_item_state["count"] is not None)
and
((old_item_state["count"] + 1) <= check_data["threshold"])
) else
None
)
)
shall_send_notification = (
(
(old_item_state is not None)
and
(old_item_state["last_notification_timestamp"] is not None)
and
(check_data["schedule"]["reminding_interval"] is not None)
and
(
(timestamp - old_item_state["last_notification_timestamp"])
>=
check_data["schedule"]["reminding_interval"]
(count is not None)
and
(count == check_data["threshold"])
)
or
(
(count is None)
and
check_data["annoy"]
)
or
(
(count is None)
and
(
(old_item_state is not None)
and
(old_item_state["last_notification_timestamp"] is not None)
and
(check_data["schedule"]["reminding_interval"] is not None)
and
(
(timestamp - old_item_state["last_notification_timestamp"])
>=
check_data["schedule"]["reminding_interval"]
)
)
)
)
)
)
and
(
(result["condition"] != enum_condition.ok)
or
args.send_ok_notifications
)
)
new_item_state = {
"timestamp": timestamp,
"condition": result["condition"],
"count": count,
"last_notification_timestamp": (
timestamp
if shall_send_notification else
(
None
if (old_item_state is None) else
old_item_state["last_notification_timestamp"]
)
),
}
sqlite_query_put(
database_path,
"INSERT INTO results(check_name, timestamp, condition, notification_sent, info) VALUES (:check_name, :timestamp, :condition, :notification_sent, :info);",
{
"check_name": check_data["name"],
"timestamp": timestamp,
"condition": condition_encode(result["condition"]),
"notification_sent": shall_send_notification,
"info": _json.dumps(result["info"]),
}
)
### send notifications
if (not shall_send_notification):
pass
else:
for notification in check_data["notifications"]:
notification_channel_implementations[notification["kind"]].notify(
notification["parameters"],
check_data["name"],
check_data,
new_item_state,
dict_merge(
(
{}
if (check_data["custom"] is None) else
{"custom": check_data["custom"]}
),
result["info"]
and
(
(result["condition"] != enum_condition.ok)
or
args.send_ok_notifications
)
)
new_item_state = {
"timestamp": timestamp,
"condition": result["condition"],
"count": count,
"last_notification_timestamp": (
timestamp
if shall_send_notification else
(
None
if (old_item_state is None) else
old_item_state["last_notification_timestamp"]
)
),
}
sqlite_query_put(
database_path,
"INSERT INTO results(check_name, timestamp, condition, notification_sent, info) VALUES (:check_name, :timestamp, :condition, :notification_sent, :info);",
{
"check_name": check_data["name"],
"timestamp": timestamp,
"condition": condition_encode(result["condition"]),
"notification_sent": shall_send_notification,
"info": _json.dumps(result["info"]),
}
)
if (not _os.exists(args.mutex_path)):
pass
else:
_os.remove(args.mutex_path)
### send notifications
if (not shall_send_notification):
pass
else:
for notification in check_data["notifications"]:
notification_channel_implementations[notification["kind"]].notify(
notification["parameters"],
check_data["name"],
check_data,
new_item_state,
dict_merge(
(
{}
if (check_data["custom"] is None) else
{"custom": check_data["custom"]}
),
result["info"]
)
)
if (not _os.exists(args.mutex_path)):
pass
else:
_os.remove(args.mutex_path)
main()