13 Commits
4.04...main

Author SHA1 Message Date
Thorsten Spille
da985b7795 set raw str for regex l13 2025-09-29 21:49:37 +02:00
Thorsten Spille
634e283178 Merge pull request #8 from bashclub/testing (Testing) 2025-08-14 19:04:37 +02:00
Thorsten Spille
69aedbb3ec Merge branch 'main' into testing 2025-08-14 19:04:30 +02:00
Thorsten Spille
ae60a8a030 Fix for Python 3.12.10+ 2025-08-14 18:58:14 +02:00
Thorsten Spille
9ed22b5311 Update checkzfs.py (4.12) 2024-08-19 15:49:14 +02:00
Chriz
bf46208c4d 4.11 (Individual Exclude) 2024-03-26 14:06:51 +01:00
Thorsten Spille
ae5a89a21b Merge pull request #6 from bashclub/testing (v4.09) 2023-05-26 21:38:36 +02:00
Thorsten Spille
2fcc2dd26f v4.10 2023-05-26 21:38:26 +02:00
Thorsten Spille
d359ec8042 v4.09 2023-04-12 13:41:42 +02:00
Thorsten Spille
672b23574b Add check-snapshot-age 2022-11-03 22:52:05 +01:00
Thorsten Spille
d10b04aa80 Update 4.08 2022-11-03 22:01:44 +01:00
Thorsten Spille
eac2c1c4d3 Merge pull request #3 from hpannenb/patch-1 (Update README.md) 2022-02-01 19:36:30 +01:00
Holger Pannenbäcker
faaf5494d7 Update README.md (Added missing "/".) 2022-01-31 19:13:43 +01:00
3 changed files with 139 additions and 24 deletions

README.md

@@ -3,6 +3,6 @@ This script checks yout ZFS replication an generates reports in different flavou
 ```
 wget -O /usr/local/bin/checkzfs https://raw.githubusercontent.com/bashclub/check-zfs-replication/main/checkzfs.py
-chmod +x usr/local/bin/checkzfs
+chmod +x /usr/local/bin/checkzfs
 checkzfs --help
 ```

check-snapshot-age (new file, 37 lines)

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
import subprocess
import re
import time
#_snapshots = open("zfs.txt","r").read()
_snapshots = subprocess.check_output("/usr/sbin/zfs list -t snapshot -Hpo name,creation".split())
LABELS=("frequent","hourly","daily","weekly","monthly","yearly","backup-zfs","bashclub-zfs")
RE_LABELSEARCH = re.compile("|".join(LABELS))
_datasets = {}
for _datastore,_snapshot,_creation in re.findall(r"^([\w_./-]+)@([\w_.-]+)\t(\d+)",_snapshots.decode('utf-8'),re.M):
    if _datastore not in _datasets:
        _datasets[_datastore] = {}
    _label = RE_LABELSEARCH.search(_snapshot)
    if _label:
        _label = _label.group(0)
    else:
        _label = "other"
    if _label not in _datasets[_datastore]:
        _datasets[_datastore][_label] = []
    _datasets[_datastore][_label].append((_snapshot,int(_creation)))
for _datastore in _datasets.keys():
    print(_datastore)
    print("-"*40)
    for _label in _datasets[_datastore].keys():
        _data = _datasets[_datastore][_label]
        _first = time.strftime("%d.%m.%Y %H:%M:%S",time.localtime(_data[0][1]))
        _last = time.strftime("%d.%m.%Y %H:%M:%S",time.localtime(_data[-1][1]))
        _count = len(_data)
        print(f" {_label} {_count}")
        print(f" {_first} {_data[0][0]}")
        if _count > 1:
            print(f" {_last} {_data[-1][0]}")
    print("")
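Run without arguments, the script prints one block per dataset; the output is shaped roughly like the sample below (pool, dataset, and snapshot names are invented for illustration, timestamps use the %d.%m.%Y %H:%M:%S format from the strftime calls above):

```
rpool/data/vm-100-disk-0
----------------------------------------
 daily 7
 01.08.2025 00:00:01 zfs-auto-snap_daily-2025-08-01-0000
 07.08.2025 00:00:01 zfs-auto-snap_daily-2025-08-07-0000
 other 1
 05.08.2025 12:30:00 before-upgrade
```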

checkzfs.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 # vim: set fileencoding=utf-8:noet
-## Copyright 2021 sysops.tv ;-)
+## Copyright 2023 sysops.tv ;-)
 ## BSD-2-Clause
 ##
 ## Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
@@ -16,7 +16,7 @@
 ## GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 ## LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-VERSION = 4.05
+VERSION = 4.14
 ### for check_mk usage link or copy binary to check_mk_agent/local/checkzfs
 ### create /etc/check_mk/checkzfs ## the config file name matches the filename in check_mk_agent/local/
@@ -135,7 +135,7 @@ class zfs_dataset(object):
         return self.sorted_snapshots()[0]
-    def get_info(self,source,threshold=None,ignore_replica=False):
+    def get_info(self,source,threshold=None,maxsnapshots=None,ignore_replica=False):
         _latest = self._get_latest_snapshot(source if source != self else None) ## wenn das source dataset nicht man selber ist
         _status = -1
         _has_zfs_autosnapshot = any(map(lambda x: str(x.snapshot).startswith("zfs-auto-snap_"),self.snapshots.values()))
@@ -177,6 +177,18 @@ class zfs_dataset(object):
             _message = _("Rollback zu altem Snapshot. - '{0.snapshot}' nicht mehr vorhanden".format(self.latest_snapshot))
             _status = 2 ## crit
+        if maxsnapshots:
+            _maxsnapshot_status = list(
+                map(lambda x: x[1],
+                    filter(lambda y: y[0] < len(self.snapshots.keys()),
+                        zip(maxsnapshots,(1,2))
+                    )
+                )
+            )
+            if _maxsnapshot_status:
+                if _maxsnapshot_status[-1] > _status:
+                    _message = _("zu viele Snapshots")
+                    _status = _maxsnapshot_status[-1]
         if not self.checkzfs:
             _status = -1
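The nested map/filter/zip added in this hunk is dense; the standalone sketch below (invented example values, same logic) spells out what it computes:

```
maxsnapshots = (20, 50)   # warn above 20 snapshots, crit above 50 (example values)
_snapcount = 57           # stands in for len(self.snapshots.keys())
_status = 0               # current status of the dataset, 0 = ok
# zip() pairs each count limit with its status code: 1 = warn, 2 = crit.
_maxsnapshot_status = [_state for _limit, _state in zip(maxsnapshots, (1, 2))
                       if _limit < _snapcount]   # -> [1, 2]
# The most severe exceeded limit wins, but only if it is worse than the status already set.
if _maxsnapshot_status and _maxsnapshot_status[-1] > _status:
    _status = _maxsnapshot_status[-1]            # -> 2 (crit)
```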
@@ -215,7 +227,7 @@ class negative_regex_class(object):
         return not self.regex.search(text)
 class zfscheck(object):
-    ZFSLIST_REGEX = re.compile("^(?P<dataset>.*?)(?:|@(?P<snapshot>.*?))\t(?P<type>\w*)\t(?P<creation>\d+)\t(?P<guid>\d+)\t(?P<used>\d+|-)\t(?P<available>\d+|-)\t(?P<written>\d+|-)\t(?P<origin>.*?)\t(?P<autosnapshot>[-\w]+)\t(?P<checkzfs>[-\w]+)$",re.M)
+    ZFSLIST_REGEX = re.compile(r"^(?P<dataset>.*?)(?:|@(?P<snapshot>.*?))\t(?P<type>\w*)\t(?P<creation>\d+)\t(?P<guid>\d+)\t(?P<used>\d+|-)\t(?P<available>\d+|-)\t(?P<written>\d+|-)\t(?P<origin>.*?)\t(?P<autosnapshot>[-\w]+)\t(?P<checkzfs>[-\w]+)$",re.M)
     ZFS_DATASETS = {}
     ZFS_SNAPSHOTS = {}
     #VALIDCOLUMNS = ["source","replica","type","autosnap","snapshot","creation","guid","used","referenced","size","age","status","message"] ## valid columns
@@ -260,7 +272,7 @@ class zfscheck(object):
     }
     COLUMN_MAPPER = {}
-    def __init__(self,remote,source,sourceonly,legacyhosts,output,mail=None,prefix='REPLICA',debug=False,**kwargs):
+    def __init__(self,remote,source,sourceonly,legacyhosts,output,ignoreattr,mail=None,prefix='REPLICA',debug=False,**kwargs):
         _start_time = time.time()
         self.remote_hosts = remote.split(",") if remote else [""] if source and not sourceonly else [] ## wenn nicht und source woanders ... "" (also lokal) als remote
         self.source_hosts = source.split(",") if source else [""] ## wenn nix dann "" als local
@@ -271,6 +283,7 @@ class zfscheck(object):
         self.print_debug(f"Version: {VERSION}")
         self.prefix = prefix.strip().replace(" ","_") ## service name bei checkmk leerzeichen durch _ ersetzen
         self.rawdata = False
+        self.ignoreattr = ignoreattr
         self.mail_address = mail
         self._overall_status = []
         self.sortreverse = False
@@ -463,7 +476,7 @@ class zfscheck(object):
                 continue
             #if _dataset.remote in self.remote_hosts:## or _dataset.autosnapshot == 0: ## wenn das dataset von der remote seite ist ... dann weiter oder wenn autosnasphot explizit aus ist ... dann nicht als source hinzufügen
             # continue
-            _dataset_info = _dataset.get_info(_dataset,threshold=self.threshold,ignore_replica=self.sourceonly)
+            _dataset_info = _dataset.get_info(_dataset,threshold=self.threshold,maxsnapshots=self.maxsnapshots,ignore_replica=self.sourceonly)
             self._overall_status.append(_dataset_info.get("status",-1)) ## alle stati für email overall status
             _output.append(_dataset_info)
             if self.sourceonly == True:
@@ -471,7 +484,7 @@ class zfscheck(object):
             for _replica in _dataset.replica: ## jetzt das dataset welches als source angezeigt wird (alle filter etc entsprochen nach replika durchsuchen
                 #if not self.replicafilter.search(_replica.dataset_name):
                 # continue
-                _replica_info = _replica.get_info(_dataset,threshold=self.threshold) ## verarbeitung ausgabe aus klasse
+                _replica_info = _replica.get_info(_dataset,threshold=self.threshold,maxsnapshots=self.maxsnapshots) ## verarbeitung ausgabe aus klasse
                 self._overall_status.append(_replica_info.get("status",-1)) ## fehler aus replica zu overall status für mail adden
                 _output.append(_replica_info)
@@ -482,7 +495,7 @@ class zfscheck(object):
             yield _match.groupdict()
     def _call_proc(self,remote=None):
-        ZFS_ATTRIBUTES = "name,type,creation,guid,used,available,written,origin,com.sun:auto-snapshot,tv.sysops:checkzfs" ## wenn ändern dann auch regex oben anpassen
+        ZFS_ATTRIBUTES = f"name,type,creation,guid,used,available,written,origin,com.sun:auto-snapshot,{self.ignoreattr}" ## wenn ändern dann auch regex oben anpassen
         ### eigentlicher zfs aufruf, sowohl local als auch remote
         zfs_args = ["zfs", "list",
             "-t", "all",
@@ -599,20 +612,20 @@ class zfscheck(object):
             _written = _item.get("written","0")
             _available = _item.get("available","0")
             _used = _item.get("used","0")
-            if _status == -1: ## tv.sysops:checkzfs=ignore wollen wir nicht
+            if _status == -1: ## tv.sysops:checkzfs=ignore wollen wir nicht (ignoreattr)
                 continue
             if self.maxsnapshots:
                 _warn = self.maxsnapshots[0]
                 _crit = self.maxsnapshots[1]
-                _maxsnapshots = f"{_warn};{_crit}"
-                if _status == 0:
-                    _status = "P"
+                _maxsnapshots = f"{_warn};{_crit}".replace("inf","")
+                #if _status == 0:
+                # _status = "P"
             else:
                 _maxsnapshots = ";"
             if self.threshold:
                 _warn = self.threshold[0] * 60
                 _crit = self.threshold[1] * 60
-                _threshold = f"{_warn};{_crit}"
+                _threshold = f"{_warn};{_crit}".replace("inf","")
             else:
                 _threshold = ";"
             _msg = _item.get("message","").strip()
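The two .replace("inf","") calls added in this hunk strip unbounded limits out of the warn;crit strings handed to checkmk. A standalone sketch with invented values (assuming, as the inf handling suggests, that the thresholds are floats):

```
# threshold is (warn, crit) in minutes; an unset crit side is float("inf").
threshold = (30.0, float("inf"))
_warn, _crit = threshold[0] * 60, threshold[1] * 60
print(f"{_warn};{_crit}".replace("inf",""))   # prints "1800.0;" instead of "1800.0;inf"
```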
@@ -695,7 +708,7 @@ class zfscheck(object):
         if not _email:
             _users = open("/etc/pve/user.cfg","rt").read()
             _email = "root@{0}".format(_hostname)
-            _emailmatch = re.search("^user:root@pam:.*?:(?P<mail>[\w.]+@[\w.]+):.*?$",_users,re.M)
+            _emailmatch = re.search(r"^user:root@pam:.*?:(?P<mail>[\w.]+@[\w.]+):.*?$",_users,re.M)
             if _emailmatch:
                 _email = _emailmatch.group(1)
             #raise Exception("No PVE User Email found")
@@ -756,6 +769,8 @@ if __name__ == "__main__":
         help=_("Nur Snapshot-Alter prüfen"))
     _parser.add_argument("--mail",type=str,
         help=_("Email für den Versand"))
+    _parser.add_argument("--ignoreattr",type=str,default="tv.sysops:checkzfs",
+        help=_("ZFS Attribut für ignore"))
     _parser.add_argument("--config",dest="config_file",type=str,default="",
         help=_("Config File"))
     _parser.add_argument("--threshold",type=str,
@@ -776,20 +791,26 @@ if __name__ == "__main__":
         help=_("Zuordnung zu anderem Host bei checkmk"))
     _parser.add_argument("--ssh-extra-options",type=str,
         help=_("zusätzliche SSH Optionen mit Komma getrennt (HostKeyAlgorithms=ssh-rsa)"))
+    _parser.add_argument("--update",nargs="?",const="main",type=str,metavar="branch/commitid",
+        help=_("check for update"))
     _parser.add_argument("--debug",action="store_true",
         help=_("debug Ausgabe"))
     args = _parser.parse_args()
-    CONFIG_KEYS="disabled|source|sourceonly|piggyback|remote|legacyhosts|prefix|filter|replicafilter|threshold|maxsnapshots|snapshotfilter|ssh-identity|ssh-extra-options"
-    _config_regex = re.compile(f"^({CONFIG_KEYS}):\s*(.*?)(?:\s+#|$)",re.M)
+    CONFIG_KEYS="disabled|source|sourceonly|piggyback|remote|legacyhosts|prefix|filter|replicafilter|threshold|ignoreattr|maxsnapshots|snapshotfilter|ssh-identity|ssh-extra-options"
+    _config_regex = re.compile(rf"^({CONFIG_KEYS}):\s*(.*?)(?:\s+#|$)",re.M)
     _basename = os.path.basename(__file__).split(".")[0] ## name für config ermitteln aufgrund des script namens
-    _is_checkmk_plugin = os.path.dirname(os.path.abspath(__file__)).find("/check_mk_agent/local") > -1 ## wenn im check_mk ordner
-    if _is_checkmk_plugin:
+    #_is_checkmk_plugin = os.path.dirname(os.path.abspath(__file__)).find("/check_mk_agent/local") > -1 ## wenn im check_mk ordner
+    #if _is_checkmk_plugin:
+    if os.environ.get("MK_CONFDIR"):
         try: ## parse check_mk options
-            args.config_file = f"/etc/check_mk/{_basename}"
+            _check_mk_configdir = "/etc/check_mk"
+            if not os.path.isdir(_check_mk_configdir):
+                _check_mk_configdir = os.environ["MK_CONFDIR"]
+            args.config_file = f"{_check_mk_configdir}/{_basename}"
             if not os.path.exists(args.config_file): ### wenn checkmk aufruf und noch keine config ... default erstellen
-                if not os.path.isdir("/etc/check_mk"):
-                    os.mkdir("/etc/check_mk")
+                if not os.path.isdir(_check_mk_configdir):
+                    os.mkdir(_check_mk_configdir)
                 with open(args.config_file,"wt") as _f: ## default config erstellen
                     _f.write("## config for checkzfs check_mk")
                     _f.write("\n".join([f"# {_k}:" for _k in CONFIG_KEYS.split("|")]))
@@ -829,8 +850,65 @@ if __name__ == "__main__":
             args.__dict__[_k.replace("-","_")] = _v.strip()
     try:
+        if args.update:
+            import requests
+            import hashlib
+            import base64
+            from datetime import datetime
+            import difflib
+            from pkg_resources import parse_version
+            _github_req = requests.get(f"https://api.github.com/repos/bashclub/check-zfs-replication/contents/checkzfs.py?ref={args.update}")
+            if _github_req.status_code != 200:
+                raise Exception("Github Error")
+            _github_version = _github_req.json()
+            _github_last_modified = datetime.strptime(_github_req.headers.get("last-modified"),"%a, %d %b %Y %X %Z")
+            _new_script = base64.b64decode(_github_version.get("content")).decode("utf-8")
+            _new_version = re.findall(r"^VERSION\s*=[\s\x22]*([0-9.]*)",_new_script,re.M)
+            _new_version = _new_version[0] if _new_version else "0.0.0"
+            _script_location = os.path.realpath(__file__)
+            _current_last_modified = datetime.fromtimestamp(int(os.path.getmtime(_script_location)))
+            with (open(_script_location,"rb")) as _f:
+                _content = _f.read()
+            _current_sha = hashlib.sha1(f"blob {len(_content)}\0".encode("utf-8") + _content).hexdigest()
+            _content = _content.decode("utf-8")
+            if type(VERSION) != str:
+                VERSION = str(VERSION)
+            if _current_sha == _github_version.get("sha"):
+                print(f"allready up to date {_current_sha}")
+                sys.exit(0)
+            else:
+                _version = parse_version(VERSION)
+                _nversion = parse_version(_new_version)
+                if _version == _nversion:
+                    print("same Version but checksums mismatch")
+                elif _version > _nversion:
+                    print(f"ATTENTION: Downgrade from {VERSION} to {_new_version}")
+            while True:
+                try:
+                    _answer = input(f"Update {_script_location} to {_new_version} (y/n) or show difference (d)? ")
+                except KeyboardInterrupt:
+                    print("")
+                    sys.exit(0)
+                if _answer in ("Y","y","yes","j","J"):
+                    with open(_script_location,"wb") as _f:
+                        _f.write(_new_script.encode("utf-8"))
+                    print(f"updated to Version {_new_version}")
+                    break
+                elif _answer in ("D","d"):
+                    for _line in difflib.unified_diff(_content.split("\n"),
+                            _new_script.split("\n"),
+                            fromfile=f"Version: {VERSION}",
+                            fromfiledate=_current_last_modified.isoformat(),
+                            tofile=f"Version: {_new_version}",
+                            tofiledate=_github_last_modified.isoformat(),
+                            n=0,
+                            lineterm=""):
+                        print(_line)
+                else:
+                    break
+        else:
             ZFSCHECK_OBJ = zfscheck(**args.__dict__)
+            pass ## for debugger
     except KeyboardInterrupt:
         print("")
         sys.exit(0)
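Assuming the script is installed as /usr/local/bin/checkzfs as in the README snippet above, the new flag would be invoked roughly like this; without a value, --update checks against the main branch (const="main"), and a branch name or commit id can be passed instead:

```
checkzfs --update            # compare the installed script against main and offer to update
checkzfs --update testing    # compare against the testing branch instead
```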