Compare commits

7 Commits

Author SHA1 Message Date
Thorsten Spille
9ed22b5311 Update checkzfs.py
4.12
2024-08-19 15:49:14 +02:00
Thorsten Spille
2fcc2dd26f v4.10 2023-05-26 21:38:26 +02:00
Thorsten Spille
d359ec8042 v4.09 2023-04-12 13:41:42 +02:00
Thorsten Spille
672b23574b Add check-snapshot-age 2022-11-03 22:52:05 +01:00
Thorsten Spille
d10b04aa80 Update 4.08 2022-11-03 22:01:44 +01:00
Thorsten Spille
eac2c1c4d3 Merge pull request #3 from hpannenb/patch-1
Update README.md
2022-02-01 19:36:30 +01:00
Holger Pannenbäcker
faaf5494d7 Update README.md
Added missing "/".
2022-01-31 19:13:43 +01:00
3 changed files with 199 additions and 65 deletions

View File

@@ -3,6 +3,6 @@ This script checks your ZFS replication and generates reports in different flavou
``` ```
wget -O /usr/local/bin/checkzfs https://raw.githubusercontent.com/bashclub/check-zfs-replication/main/checkzfs.py wget -O /usr/local/bin/checkzfs https://raw.githubusercontent.com/bashclub/check-zfs-replication/main/checkzfs.py
chmod +x usr/local/bin/checkzfs chmod +x /usr/local/bin/checkzfs
checkzfs --help checkzfs --help
``` ```

37
check-snapshot-age Normal file
View File

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
"""Summarize ZFS snapshots per dataset, grouped by snapshot-name label.

Reads `zfs list -t snapshot -Hpo name,creation` and prints, for every
dataset and label, the snapshot count plus the first and last snapshot
with their creation time.
"""
import subprocess
import re
import time

## known snapshot-naming labels; snapshots matching none are grouped as "other"
LABELS = ("frequent","hourly","daily","weekly","monthly","yearly","backup-zfs","bashclub-zfs")
RE_LABELSEARCH = re.compile("|".join(LABELS))
## one line per snapshot: "<dataset>@<snapshot>\t<creation-epoch-seconds>"
## raw string: avoids invalid-escape warnings for \w \d on modern Python
RE_SNAPSHOT = re.compile(r"^([\w_./-]+)@([\w_.-]+)\t(\d+)", re.M)


def parse_snapshots(text):
    """Parse `zfs list -Hpo name,creation` output.

    Returns {dataset: {label: [(snapshot_name, creation_ts), ...]}} with
    snapshots kept in input order (zfs lists them oldest first).
    """
    datasets = {}
    for _dataset, _snapshot, _creation in RE_SNAPSHOT.findall(text):
        _match = RE_LABELSEARCH.search(_snapshot)
        _label = _match.group(0) if _match else "other"
        datasets.setdefault(_dataset, {}).setdefault(_label, []).append((_snapshot, int(_creation)))
    return datasets


def format_ts(ts):
    """Format a unix timestamp as a local 'dd.mm.YYYY HH:MM:SS' string."""
    return time.strftime("%d.%m.%Y %H:%M:%S", time.localtime(ts))


def print_report(datasets):
    """Print count plus first/last snapshot for every dataset/label pair."""
    for _dataset, _labels in datasets.items():
        print(_dataset)
        print("-"*40)
        for _label, _data in _labels.items():
            _count = len(_data)
            print(f"    {_label} {_count}")
            print(f"        {format_ts(_data[0][1])} {_data[0][0]}")
            if _count > 1:  # only show the last snapshot when it differs from the first
                print(f"        {format_ts(_data[-1][1])} {_data[-1][0]}")
        print("")


if __name__ == "__main__":
    #_snapshots = open("zfs.txt","r").read()
    _snapshots = subprocess.check_output("/usr/sbin/zfs list -t snapshot -Hpo name,creation".split())
    print_report(parse_snapshots(_snapshots.decode('utf-8')))

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8:noet # vim: set fileencoding=utf-8:noet
## Copyright 2021 sysops.tv ;-) ## Copyright 2023 sysops.tv ;-)
## BSD-2-Clause ## BSD-2-Clause
## ##
## Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: ## Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
@@ -16,7 +16,7 @@
## GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT ## GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
## LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ## LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
VERSION = 4.02 VERSION = "4.12"
### for check_mk usage link or copy binary to check_mk_agent/local/checkzfs ### for check_mk usage link or copy binary to check_mk_agent/local/checkzfs
### create /etc/check_mk/checkzfs ## the config file name matches the filename in check_mk_agent/local/ ### create /etc/check_mk/checkzfs ## the config file name matches the filename in check_mk_agent/local/
@@ -61,7 +61,6 @@ import json
import os.path import os.path
import os import os
import socket import socket
from datetime import datetime
from email.message import EmailMessage from email.message import EmailMessage
from email.mime.application import MIMEApplication from email.mime.application import MIMEApplication
from email.utils import formatdate from email.utils import formatdate
@@ -136,7 +135,7 @@ class zfs_dataset(object):
return self.sorted_snapshots()[0] return self.sorted_snapshots()[0]
def get_info(self,source,threshold=None,ignore_replica=False): def get_info(self,source,threshold=None,maxsnapshots=None,ignore_replica=False):
_latest = self._get_latest_snapshot(source if source != self else None) ## wenn das source dataset nicht man selber ist _latest = self._get_latest_snapshot(source if source != self else None) ## wenn das source dataset nicht man selber ist
_status = -1 _status = -1
_has_zfs_autosnapshot = any(map(lambda x: str(x.snapshot).startswith("zfs-auto-snap_"),self.snapshots.values())) _has_zfs_autosnapshot = any(map(lambda x: str(x.snapshot).startswith("zfs-auto-snap_"),self.snapshots.values()))
@@ -178,6 +177,18 @@ class zfs_dataset(object):
_message = _("Rollback zu altem Snapshot. - '{0.snapshot}' nicht mehr vorhanden".format(self.latest_snapshot)) _message = _("Rollback zu altem Snapshot. - '{0.snapshot}' nicht mehr vorhanden".format(self.latest_snapshot))
_status = 2 ## crit _status = 2 ## crit
if maxsnapshots:
_maxsnapshot_status = list(
map(lambda x: x[1],
filter(lambda y: y[0] < len(self.snapshots.keys()),
zip(maxsnapshots,(1,2))
)
)
)
if _maxsnapshot_status:
if _maxsnapshot_status[-1] > _status:
_message = _("zu viele Snapshots")
_status = _maxsnapshot_status[-1]
if not self.checkzfs: if not self.checkzfs:
_status = -1 _status = -1
@@ -261,7 +272,7 @@ class zfscheck(object):
} }
COLUMN_MAPPER = {} COLUMN_MAPPER = {}
def __init__(self,remote,source,sourceonly,legacyhosts,output,mail=None,prefix='REPLICA',debug=False,**kwargs): def __init__(self,remote,source,sourceonly,legacyhosts,output,ignoreattr,mail=None,prefix='REPLICA',debug=False,**kwargs):
_start_time = time.time() _start_time = time.time()
self.remote_hosts = remote.split(",") if remote else [""] if source and not sourceonly else [] ## wenn nicht und source woanders ... "" (also lokal) als remote self.remote_hosts = remote.split(",") if remote else [""] if source and not sourceonly else [] ## wenn nicht und source woanders ... "" (also lokal) als remote
self.source_hosts = source.split(",") if source else [""] ## wenn nix dann "" als local self.source_hosts = source.split(",") if source else [""] ## wenn nix dann "" als local
@@ -272,6 +283,7 @@ class zfscheck(object):
self.print_debug(f"Version: {VERSION}") self.print_debug(f"Version: {VERSION}")
self.prefix = prefix.strip().replace(" ","_") ## service name bei checkmk leerzeichen durch _ ersetzen self.prefix = prefix.strip().replace(" ","_") ## service name bei checkmk leerzeichen durch _ ersetzen
self.rawdata = False self.rawdata = False
self.ignoreattr = ignoreattr
self.mail_address = mail self.mail_address = mail
self._overall_status = [] self._overall_status = []
self.sortreverse = False self.sortreverse = False
@@ -464,7 +476,7 @@ class zfscheck(object):
continue continue
#if _dataset.remote in self.remote_hosts:## or _dataset.autosnapshot == 0: ## wenn das dataset von der remote seite ist ... dann weiter oder wenn autosnasphot explizit aus ist ... dann nicht als source hinzufügen #if _dataset.remote in self.remote_hosts:## or _dataset.autosnapshot == 0: ## wenn das dataset von der remote seite ist ... dann weiter oder wenn autosnasphot explizit aus ist ... dann nicht als source hinzufügen
# continue # continue
_dataset_info = _dataset.get_info(_dataset,threshold=self.threshold,ignore_replica=self.sourceonly) _dataset_info = _dataset.get_info(_dataset,threshold=self.threshold,maxsnapshots=self.maxsnapshots,ignore_replica=self.sourceonly)
self._overall_status.append(_dataset_info.get("status",-1)) ## alle stati für email overall status self._overall_status.append(_dataset_info.get("status",-1)) ## alle stati für email overall status
_output.append(_dataset_info) _output.append(_dataset_info)
if self.sourceonly == True: if self.sourceonly == True:
@@ -472,7 +484,7 @@ class zfscheck(object):
for _replica in _dataset.replica: ## jetzt das dataset welches als source angezeigt wird (alle filter etc entsprochen nach replika durchsuchen for _replica in _dataset.replica: ## jetzt das dataset welches als source angezeigt wird (alle filter etc entsprochen nach replika durchsuchen
#if not self.replicafilter.search(_replica.dataset_name): #if not self.replicafilter.search(_replica.dataset_name):
# continue # continue
_replica_info = _replica.get_info(_dataset,threshold=self.threshold) ## verarbeitung ausgabe aus klasse _replica_info = _replica.get_info(_dataset,threshold=self.threshold,maxsnapshots=self.maxsnapshots) ## verarbeitung ausgabe aus klasse
self._overall_status.append(_replica_info.get("status",-1)) ## fehler aus replica zu overall status für mail adden self._overall_status.append(_replica_info.get("status",-1)) ## fehler aus replica zu overall status für mail adden
_output.append(_replica_info) _output.append(_replica_info)
@@ -483,7 +495,7 @@ class zfscheck(object):
yield _match.groupdict() yield _match.groupdict()
def _call_proc(self,remote=None): def _call_proc(self,remote=None):
ZFS_ATTRIBUTES = "name,type,creation,guid,used,available,written,origin,com.sun:auto-snapshot,tv.sysops:checkzfs" ## wenn ändern dann auch regex oben anpassen ZFS_ATTRIBUTES = f"name,type,creation,guid,used,available,written,origin,com.sun:auto-snapshot,{self.ignoreattr}" ## wenn ändern dann auch regex oben anpassen
### eigentlicher zfs aufruf, sowohl local als auch remote ### eigentlicher zfs aufruf, sowohl local als auch remote
zfs_args = ["zfs", "list", zfs_args = ["zfs", "list",
"-t", "all", "-t", "all",
@@ -531,8 +543,11 @@ class zfscheck(object):
raise Exception(_stderr.decode(sys.stdout.encoding)) ## Raise Errorlevel with Error from proc -- kann check_mk stderr lesen? sollte das nach stdout? raise Exception(_stderr.decode(sys.stdout.encoding)) ## Raise Errorlevel with Error from proc -- kann check_mk stderr lesen? sollte das nach stdout?
return _stdout.decode(sys.stdout.encoding) ## ausgabe kommt als byte wir wollen str return _stdout.decode(sys.stdout.encoding) ## ausgabe kommt als byte wir wollen str
def convert_ts_date(self,ts): def convert_ts_date(self,ts,dateformat=None):
return time.strftime(self.DATEFORMAT,time.localtime(ts)) if dateformat:
return time.strftime(dateformat,time.localtime(ts))
else:
return time.strftime(self.DATEFORMAT,time.localtime(ts))
@staticmethod @staticmethod
def format_status(val): def format_status(val):
@@ -597,20 +612,20 @@ class zfscheck(object):
_written = _item.get("written","0") _written = _item.get("written","0")
_available = _item.get("available","0") _available = _item.get("available","0")
_used = _item.get("used","0") _used = _item.get("used","0")
if _status == -1: ## tv.sysops:checkzfs=ignore wollen wir nicht if _status == -1: ## tv.sysops:checkzfs=ignore wollen wir nicht (ignoreattr)
continue continue
if self.maxsnapshots: if self.maxsnapshots:
_warn = self.maxsnapshots[0] _warn = self.maxsnapshots[0]
_crit = self.maxsnapshots[1] _crit = self.maxsnapshots[1]
_maxsnapshots = f"{_warn};{_crit}" _maxsnapshots = f"{_warn};{_crit}".replace("inf","")
if _status == 0: #if _status == 0:
_status = "P" # _status = "P"
else: else:
_maxsnapshots = ";" _maxsnapshots = ";"
if self.threshold: if self.threshold:
_warn = self.threshold[0] * 60 _warn = self.threshold[0] * 60
_crit = self.threshold[1] * 60 _crit = self.threshold[1] * 60
_threshold = f"{_warn};{_crit}" _threshold = f"{_warn};{_crit}".replace("inf","")
else: else:
_threshold = ";" _threshold = ";"
_msg = _item.get("message","").strip() _msg = _item.get("message","").strip()
@@ -661,31 +676,30 @@ class zfscheck(object):
_header_names = [self.COLUMN_NAMES.get(i,i) for i in _header] _header_names = [self.COLUMN_NAMES.get(i,i) for i in _header]
_converter = dict((i,self.COLUMN_MAPPER.get(i,(lambda x: str(x)))) for i in _header) _converter = dict((i,self.COLUMN_MAPPER.get(i,(lambda x: str(x)))) for i in _header)
_hostname = socket.getfqdn() _hostname = socket.getfqdn()
_now = self.convert_ts_date(time.time(),'%Y-%m-%d %H:%M:%S')
_out = "<html>" _out = []
_out += "<head>" _out.append("<html>")
_out += "<meta name='color-scheme' content='only'>" _out.append("<head>")
_out += "<style type='text/css'>" _out.append("<meta name='color-scheme' content='only'>")
_out += "html{height:100%%;width:100%%;}" _out.append("<style type='text/css'>")
_out += "body{color:black;width:auto;padding-top:2rem;}" _out.append("html{height:100%%;width:100%%;}")
_out += "h1,h2{text-align:center;}" _out.append("body{color:black;width:auto;padding-top:2rem;}")
_out += "table{margin: 2rem auto;}" _out.append("h1,h2{text-align:center;}")
_out += "table,th,td {border:1px solid black;border-spacing:0;border-collapse:collapse;padding:.2rem;}" _out.append("table{margin: 2rem auto;}")
_out += "th{text-transform:capitalize}" _out.append("table,th,td {border:1px solid black;border-spacing:0;border-collapse:collapse;padding:.2rem;}")
_out += "td:first-child{text-align:center;font-weight:bold;text-transform:uppercase;}" _out.append("th{text-transform:capitalize}")
_out += "td:last-child{text-align:right;}" _out.append("td:first-child{text-align:center;font-weight:bold;text-transform:uppercase;}")
_out += ".warn{background-color:yellow;}" _out.append("td:last-child{text-align:right;}")
_out += ".crit{background-color:red;color:black;}" _out.append(".warn{background-color:yellow;}")
_out += "</style>" _out.append(".crit{background-color:red;color:black;}")
_out += "<title>Check ZFS</title></head><body>" _out.append("</style>")
_out += f"<h1>{_hostname}</h1><h2>{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}</h2>" _out.append("<title>Check ZFS</title></head><body>")
_out += "<table>" _out.append(f"<h1>{_hostname}</h1><h2>{_now}</h2>")
_out += "<tr><th>{0}</th></tr>".format("</th><th>".join(_header_names)) _out.append("<table>")
_out.append("<tr><th>{0}</th></tr>".format("</th><th>".join(_header_names)))
for _item in self._datasort(data): for _item in self._datasort(data):
_out += "<tr class='{1}'><td>{0}</td></tr>".format("</td><td>".join([_converter.get(_col)(_item.get(_col,"")) for _col in _header]),_converter["status"](_item.get("status","0"))) _out.append("<tr class='{1}'><td>{0}</td></tr>".format("</td><td>".join([_converter.get(_col)(_item.get(_col,"")) for _col in _header]),_converter["status"](_item.get("status","0"))))
_out.append("</table></body></html>")
_out += "</table></body></html>"
return "".join(_out) return "".join(_out)
def mail_output(self,data): def mail_output(self,data):
@@ -755,6 +769,10 @@ if __name__ == "__main__":
help=_("Nur Snapshot-Alter prüfen")) help=_("Nur Snapshot-Alter prüfen"))
_parser.add_argument("--mail",type=str, _parser.add_argument("--mail",type=str,
help=_("Email für den Versand")) help=_("Email für den Versand"))
_parser.add_argument("--ignoreattr",type=str,default="tv.sysops:checkzfs",
help=_("ZFS Attribut für ignore"))
_parser.add_argument("--config",dest="config_file",type=str,default="",
help=_("Config File"))
_parser.add_argument("--threshold",type=str, _parser.add_argument("--threshold",type=str,
help=_("Grenzwerte für Alter von Snapshots warn,crit")) help=_("Grenzwerte für Alter von Snapshots warn,crit"))
_parser.add_argument("--maxsnapshots",type=str, _parser.add_argument("--maxsnapshots",type=str,
@@ -773,42 +791,122 @@ if __name__ == "__main__":
help=_("Zuordnung zu anderem Host bei checkmk")) help=_("Zuordnung zu anderem Host bei checkmk"))
_parser.add_argument("--ssh-extra-options",type=str, _parser.add_argument("--ssh-extra-options",type=str,
help=_("zusätzliche SSH Optionen mit Komma getrennt (HostKeyAlgorithms=ssh-rsa)")) help=_("zusätzliche SSH Optionen mit Komma getrennt (HostKeyAlgorithms=ssh-rsa)"))
_parser.add_argument("--update",nargs="?",const="main",type=str,metavar="branch/commitid",
help=_("check for update"))
_parser.add_argument("--debug",action="store_true", _parser.add_argument("--debug",action="store_true",
help=_("debug Ausgabe")) help=_("debug Ausgabe"))
args = _parser.parse_args() args = _parser.parse_args()
_is_checkmk_plugin = os.path.dirname(os.path.abspath(__file__)).find("/check_mk_agent/local") > -1 ## wenn im check_mk ordner
if _is_checkmk_plugin: CONFIG_KEYS="disabled|source|sourceonly|piggyback|remote|legacyhosts|prefix|filter|replicafilter|threshold|ignoreattr|maxsnapshots|snapshotfilter|ssh-identity|ssh-extra-options"
_config_regex = re.compile(f"^({CONFIG_KEYS}):\s*(.*?)(?:\s+#|$)",re.M)
_basename = os.path.basename(__file__).split(".")[0] ## name für config ermitteln aufgrund des script namens
#_is_checkmk_plugin = os.path.dirname(os.path.abspath(__file__)).find("/check_mk_agent/local") > -1 ## wenn im check_mk ordner
#if _is_checkmk_plugin:
if os.environ.get("MK_CONFDIR"):
try: ## parse check_mk options try: ## parse check_mk options
CONFIG_KEYS="disabled|source|sourceonly|piggyback|remote|legacyhosts|prefix|filter|replicafilter|threshold|maxsnapshots|snapshotfilter|ssh-identity|ssh-extra-options" _check_mk_configdir = "/etc/check_mk"
_config_regex = re.compile(f"^({CONFIG_KEYS}):\s*(.*?)(?:\s+#|$)",re.M) if not os.path.isdir(_check_mk_configdir):
_basename = os.path.basename(__file__).split(".")[0] ## name für config ermitteln aufgrund des script namens _check_mk_configdir = os.environ["MK_CONFDIR"]
_config_file = f"/etc/check_mk/{_basename}" args.config_file = f"{_check_mk_configdir}/{_basename}"
if not os.path.exists(_config_file): ### wenn checkmk aufruf und noch keine config ... default erstellen if not os.path.exists(args.config_file): ### wenn checkmk aufruf und noch keine config ... default erstellen
if not os.path.isdir("/etc/check_mk"): if not os.path.isdir(_check_mk_configdir):
os.mkdir("/etc/check_mk") os.mkdir(_check_mk_configdir)
with open(_config_file,"wt") as _f: ## default config erstellen with open(args.config_file,"wt") as _f: ## default config erstellen
_f.write("## config for checkzfs check_mk") _f.write("## config for checkzfs check_mk")
_f.write("\n".join([f"# {_k}:" for _k in CONFIG_KEYS.split("|")])) _f.write("\n".join([f"# {_k}:" for _k in CONFIG_KEYS.split("|")]))
_f.write("\n") _f.write("\n")
print(f"please edit config {_config_file}") print(f"please edit config {args.config_file}")
os._exit(0) os._exit(0)
_rawconfig = open(_config_file,"rt").read()
for _k,_v in _config_regex.findall(_rawconfig):
if _k == "disabled" and _v.lower().strip() in ( "1","yes","true"): ## wenn disabled dann ignorieren check wird nicht durchgeführt
os._exit(0)
if _k == "sourceonly":
args.sourceonly = bool(_v.lower().strip() in ( "1","yes","true"))
elif _k == "prefix":
args.__dict__["prefix"] = _v.strip()
elif not args.__dict__.get(_k.replace("-","_"),None):
args.__dict__[_k.replace("-","_")] = _v.strip()
except: except:
pass pass
args.output = "checkmk" if not args.output else args.output args.output = "checkmk" if not args.output else args.output
_is_zabbix_plugin = os.path.dirname(os.path.abspath(__file__)).find("/zabbix/scripts") > -1 ## wenn im check_mk ordner
if _is_zabbix_plugin:
try: ## parse check_mk options
args.config_file = f"/etc/zabbix/checkzfs-{_basename}"
if not os.path.exists(args.config_file): ### wenn checkmk aufruf und noch keine config ... default erstellen
if not os.path.isdir("/etc/zabbix"):
os.mkdir("/etc/zabbix")
with open(args.config_file,"wt") as _f: ## default config erstellen
_f.write("## config for checkzfs zabbix")
_f.write("\n".join([f"# {_k}:" for _k in CONFIG_KEYS.split("|")]))
_f.write("\n")
print(f"please edit config {args.config_file}")
os._exit(0)
except:
pass
args.output = "json" if not args.output else args.output
if args.config_file:
_rawconfig = open(args.config_file,"rt").read()
for _k,_v in _config_regex.findall(_rawconfig):
if _k == "disabled" and _v.lower().strip() in ( "1","yes","true"): ## wenn disabled dann ignorieren check wird nicht durchgeführt
os._exit(0)
if _k == "sourceonly":
args.sourceonly = bool(_v.lower().strip() in ( "1","yes","true"))
elif _k == "prefix":
args.__dict__["prefix"] = _v.strip()
elif not args.__dict__.get(_k.replace("-","_"),None):
args.__dict__[_k.replace("-","_")] = _v.strip()
try: try:
ZFSCHECK_OBJ = zfscheck(**args.__dict__) if args.update:
pass ## for debugger import requests
import hashlib
import base64
from datetime import datetime
import difflib
from pkg_resources import parse_version
_github_req = requests.get(f"https://api.github.com/repos/bashclub/check-zfs-replication/contents/checkzfs.py?ref={args.update}")
if _github_req.status_code != 200:
raise Exception("Github Error")
_github_version = _github_req.json()
_github_last_modified = datetime.strptime(_github_req.headers.get("last-modified"),"%a, %d %b %Y %X %Z")
_new_script = base64.b64decode(_github_version.get("content")).decode("utf-8")
_new_version = re.findall("^VERSION\s*=\s*([0-9.]*)",_new_script,re.M)
_new_version = _new_version[0] if _new_version else "0.0.0"
_script_location = os.path.realpath(__file__)
_current_last_modified = datetime.fromtimestamp(int(os.path.getmtime(_script_location)))
with (open(_script_location,"rb")) as _f:
_content = _f.read()
_current_sha = hashlib.sha1(f"blob {len(_content)}\0".encode("utf-8") + _content).hexdigest()
_content = _content.decode("utf-8")
if _current_sha == _github_version.get("sha"):
print(f"allready up to date {_current_sha}")
sys.exit(0)
else:
_version = parse_version(VERSION)
_nversion = parse_version(_new_version)
if _version == _nversion:
print("same Version but checksums mismatch")
elif _version > _nversion:
print(f"ATTENTION: Downgrade from {VERSION} to {_new_version}")
while True:
try:
_answer = input(f"Update {_script_location} to {_new_version} (y/n) or show difference (d)? ")
except KeyboardInterrupt:
print("")
sys.exit(0)
if _answer in ("Y","y","yes","j","J"):
with open(_script_location,"wb") as _f:
_f.write(_new_script.encode("utf-8"))
print(f"updated to Version {_new_version}")
break
elif _answer in ("D","d"):
for _line in difflib.unified_diff(_content.split("\n"),
_new_script.split("\n"),
fromfile=f"Version: {VERSION}",
fromfiledate=_current_last_modified.isoformat(),
tofile=f"Version: {_new_version}",
tofiledate=_github_last_modified.isoformat(),
n=0,
lineterm=""):
print(_line)
else:
break
else:
ZFSCHECK_OBJ = zfscheck(**args.__dict__)
except KeyboardInterrupt: except KeyboardInterrupt:
print("") print("")
sys.exit(0) sys.exit(0)
@@ -817,4 +915,3 @@ if __name__ == "__main__":
if args.debug: if args.debug:
raise raise
sys.exit(1) sys.exit(1)