#!/usr/bin/python3
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------

import datetime
import os
import sys
from os.path import isfile, join

tisbackup_root_dir = os.path.dirname(os.path.realpath(__file__))
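# Prepend the script's own "lib" and "libtisbackup" directories to sys.path
# so the imports below resolve without a system-wide install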
sys.path.insert(0, os.path.join(tisbackup_root_dir, "lib"))
sys.path.insert(0, os.path.join(tisbackup_root_dir, "libtisbackup"))

import errno
import logging
import os.path
from optparse import OptionParser

from iniparse import ConfigParser, ini

from libtisbackup.backup_mysql import backup_mysql

# from libtisbackup.backup_vmdk import backup_vmdk
# from libtisbackup.backup_switch import backup_switch
from libtisbackup.backup_null import backup_null
from libtisbackup.backup_pgsql import backup_pgsql
from libtisbackup.backup_rsync import backup_rsync, backup_rsync_ssh

# from libtisbackup.backup_oracle import backup_oracle
from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh, backup_rsync_btrfs

# from libtisbackup.backup_sqlserver import backup_sqlserver
from libtisbackup.backup_samba4 import backup_samba4
from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
from libtisbackup.backup_xva import backup_xva
from libtisbackup.common import *
from libtisbackup.copy_vm_xcp import copy_vm_xcp
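
# Importing each driver module above registers its class in the backup_drivers
# registry (pulled in from libtisbackup.common), keyed by the driver "type"
# string used in config sections and by read_ini_file below.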

__version__ = "2.0"

usage = """\
%prog -c configfile action

TIS Files Backup system.

action is one of:
 backup : launch all backups or a specific one if the -s option is used
 cleanup : remove backups older than the retention period
 checknagios : check all or a specific backup against the max_backup_age parameter
 dumpstat : dump the database content for the last backups (count set with -n, default 30)
 retryfailed : try to relaunch the last failed backups
 listdrivers : list available backup types and parameters for the config ini file
 exportbackup : copy latest OK backups from local to the location defined by the --exportdir parameter
 register_existing : scan backup directories and add missing backups to the database"""

version = __version__

parser = OptionParser(usage=usage, version="%prog " + version)
parser.add_option(
    "-c", "--config", dest="config", default="/etc/tis/tisbackup-config.ini", help="Config file full path (default: %default)"
)
parser.add_option("-d", "--dry-run", dest="dry_run", default=False, action="store_true", help="Dry run (default: %default)")
parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true", help="More information (default: %default)")
parser.add_option(
    "-s", "--sections", dest="sections", default="", help="Comma-separated list of sections (backups) to process (default: all)"
)
parser.add_option(
    "-l",
    "--loglevel",
    dest="loglevel",
    default="info",
    type="choice",
    choices=["debug", "warning", "info", "error", "critical"],
    metavar="LOGLEVEL",
    help="Loglevel (default: %default)",
)
parser.add_option("-n", "--len", dest="statscount", default=30, type="int", help="Number of lines to list for dumpstat (default: %default)")
parser.add_option(
    "-b",
    "--backupdir",
    dest="backup_base_dir",
    default="",
    help="Base directory for all backups (default: [global] backup_base_dir in config file)",
)
parser.add_option(
    "-x", "--exportdir", dest="exportdir", default="", help="Directory where to export latest backups with exportbackup (no default)"
)
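
# Example invocations (illustrative; the config path and section name are just
# placeholders for this sketch):
#   tisbackup -c /etc/tis/tisbackup-config.ini backup
#   tisbackup -c /etc/tis/tisbackup-config.ini -s srv1_home backup
#   tisbackup checknagios    # prints on stdout and exits with the Nagios status code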


class tis_backup:
    logger = logging.getLogger("tisbackup")

    def __init__(self, dry_run=False, verbose=False, backup_base_dir=""):
        self.dry_run = dry_run
        self.verbose = verbose
        self.backup_base_dir = backup_base_dir
        self.backup_list = []

    def read_ini_file(self, filename):
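        # adjust which comment markers iniparse accepts before parsing the
        # config file (see iniparse.ini.change_comment_syntax)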
        ini.change_comment_syntax()
        cp = ConfigParser()
        cp.read(filename)

        if not self.backup_base_dir:
            self.backup_base_dir = cp.get("global", "backup_base_dir")
        if not os.path.isdir(self.backup_base_dir):
            self.logger.info("Creating backup directory %s" % self.backup_base_dir)
            os.makedirs(self.backup_base_dir)

        self.logger.debug("backup directory : " + self.backup_base_dir)
        self.dbstat = BackupStat(os.path.join(self.backup_base_dir, "log", "tisbackup.sqlite"))

        for section in cp.sections():
            if section != "global":
                self.logger.debug("reading backup config " + section)
                backup_item = None
                backup_type = cp.get(section, "type")
                backup_item = backup_drivers[backup_type](
                    backup_name=section, backup_dir=os.path.join(self.backup_base_dir, section), dbstat=self.dbstat, dry_run=self.dry_run
                )
                backup_item.read_config(cp)
                backup_item.verbose = self.verbose

                self.backup_list.append(backup_item)

        # TODO check hostname socket.gethostbyname_ex('cnn.com')
        # TODO socket.gethostbyaddr('64.236.16.20')
        # TODO limit backup to one backup on the command line
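
    # Illustrative config sketch for read_ini_file. Only [global] backup_base_dir
    # and each section's "type" key are read in this method; everything else is
    # driver-specific, so the section name and keys/values below are assumptions:
    #
    #   [global]
    #   backup_base_dir = /backup
    #
    #   [srv1_home]
    #   type = rsync+ssh
    #   server_name = srv1.example.org
    #   remote_dir = /home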

    def checknagios(self, sections=[]):
        try:
            if not sections:
                sections = [backup_item.backup_name for backup_item in self.backup_list]

            self.logger.debug("Start of check nagios for %s" % (",".join(sections),))
            try:
                worst_nagiosstatus = None
                ok = []
                warning = []
                critical = []
                unknown = []
                globallog = []
                nagiosoutput = ""
                for backup_item in self.backup_list:
                    if not sections or backup_item.backup_name in sections:
                        (nagiosstatus, log) = backup_item.checknagios()
                        if nagiosstatus == nagiosStateCritical:
                            critical.append((backup_item.backup_name, log))
                        elif nagiosstatus == nagiosStateWarning:
                            warning.append((backup_item.backup_name, log))
                        elif nagiosstatus == nagiosStateOk:
                            ok.append((backup_item.backup_name, log))
                        else:
                            unknown.append((backup_item.backup_name, log))
                        self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)

                if not ok and not critical and not unknown and not warning:
                    self.logger.debug("Nothing processed")
                    worst_nagiosstatus = nagiosStateUnknown
                    nagiosoutput = 'UNKNOWN : Unknown backup sections "%s"' % sections

                if unknown:
                    if not worst_nagiosstatus:
                        worst_nagiosstatus = nagiosStateUnknown
                        nagiosoutput = "UNKNOWN status backups %s" % (",".join([b[0] for b in unknown]))
                    globallog.extend(unknown)

                if critical:
                    if not worst_nagiosstatus:
                        worst_nagiosstatus = nagiosStateCritical
                        nagiosoutput = "CRITICAL backups %s" % (",".join([b[0] for b in critical]))
                    globallog.extend(critical)

                if warning:
                    if not worst_nagiosstatus:
                        worst_nagiosstatus = nagiosStateWarning
                        nagiosoutput = "WARNING backups %s" % (",".join([b[0] for b in warning]))
                    globallog.extend(warning)

                if ok:
                    if not worst_nagiosstatus:
                        worst_nagiosstatus = nagiosStateOk
                        nagiosoutput = "OK backups %s" % (",".join([b[0] for b in ok]))
                    globallog.extend(ok)

                if worst_nagiosstatus == nagiosStateOk:
                    nagiosoutput = "ALL backups OK %s" % (",".join(sections))

            except BaseException as e:
                worst_nagiosstatus = nagiosStateCritical
                nagiosoutput = "EXCEPTION Critical : %s" % str(e)
                raise

        finally:
            self.logger.debug('worst nagios status :"%i"', worst_nagiosstatus)
            print("%s (tisbackup V%s)" % (nagiosoutput, version))
            print("\n".join(["[%s]:%s" % (log_elem[0], log_elem[1]) for log_elem in globallog]))
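            # return the worst state as the process exit code, following the
            # Nagios plugin convention (0=OK, 1=WARNING, 2=CRITICAL, 3=UNKNOWN)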
            sys.exit(worst_nagiosstatus)

    def process_backup(self, sections=[]):
        processed = []
        errors = []
        if not sections:
            sections = [backup_item.backup_name for backup_item in self.backup_list]

        self.logger.info("Processing backup for %s" % (",".join(sections)))
        for backup_item in self.backup_list:
            if not sections or backup_item.backup_name in sections:
                try:
                    assert isinstance(backup_item, backup_generic)
                    self.logger.info("Processing [%s]", (backup_item.backup_name))
                    stats = backup_item.process_backup()
                    processed.append((backup_item.backup_name, stats))
                except BaseException as e:
                    self.logger.critical("Backup [%s] processed with error : %s", backup_item.backup_name, e)
                    errors.append((backup_item.backup_name, str(e)))
        if not processed and not errors:
            self.logger.critical("No backup properly finished or processed")
        else:
            if processed:
                self.logger.info("Backup processed : %s", ",".join([b[0] for b in processed]))
            if errors:
                self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))

    def export_backups(self, sections=[], exportdir=""):
        processed = []
        errors = []
        if not sections:
            sections = [backup_item.backup_name for backup_item in self.backup_list]

        self.logger.info("Exporting OK backups for %s to %s" % (",".join(sections), exportdir))

        for backup_item in self.backup_list:
            if backup_item.backup_name in sections:
                try:
                    assert isinstance(backup_item, backup_generic)
                    self.logger.info("Processing [%s]", (backup_item.backup_name))
                    stats = backup_item.export_latestbackup(destdir=exportdir)
                    processed.append((backup_item.backup_name, stats))
                except BaseException as e:
                    self.logger.critical("Export Backup [%s] processed with error : %s", backup_item.backup_name, e)
                    errors.append((backup_item.backup_name, str(e)))
        if not processed and not errors:
            self.logger.critical("No export backup properly finished or processed")
        else:
            if processed:
                self.logger.info("Export Backups processed : %s", ",".join([b[0] for b in processed]))
            if errors:
                self.logger.error("Export Backups processed with errors: %s", ",".join([b[0] for b in errors]))

    def retry_failed_backups(self, maxage_hours=30):
        processed = []
        errors = []

        # before mindate, backup is too old
        mindate = datetime2isodate((datetime.datetime.now() - datetime.timedelta(hours=maxage_hours)))
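        # sections with at least one OK backup since mindate; any defined backup
        # absent from that list is considered failed and is retried below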
        recent_ok_backups = self.dbstat.query(
            """\
select distinct backup_name as bname
from stats
where status="OK" and backup_start>=?""",
            (mindate,),
        )

        defined_backups = [x.backup_name for x in self.backup_list if not isinstance(x, backup_null)]
        failed_backups_names = set(defined_backups) - set([b["bname"] for b in recent_ok_backups if b["bname"] in defined_backups])

        if failed_backups_names:
            self.logger.info("Processing backup for %s", ",".join(failed_backups_names))
            for backup_item in self.backup_list:
                if backup_item.backup_name in failed_backups_names:
                    try:
                        assert isinstance(backup_item, backup_generic)
                        self.logger.info("Processing [%s]", (backup_item.backup_name))
                        stats = backup_item.process_backup()
                        processed.append((backup_item.backup_name, stats))
                    except BaseException as e:
                        self.logger.critical("Backup [%s] not processed, error : %s", backup_item.backup_name, e)
                        errors.append((backup_item.backup_name, str(e)))
            if not processed and not errors:
                self.logger.critical("No backup properly finished or processed")
            else:
                if processed:
                    self.logger.info("Backup processed : %s", ",".join([b[0] for b in processed]))
                if errors:
                    self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))
        else:
            self.logger.info("No recent failed backups found in database")

    def cleanup_backup_section(self, sections=[]):
        processed = False
        if not sections:
            sections = [backup_item.backup_name for backup_item in self.backup_list]

        self.logger.info("Processing cleanup for %s" % (",".join(sections)))
        for backup_item in self.backup_list:
            if backup_item.backup_name in sections:
                try:
                    assert isinstance(backup_item, backup_generic)
                    self.logger.info("Processing cleanup of [%s]", (backup_item.backup_name))
                    backup_item.cleanup_backup()
                    processed = True
                except BaseException as e:
                    self.logger.critical("Cleanup of [%s] not processed, error : %s", backup_item.backup_name, e)
        if not processed:
            self.logger.critical("No cleanup properly finished or processed")

    def register_existingbackups(self, sections=[]):
        if not sections:
            sections = [backup_item.backup_name for backup_item in self.backup_list]

        self.logger.info("Append existing backups to database...")
        for backup_item in self.backup_list:
            if backup_item.backup_name in sections:
                backup_item.register_existingbackups()

    def html_report(self, sections=[], maxage_hours=None):
        globallog = []
        for backup_item in self.backup_list:
            if not sections or backup_item.backup_name in sections:
                assert isinstance(backup_item, backup_generic)
                if not maxage_hours:
                    maxage_hours = backup_item.maximum_backup_age
                (nagiosstatus, log) = backup_item.checknagios(maxage_hours=maxage_hours)
                globallog.append("[%s] %s" % (backup_item.backup_name, log))
                self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)
                # processed = True
                # if nagiosstatus >= worst_nagiosstatus:
                #     worst_nagiosstatus = nagiosstatus
        return globallog


def main():
    (options, args) = parser.parse_args()

    if len(args) != 1:
        print("ERROR : You must provide one action to perform")
        parser.print_usage()
        sys.exit(2)
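
    # timestamp used to name this run's log file (see the FileHandler below)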
    backup_start_date = datetime.datetime.now().strftime("%Y%m%d-%Hh%Mm%S")

    # options
    action = args[0]
    if action == "listdrivers":
        for t in backup_drivers:
            print(backup_drivers[t].get_help())
        sys.exit(0)

    config_file = options.config
    dry_run = options.dry_run
    verbose = options.verbose

    loglevel = options.loglevel

    # setup Logger
    logger = logging.getLogger("tisbackup")
    hdlr = logging.StreamHandler()
    hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    logger.addHandler(hdlr)

    # set loglevel
    if loglevel in ("debug", "warning", "info", "error", "critical"):
        numeric_level = getattr(logging, loglevel.upper(), None)
        if not isinstance(numeric_level, int):
            raise ValueError("Invalid log level: %s" % loglevel)
        logger.setLevel(numeric_level)

    # Config file
    if not os.path.isfile(config_file):
        logger.error("Error : could not find file : " + config_file + ", please check the path")
        sys.exit(1)
    logger.info("Using " + config_file + " config file")

    cp = ConfigParser()
    cp.read(config_file)

    backup_base_dir = options.backup_base_dir or cp.get("global", "backup_base_dir")
    log_dir = os.path.join(backup_base_dir, "log")
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # if we run the nagios check, we don't create a log file; everything is piped to stdout
    if action != "checknagios":
        try:
            hdlr = logging.FileHandler(os.path.join(log_dir, "tisbackup_%s.log" % (backup_start_date)))
            hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
            logger.addHandler(hdlr)
        except IOError as e:
            if action == "cleanup" and e.errno == errno.ENOSPC:
                logger.warning("No space left on device, disabling file logging.")
            else:
                raise e

    # Main
    backup = tis_backup(dry_run=dry_run, verbose=verbose, backup_base_dir=backup_base_dir)
    backup.read_ini_file(config_file)

    backup_sections = options.sections.split(",") if options.sections else []

    all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
    if not backup_sections:
        backup_sections = all_sections
    else:
        for b in backup_sections:
            if b not in all_sections:
                raise Exception("Section %s is not defined in config file" % b)

    if dry_run:
        logger.warning("WARNING : DRY RUN, nothing will be done, just printing on screen...")

    if action == "backup":
        backup.process_backup(backup_sections)
    elif action == "exportbackup":
        if not options.exportdir:
            raise Exception("No export directory supplied for exportbackup action")
        backup.export_backups(backup_sections, options.exportdir)
    elif action == "cleanup":
        backup.cleanup_backup_section(backup_sections)
    elif action == "checknagios":
        backup.checknagios(backup_sections)
    elif action == "dumpstat":
        for s in backup_sections:
            backup.dbstat.last_backups(s, count=options.statscount)
    elif action == "retryfailed":
        backup.retry_failed_backups()
    elif action == "register_existing":
        backup.register_existingbackups(backup_sections)
    else:
        logger.error('Unhandled action "%s", quitting...', action)
        sys.exit(1)


if __name__ == "__main__":
    main()