few fixes and lint compatible

k3nny 2024-11-29 00:48:59 +01:00
parent 8479c378ee
commit e7e98d0b47
5 changed files with 357 additions and 306 deletions

View File

@@ -19,7 +19,6 @@ jobs:
       - run: pip install ruff
       - run: |
           ruff check .
-          ruff fix .
       # - uses: stefanzweifel/git-auto-commit-action@v4
       #   with:
      #     commit_message: 'style fixes by ruff'

.hadolint.yml Normal file
View File

@@ -0,0 +1,13 @@
+failure-threshold: warning
+format: tty
+ignored:
+  - DL3007
+override:
+  error:
+    - DL3015
+  warning:
+    - DL3015
+  info:
+    - DL3008
+  style:
+    - DL3015
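
A quick sketch of linting with this config (assuming hadolint v2, which auto-loads `.hadolint.yml` from the working directory; the explicit flag is equivalent):

    # warnings and above fail the run; DL3007 is ignored, DL3008/DL3015 severities follow the overrides
    hadolint Dockerfile
    hadolint --config .hadolint.yml Dockerfile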

View File

@@ -5,13 +5,11 @@ WORKDIR /opt/tisbackup
 COPY entrypoint.sh /entrypoint.sh
 COPY . /opt/tisbackup
-RUN apt update \
-    && apt install --no-install-recommends -y rsync ssh cron \
-    && rm -rf /var/lib/apt/lists/*
-
-RUN /usr/local/bin/python3.12 -m pip install --no-cache-dir -r requirements.txt
-RUN mkdir -p /var/spool/cron/crontabs \
+RUN apt-get update \
+    && apt-get install --no-install-recommends -y rsync ssh cron \
+    && rm -rf /var/lib/apt/lists/* \
+    && /usr/local/bin/python3.12 -m pip install --no-cache-dir -r requirements.txt \
+    && mkdir -p /var/spool/cron/crontabs \
     && echo '59 03 * * * root /bin/bash /opt/tisbackup/backup.sh' > /etc/crontab \
     && echo '' >> /etc/crontab \
     && crontab /etc/crontab
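
The rewrite switches `apt` to `apt-get` (the stable scripting interface, which hadolint's DL3027 family favors) and folds the three RUN steps into a single layer. A build sketch, with a hypothetical image tag:

    docker build -t tisbackup .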

View File

@@ -23,8 +23,8 @@ import sys
 from os.path import isfile, join

 tisbackup_root_dir = os.path.dirname(os.path.realpath(__file__))
-sys.path.insert(0,os.path.join(tisbackup_root_dir,'lib'))
-sys.path.insert(0,os.path.join(tisbackup_root_dir,'libtisbackup'))
+sys.path.insert(0, os.path.join(tisbackup_root_dir, "lib"))
+sys.path.insert(0, os.path.join(tisbackup_root_dir, "libtisbackup"))

 import errno
 import logging
@@ -35,24 +35,26 @@ from optparse import OptionParser
 from iniparse import ConfigParser, ini
 from libtisbackup.backup_mysql import backup_mysql
-#from libtisbackup.backup_vmdk import backup_vmdk
-#from libtisbackup.backup_switch import backup_switch
+
+# from libtisbackup.backup_vmdk import backup_vmdk
+# from libtisbackup.backup_switch import backup_switch
 from libtisbackup.backup_null import backup_null
 from libtisbackup.backup_pgsql import backup_pgsql
 from libtisbackup.backup_rsync import backup_rsync, backup_rsync_ssh
-#from libtisbackup.backup_oracle import backup_oracle
-from libtisbackup.backup_rsync_btrfs import (backup_rsync__btrfs_ssh,
-                                             backup_rsync_btrfs)
-#from libtisbackup.backup_sqlserver import backup_sqlserver
+
+# from libtisbackup.backup_oracle import backup_oracle
+from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh, backup_rsync_btrfs
+
+# from libtisbackup.backup_sqlserver import backup_sqlserver
 from libtisbackup.backup_samba4 import backup_samba4
 from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
 from libtisbackup.backup_xva import backup_xva
 from libtisbackup.common import *
 from libtisbackup.copy_vm_xcp import copy_vm_xcp

-__version__="2.0"
+__version__ = "2.0"

-usage="""\
+usage = """\
 %prog -c configfile action

 TIS Files Backup system.
@@ -67,52 +69,75 @@ action is either :
 exportbackup : copy lastest OK backups from local to location defned by --exportdir parameter
 register_existing : scan backup directories and add missing backups to database"""

-version="VERSION"
+version = "VERSION"

-parser=OptionParser(usage=usage,version="%prog " + version)
-parser.add_option("-c","--config", dest="config", default='/etc/tis/tisbackup-config.ini', help="Config file full path (default: %default)")
-parser.add_option("-d","--dry-run", dest="dry_run", default=False, action='store_true', help="Dry run (default: %default)")
-parser.add_option("-v","--verbose", dest="verbose", default=False, action='store_true', help="More information (default: %default)")
-parser.add_option("-s","--sections", dest="sections", default='', help="Comma separated list of sections (backups) to process (default: All)")
-parser.add_option("-l","--loglevel", dest="loglevel", default='info', type='choice', choices=['debug','warning','info','error','critical'], metavar='LOGLEVEL',help="Loglevel (default: %default)")
-parser.add_option("-n","--len", dest="statscount", default=30, type='int', help="Number of lines to list for dumpstat (default: %default)")
-parser.add_option("-b","--backupdir", dest="backup_base_dir", default='', help="Base directory for all backups (default: [global] backup_base_dir in config file)")
-parser.add_option("-x","--exportdir", dest="exportdir", default='', help="Directory where to export latest backups with exportbackup (nodefault)")
+parser = OptionParser(usage=usage, version="%prog " + version)
+parser.add_option(
+    "-c", "--config", dest="config", default="/etc/tis/tisbackup-config.ini", help="Config file full path (default: %default)"
+)
+parser.add_option("-d", "--dry-run", dest="dry_run", default=False, action="store_true", help="Dry run (default: %default)")
+parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true", help="More information (default: %default)")
+parser.add_option(
+    "-s", "--sections", dest="sections", default="", help="Comma separated list of sections (backups) to process (default: All)"
+)
+parser.add_option(
+    "-l",
+    "--loglevel",
+    dest="loglevel",
+    default="info",
+    type="choice",
+    choices=["debug", "warning", "info", "error", "critical"],
+    metavar="LOGLEVEL",
+    help="Loglevel (default: %default)",
+)
+parser.add_option("-n", "--len", dest="statscount", default=30, type="int", help="Number of lines to list for dumpstat (default: %default)")
+parser.add_option(
+    "-b",
+    "--backupdir",
+    dest="backup_base_dir",
+    default="",
+    help="Base directory for all backups (default: [global] backup_base_dir in config file)",
+)
+parser.add_option(
+    "-x", "--exportdir", dest="exportdir", default="", help="Directory where to export latest backups with exportbackup (nodefault)"
+)


 class tis_backup:
-    logger = logging.getLogger('tisbackup')
+    logger = logging.getLogger("tisbackup")

-    def __init__(self,dry_run=False,verbose=False,backup_base_dir=''):
+    def __init__(self, dry_run=False, verbose=False, backup_base_dir=""):
         self.dry_run = dry_run
         self.verbose = verbose
         self.backup_base_dir = backup_base_dir
-        self.backup_base_dir = ''
+        self.backup_base_dir = ""
         self.backup_list = []
         self.dry_run = dry_run
-        self.verbose=False
+        self.verbose = False

-    def read_ini_file(self,filename):
+    def read_ini_file(self, filename):
         ini.change_comment_syntax()
         cp = ConfigParser()
         cp.read(filename)

         if not self.backup_base_dir:
-            self.backup_base_dir = cp.get('global','backup_base_dir')
+            self.backup_base_dir = cp.get("global", "backup_base_dir")
         if not os.path.isdir(self.backup_base_dir):
-            self.logger.info('Creating backup directory %s' % self.backup_base_dir)
+            self.logger.info("Creating backup directory %s" % self.backup_base_dir)
             os.makedirs(self.backup_base_dir)

         self.logger.debug("backup directory : " + self.backup_base_dir)
-        self.dbstat = BackupStat(os.path.join(self.backup_base_dir,'log','tisbackup.sqlite'))
+        self.dbstat = BackupStat(os.path.join(self.backup_base_dir, "log", "tisbackup.sqlite"))

         for section in cp.sections():
-            if (section != 'global'):
+            if section != "global":
                 self.logger.debug("reading backup config " + section)
                 backup_item = None
-                type = cp.get(section,'type')
-                backup_item = backup_drivers[type](backup_name=section,
-                                                   backup_dir=os.path.join(self.backup_base_dir,section),dbstat=self.dbstat,dry_run=self.dry_run)
+                type = cp.get(section, "type")
+                backup_item = backup_drivers[type](
+                    backup_name=section, backup_dir=os.path.join(self.backup_base_dir, section), dbstat=self.dbstat, dry_run=self.dry_run
+                )
                 backup_item.read_config(cp)
                 backup_item.verbose = self.verbose
@@ -122,35 +147,34 @@ class tis_backup:
         # TODO socket.gethostbyaddr('64.236.16.20')
         # TODO limit backup to one backup on the command line

-    def checknagios(self,sections=[]):
+    def checknagios(self, sections=[]):
         try:
             if not sections:
                 sections = [backup_item.backup_name for backup_item in self.backup_list]

-            self.logger.debug('Start of check nagios for %s' % (','.join(sections),))
+            self.logger.debug("Start of check nagios for %s" % (",".join(sections),))
             try:
                 worst_nagiosstatus = None

                 ok = []
                 warning = []
                 critical = []
                 unknown = []

-                nagiosoutput = ''
+                nagiosoutput = ""
                 for backup_item in self.backup_list:
                     if not sections or backup_item.backup_name in sections:
-                        (nagiosstatus,log) = backup_item.checknagios()
+                        (nagiosstatus, log) = backup_item.checknagios()
                         if nagiosstatus == nagiosStateCritical:
-                            critical.append((backup_item.backup_name,log))
-                        elif nagiosstatus == nagiosStateWarning :
-                            warning.append((backup_item.backup_name,log))
+                            critical.append((backup_item.backup_name, log))
+                        elif nagiosstatus == nagiosStateWarning:
+                            warning.append((backup_item.backup_name, log))
                         elif nagiosstatus == nagiosStateOk:
-                            ok.append((backup_item.backup_name,log))
+                            ok.append((backup_item.backup_name, log))
                         else:
-                            unknown.append((backup_item.backup_name,log))
-                        self.logger.debug('[%s] nagios:"%i" log: %s',backup_item.backup_name,nagiosstatus,log)
+                            unknown.append((backup_item.backup_name, log))
+                        self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)

                 if not ok and not critical and not unknown and not warning:
-                    self.logger.debug('Nothing processed')
+                    self.logger.debug("Nothing processed")
                     worst_nagiosstatus = nagiosStateUnknown
                     nagiosoutput = 'UNKNOWN : Unknown backup sections "%s"' % sections
@@ -159,155 +183,154 @@ class tis_backup:
                 if unknown:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateUnknown
-                        nagiosoutput = 'UNKNOWN status backups %s' % (','.join([b[0] for b in unknown]))
+                        nagiosoutput = "UNKNOWN status backups %s" % (",".join([b[0] for b in unknown]))
                     globallog.extend(unknown)

                 if critical:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateCritical
-                        nagiosoutput = 'CRITICAL backups %s' % (','.join([b[0] for b in critical]))
+                        nagiosoutput = "CRITICAL backups %s" % (",".join([b[0] for b in critical]))
                     globallog.extend(critical)

                 if warning:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateWarning
-                        nagiosoutput = 'WARNING backups %s' % (','.join([b[0] for b in warning]))
+                        nagiosoutput = "WARNING backups %s" % (",".join([b[0] for b in warning]))
                     globallog.extend(warning)

                 if ok:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateOk
-                        nagiosoutput = 'OK backups %s' % (','.join([b[0] for b in ok]))
+                        nagiosoutput = "OK backups %s" % (",".join([b[0] for b in ok]))
                     globallog.extend(ok)

                 if worst_nagiosstatus == nagiosStateOk:
-                    nagiosoutput = 'ALL backups OK %s' % (','.join(sections))
+                    nagiosoutput = "ALL backups OK %s" % (",".join(sections))

             except BaseException as e:
                 worst_nagiosstatus = nagiosStateCritical
-                nagiosoutput = 'EXCEPTION',"Critical : %s" % str(e)
+                nagiosoutput = "EXCEPTION", "Critical : %s" % str(e)
                 raise

         finally:
-            self.logger.debug('worst nagios status :"%i"',worst_nagiosstatus)
-            print('%s (tisbackup V%s)' %(nagiosoutput,version))
-            print('\n'.join(["[%s]:%s" % (log_elem[0],log_elem[1]) for log_elem in globallog]))
+            self.logger.debug('worst nagios status :"%i"', worst_nagiosstatus)
+            print("%s (tisbackup V%s)" % (nagiosoutput, version))
+            print("\n".join(["[%s]:%s" % (log_elem[0], log_elem[1]) for log_elem in globallog]))
             sys.exit(worst_nagiosstatus)

-    def process_backup(self,sections=[]):
+    def process_backup(self, sections=[]):
         processed = []
         errors = []
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]

-        self.logger.info('Processing backup for %s' % (','.join(sections)) )
+        self.logger.info("Processing backup for %s" % (",".join(sections)))
         for backup_item in self.backup_list:
             if not sections or backup_item.backup_name in sections:
                 try:
-                    assert(isinstance(backup_item,backup_generic))
-                    self.logger.info('Processing [%s]',(backup_item.backup_name))
+                    assert isinstance(backup_item, backup_generic)
+                    self.logger.info("Processing [%s]", (backup_item.backup_name))
                     stats = backup_item.process_backup()
-                    processed.append((backup_item.backup_name,stats))
+                    processed.append((backup_item.backup_name, stats))
                 except BaseException as e:
-                    self.logger.critical('Backup [%s] processed with error : %s',backup_item.backup_name,e)
-                    errors.append((backup_item.backup_name,str(e)))
+                    self.logger.critical("Backup [%s] processed with error : %s", backup_item.backup_name, e)
+                    errors.append((backup_item.backup_name, str(e)))
         if not processed and not errors:
-            self.logger.critical('No backup properly finished or processed')
+            self.logger.critical("No backup properly finished or processed")
         else:
             if processed:
-                self.logger.info('Backup processed : %s' , ",".join([b[0] for b in processed]))
+                self.logger.info("Backup processed : %s", ",".join([b[0] for b in processed]))
             if errors:
-                self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
+                self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))

-    def export_backups(self,sections=[],exportdir=''):
+    def export_backups(self, sections=[], exportdir=""):
         processed = []
         errors = []
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]

-        self.logger.info('Exporting OK backups for %s to %s' % (','.join(sections),exportdir) )
+        self.logger.info("Exporting OK backups for %s to %s" % (",".join(sections), exportdir))

         for backup_item in self.backup_list:
             if backup_item.backup_name in sections:
                 try:
-                    assert(isinstance(backup_item,backup_generic))
-                    self.logger.info('Processing [%s]',(backup_item.backup_name))
+                    assert isinstance(backup_item, backup_generic)
+                    self.logger.info("Processing [%s]", (backup_item.backup_name))
                     stats = backup_item.export_latestbackup(destdir=exportdir)
-                    processed.append((backup_item.backup_name,stats))
+                    processed.append((backup_item.backup_name, stats))
                 except BaseException as e:
-                    self.logger.critical('Export Backup [%s] processed with error : %s',backup_item.backup_name,e)
-                    errors.append((backup_item.backup_name,str(e)))
+                    self.logger.critical("Export Backup [%s] processed with error : %s", backup_item.backup_name, e)
+                    errors.append((backup_item.backup_name, str(e)))
         if not processed and not errors:
-            self.logger.critical('No export backup properly finished or processed')
+            self.logger.critical("No export backup properly finished or processed")
         else:
             if processed:
-                self.logger.info('Export Backups processed : %s' , ",".join([b[0] for b in processed]))
+                self.logger.info("Export Backups processed : %s", ",".join([b[0] for b in processed]))
             if errors:
-                self.logger.error('Export Backups processed with errors: %s' , ",".join([b[0] for b in errors]))
+                self.logger.error("Export Backups processed with errors: %s", ",".join([b[0] for b in errors]))

-    def retry_failed_backups(self,maxage_hours=30):
+    def retry_failed_backups(self, maxage_hours=30):
         processed = []
         errors = []

         # before mindate, backup is too old
         mindate = datetime2isodate((datetime.datetime.now() - datetime.timedelta(hours=maxage_hours)))
-        failed_backups = self.dbstat.query("""\
+        failed_backups = self.dbstat.query(
+            """\
             select distinct backup_name as bname
             from stats
-            where status="OK" and backup_start>=?""",(mindate,))
+            where status="OK" and backup_start>=?""",
+            (mindate,),
+        )

-        defined_backups = list(map(lambda f:f.backup_name, [ x for x in self.backup_list if not isinstance(x, backup_null) ]))
-        failed_backups_names = set(defined_backups) - set([b['bname'] for b in failed_backups if b['bname'] in defined_backups])
+        defined_backups = list(map(lambda f: f.backup_name, [x for x in self.backup_list if not isinstance(x, backup_null)]))
+        failed_backups_names = set(defined_backups) - set([b["bname"] for b in failed_backups if b["bname"] in defined_backups])

         if failed_backups_names:
-            self.logger.info('Processing backup for %s',','.join(failed_backups_names))
+            self.logger.info("Processing backup for %s", ",".join(failed_backups_names))
             for backup_item in self.backup_list:
                 if backup_item.backup_name in failed_backups_names:
                     try:
-                        assert(isinstance(backup_item,backup_generic))
-                        self.logger.info('Processing [%s]',(backup_item.backup_name))
+                        assert isinstance(backup_item, backup_generic)
+                        self.logger.info("Processing [%s]", (backup_item.backup_name))
                         stats = backup_item.process_backup()
-                        processed.append((backup_item.backup_name,stats))
+                        processed.append((backup_item.backup_name, stats))
                     except BaseException as e:
-                        self.logger.critical('Backup [%s] not processed, error : %s',backup_item.backup_name,e)
-                        errors.append((backup_item.backup_name,str(e)))
+                        self.logger.critical("Backup [%s] not processed, error : %s", backup_item.backup_name, e)
+                        errors.append((backup_item.backup_name, str(e)))
             if not processed and not errors:
-                self.logger.critical('No backup properly finished or processed')
+                self.logger.critical("No backup properly finished or processed")
             else:
                 if processed:
-                    self.logger.info('Backup processed : %s' , ",".join([b[0] for b in errors]))
+                    self.logger.info("Backup processed : %s", ",".join([b[0] for b in errors]))
                 if errors:
-                    self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
         else:
-            self.logger.info('No recent failed backups found in database')
+                    self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))
+        else:
+            self.logger.info("No recent failed backups found in database")

-    def cleanup_backup_section(self,sections = []):
+    def cleanup_backup_section(self, sections=[]):
         processed = False
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]

-        self.logger.info('Processing cleanup for %s' % (','.join(sections)) )
+        self.logger.info("Processing cleanup for %s" % (",".join(sections)))
         for backup_item in self.backup_list:
             if backup_item.backup_name in sections:
                 try:
-                    assert(isinstance(backup_item,backup_generic))
-                    self.logger.info('Processing cleanup of [%s]',(backup_item.backup_name))
+                    assert isinstance(backup_item, backup_generic)
+                    self.logger.info("Processing cleanup of [%s]", (backup_item.backup_name))
                     backup_item.cleanup_backup()
                     processed = True
                 except BaseException as e:
-                    self.logger.critical('Cleanup of [%s] not processed, error : %s',backup_item.backup_name,e)
+                    self.logger.critical("Cleanup of [%s] not processed, error : %s", backup_item.backup_name, e)
         if not processed:
-            self.logger.critical('No cleanup properly finished or processed')
+            self.logger.critical("No cleanup properly finished or processed")

-    def register_existingbackups(self,sections = []):
+    def register_existingbackups(self, sections=[]):
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]

-        self.logger.info('Append existing backups to database...')
+        self.logger.info("Append existing backups to database...")
         for backup_item in self.backup_list:
             if backup_item.backup_name in sections:
                 backup_item.register_existingbackups()
@@ -315,26 +338,26 @@ class tis_backup:
     def html_report(self):
         for backup_item in self.backup_list:
             if not section or section == backup_item.backup_name:
-                assert(isinstance(backup_item,backup_generic))
+                assert isinstance(backup_item, backup_generic)
                 if not maxage_hours:
                     maxage_hours = backup_item.maximum_backup_age
-                (nagiosstatus,log) = backup_item.checknagios(maxage_hours=maxage_hours)
-                globallog.append('[%s] %s' % (backup_item.backup_name,log))
-                self.logger.debug('[%s] nagios:"%i" log: %s',backup_item.backup_name,nagiosstatus,log)
-                #processed = True
+                (nagiosstatus, log) = backup_item.checknagios(maxage_hours=maxage_hours)
+                globallog.append("[%s] %s" % (backup_item.backup_name, log))
+                self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)
+                # processed = True
                 # if nagiosstatus >= worst_nagiosstatus:
                 #     worst_nagiosstatus = nagiosstatus


 def main():
-    (options,args)=parser.parse_args()
+    (options, args) = parser.parse_args()

     if len(args) != 1:
         print("ERROR : You must provide one action to perform")
         parser.print_usage()
         sys.exit(2)

-    backup_start_date = datetime.datetime.now().strftime('%Y%m%d-%Hh%Mm%S')
+    backup_start_date = datetime.datetime.now().strftime("%Y%m%d-%Hh%Mm%S")

     # options
     action = args[0]
@@ -343,23 +366,23 @@ def main():
             print(backup_drivers[t].get_help())
         sys.exit(0)

-    config_file =options.config
+    config_file = options.config
     dry_run = options.dry_run
     verbose = options.verbose

     loglevel = options.loglevel

     # setup Logger
-    logger = logging.getLogger('tisbackup')
+    logger = logging.getLogger("tisbackup")
     hdlr = logging.StreamHandler()
-    hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
+    hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
     logger.addHandler(hdlr)

     # set loglevel
-    if loglevel in ('debug','warning','info','error','critical'):
+    if loglevel in ("debug", "warning", "info", "error", "critical"):
         numeric_level = getattr(logging, loglevel.upper(), None)
         if not isinstance(numeric_level, int):
-            raise ValueError('Invalid log level: %s' % loglevel)
+            raise ValueError("Invalid log level: %s" % loglevel)
         logger.setLevel(numeric_level)

     # Config file
@@ -370,28 +393,28 @@ def main():
     cp = ConfigParser()
     cp.read(config_file)

-    backup_base_dir = options.backup_base_dir or cp.get('global','backup_base_dir')
-    log_dir = os.path.join(backup_base_dir,'log')
+    backup_base_dir = options.backup_base_dir or cp.get("global", "backup_base_dir")
+    log_dir = os.path.join(backup_base_dir, "log")
     if not os.path.exists(log_dir):
         os.makedirs(log_dir)

     # if we run the nagios check, we don't create log file, everything is piped to stdout
-    if action!='checknagios':
+    if action != "checknagios":
         try:
-            hdlr = logging.FileHandler(os.path.join(log_dir,'tisbackup_%s.log' % (backup_start_date)))
-            hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
+            hdlr = logging.FileHandler(os.path.join(log_dir, "tisbackup_%s.log" % (backup_start_date)))
+            hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
             logger.addHandler(hdlr)
         except IOError as e:
-            if action == 'cleanup' and e.errno == errno.ENOSPC:
+            if action == "cleanup" and e.errno == errno.ENOSPC:
                 logger.warning("No space left on device, disabling file logging.")
             else:
                 raise e

     # Main
-    backup = tis_backup(dry_run=dry_run,verbose=verbose,backup_base_dir=backup_base_dir)
+    backup = tis_backup(dry_run=dry_run, verbose=verbose, backup_base_dir=backup_base_dir)
     backup.read_ini_file(config_file)

-    backup_sections = options.sections.split(',') if options.sections else []
+    backup_sections = options.sections.split(",") if options.sections else []

     all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
     if not backup_sections:
@@ -399,7 +422,7 @@ def main():
     else:
         for b in backup_sections:
             if b not in all_sections:
-                raise Exception('Section %s is not defined in config file' % b)
+                raise Exception("Section %s is not defined in config file" % b)

     if dry_run:
         logger.warning("WARNING : DRY RUN, nothing will be done, just printing on screen...")
@@ -408,23 +431,22 @@ def main():
         backup.process_backup(backup_sections)
     elif action == "exportbackup":
         if not options.exportdir:
-            raise Exception('No export directory supplied dor exportbackup action')
-        backup.export_backups(backup_sections,options.exportdir)
+            raise Exception("No export directory supplied dor exportbackup action")
+        backup.export_backups(backup_sections, options.exportdir)
     elif action == "cleanup":
         backup.cleanup_backup_section(backup_sections)
     elif action == "checknagios":
         backup.checknagios(backup_sections)
     elif action == "dumpstat":
         for s in backup_sections:
-            backup.dbstat.last_backups(s,count=options.statscount)
+            backup.dbstat.last_backups(s, count=options.statscount)
     elif action == "retryfailed":
         backup.retry_failed_backups()
     elif action == "register_existing":
         backup.register_existingbackups(backup_sections)
     else:
-        logger.error('Unhandled action "%s", quitting...',action)
+        logger.error('Unhandled action "%s", quitting...', action)
         sys.exit(1)
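
From the usage text and option definitions above, typical invocations look like the following sketch (script name inferred from the `from tisbackup import tis_backup` import in the GUI module; section names illustrative):

    # run all configured backups
    python tisbackup.py -c /etc/tis/tisbackup-config.ini backup
    # Nagios probe: prints a status line and exits with the worst state
    python tisbackup.py -c /etc/tis/tisbackup-config.ini -s srv1_rsync,srv2_mysql checknagios
    # copy the latest OK backups to an export directory
    python tisbackup.py -c /etc/tis/tisbackup-config.ini -x /mnt/export exportbackup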

View File

@@ -22,8 +22,8 @@ import sys
 from os.path import isfile, join

 tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
-sys.path.append(os.path.join(tisbackup_root_dir,'lib'))
-sys.path.append(os.path.join(tisbackup_root_dir,'libtisbackup'))
+sys.path.append(os.path.join(tisbackup_root_dir, "lib"))
+sys.path.append(os.path.join(tisbackup_root_dir, "libtisbackup"))

 import glob
@@ -34,9 +34,7 @@ import time
 from shutil import *
 from urllib.parse import urlparse

-from flask import (Flask, Response, abort, appcontext_pushed, flash, g,
-                   jsonify, redirect, render_template, request, session,
-                   url_for)
+from flask import Flask, Response, abort, appcontext_pushed, flash, g, jsonify, redirect, render_template, request, session, url_for
 from iniparse import ConfigParser, RawConfigParser

 from config import huey
@@ -47,61 +45,61 @@ from tisbackup import tis_backup
 cp = ConfigParser()
 cp.read("/etc/tis/tisbackup_gui.ini")

-CONFIG = cp.get('general','config_tisbackup').split(",")
-SECTIONS = cp.get('general','sections')
-ADMIN_EMAIL = cp.get('general','ADMIN_EMAIL')
-BASE_DIR = cp.get('general','base_config_dir')
+CONFIG = cp.get("general", "config_tisbackup").split(",")
+SECTIONS = cp.get("general", "sections")
+ADMIN_EMAIL = cp.get("general", "ADMIN_EMAIL")
+BASE_DIR = cp.get("general", "base_config_dir")

-tisbackup_config_file= CONFIG[0]
-config_number=0
+tisbackup_config_file = CONFIG[0]
+config_number = 0

 cp = ConfigParser()
 cp.read(tisbackup_config_file)

-backup_base_dir = cp.get('global','backup_base_dir')
-dbstat = BackupStat(os.path.join(backup_base_dir,'log','tisbackup.sqlite'))
+backup_base_dir = cp.get("global", "backup_base_dir")
+dbstat = BackupStat(os.path.join(backup_base_dir, "log", "tisbackup.sqlite"))
 mindate = None
 error = None
 info = None

 app = Flask(__name__)
-app.secret_key = 'fsiqefiuqsefARZ4Zfesfe34234dfzefzfe'
-app.config['PROPAGATE_EXCEPTIONS'] = True
+app.secret_key = "fsiqefiuqsefARZ4Zfesfe34234dfzefzfe"
+app.config["PROPAGATE_EXCEPTIONS"] = True

-tasks_db = os.path.join(tisbackup_root_dir,"tasks.sqlite")
+tasks_db = os.path.join(tisbackup_root_dir, "tasks.sqlite")


 def read_all_configs(base_dir):
     raw_configs = []
     list_config = []
-    #config_base_dir = base_dir
+    # config_base_dir = base_dir
     for file in os.listdir(base_dir):
-        if isfile(join(base_dir,file)):
-            raw_configs.append(join(base_dir,file))
+        if isfile(join(base_dir, file)):
+            raw_configs.append(join(base_dir, file))

     for elem in raw_configs:
         line = open(elem).readline()
-        if 'global' in line:
+        if "global" in line:
             list_config.append(elem)

     backup_dict = {}
-    backup_dict['rsync_ssh_list'] = []
-    backup_dict['rsync_btrfs_list'] = []
-    backup_dict['rsync_list'] = []
-    backup_dict['null_list'] = []
-    backup_dict['pgsql_list'] = []
-    backup_dict['mysql_list'] = []
-    #backup_dict['sqlserver_list'] = []
-    backup_dict['xva_list'] = []
-    backup_dict['metadata_list'] = []
-    #backup_dict['switch_list'] = []
-    #backup_dict['oracle_list'] = []
+    backup_dict["rsync_ssh_list"] = []
+    backup_dict["rsync_btrfs_list"] = []
+    backup_dict["rsync_list"] = []
+    backup_dict["null_list"] = []
+    backup_dict["pgsql_list"] = []
+    backup_dict["mysql_list"] = []
+    # backup_dict['sqlserver_list'] = []
+    backup_dict["xva_list"] = []
+    backup_dict["metadata_list"] = []
+    # backup_dict['switch_list'] = []
+    # backup_dict['oracle_list'] = []

     result = []
     cp = ConfigParser()
     for config_file in list_config:
         cp.read(config_file)

-        backup_base_dir = cp.get('global', 'backup_base_dir')
+        backup_base_dir = cp.get("global", "backup_base_dir")
         backup = tis_backup(backup_base_dir=backup_base_dir)
         backup.read_ini_file(config_file)
@@ -113,7 +111,7 @@ def read_all_configs(base_dir):
         else:
             for b in backup_sections:
                 if b not in all_sections:
-                    raise Exception('Section %s is not defined in config file' % b)
+                    raise Exception("Section %s is not defined in config file" % b)

     # never used..
     # if not backup_sections:
@@ -128,35 +126,28 @@ def read_all_configs(base_dir):
                 result.append(b)

     for row in result:
-        backup_name = row['backup_name']
-        server_name = row['server_name']
-        backup_type = row['type']
+        backup_name = row["backup_name"]
+        server_name = row["server_name"]
+        backup_type = row["type"]
         if backup_type == "xcp-dump-metadata":
-            backup_dict['metadata_list'].append(
-                [server_name, backup_name, backup_type, ""])
+            backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "rsync+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_ssh_list'].append(
-                [server_name, backup_name, backup_type, remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync+btrfs+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_btrfs_list'].append(
-                [server_name, backup_name, backup_type, remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_list'].append(
-                [server_name, backup_name, backup_type, remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "null":
-            backup_dict['null_list'].append(
-                [server_name, backup_name, backup_type, ""])
+            backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "pgsql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['pgsql_list'].append(
-                [server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "mysql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['mysql_list'].append(
-                [server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
        # if backup_type == "sqlserver+ssh":
        #     db_name = row['db_name']
        #     backup_dict['sqlserver_list'].append(
@@ -166,12 +157,11 @@ def read_all_configs(base_dir):
        #         [server_name, backup_name, backup_type, db_name])
        # if backup_type == "oracle+ssh":
        #     backup_dict['oracle_list'].append(
        #         [server_name, backup_name, backup_type, db_name])
         if backup_type == "xen-xva":
-            backup_dict['xva_list'].append(
-                [server_name, backup_name, backup_type, ""])
+            backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
        # if backup_type == "switch":
        #     backup_dict['switch_list'].append(
        #         [server_name, backup_name, backup_type, ""])

     return backup_dict
@@ -180,7 +170,7 @@ def read_config():
     cp = ConfigParser()
     cp.read(config_file)

-    backup_base_dir = cp.get('global','backup_base_dir')
+    backup_base_dir = cp.get("global", "backup_base_dir")
     backup = tis_backup(backup_base_dir=backup_base_dir)
     backup.read_ini_file(config_file)
@@ -192,10 +182,10 @@ def read_config():
     else:
         for b in backup_sections:
             if b not in all_sections:
-                raise Exception('Section %s is not defined in config file' % b)
+                raise Exception("Section %s is not defined in config file" % b)

     result = []

     # not used ...
     # if not backup_sections:
     #     sections = [backup_item.backup_name for backup_item in backup.backup_list]
@@ -203,46 +193,46 @@ def read_config():
     for backup_item in backup.backup_list:
         if backup_item.backup_name in backup_sections:
             b = {}
-            for attrib_name in backup_item.required_params+backup_item.optional_params:
-                if hasattr(backup_item,attrib_name):
-                    b[attrib_name] = getattr(backup_item,attrib_name)
+            for attrib_name in backup_item.required_params + backup_item.optional_params:
+                if hasattr(backup_item, attrib_name):
+                    b[attrib_name] = getattr(backup_item, attrib_name)
             result.append(b)

     backup_dict = {}
-    backup_dict['rsync_ssh_list'] = []
-    backup_dict['rsync_btrfs_list'] = []
-    backup_dict['rsync_list'] = []
-    backup_dict['null_list'] = []
-    backup_dict['pgsql_list'] = []
-    backup_dict['mysql_list'] = []
-    #backup_dict['sqlserver_list'] = []
-    backup_dict['xva_list'] = []
-    backup_dict['metadata_list'] = []
-    #backup_dict['switch_list'] = []
-    #backup_dict['oracle_list'] = []
+    backup_dict["rsync_ssh_list"] = []
+    backup_dict["rsync_btrfs_list"] = []
+    backup_dict["rsync_list"] = []
+    backup_dict["null_list"] = []
+    backup_dict["pgsql_list"] = []
+    backup_dict["mysql_list"] = []
+    # backup_dict['sqlserver_list'] = []
+    backup_dict["xva_list"] = []
+    backup_dict["metadata_list"] = []
+    # backup_dict['switch_list'] = []
+    # backup_dict['oracle_list'] = []
     for row in result:
-        backup_name = row['backup_name']
-        server_name = row['server_name']
-        backup_type = row['type']
+        backup_name = row["backup_name"]
+        server_name = row["server_name"]
+        backup_type = row["type"]
         if backup_type == "xcp-dump-metadata":
-            backup_dict['metadata_list'].append([server_name, backup_name, backup_type, ""])
+            backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "rsync+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_ssh_list'].append([server_name, backup_name, backup_type,remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync+btrfs+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_btrfs_list'].append([server_name, backup_name, backup_type,remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_list'].append([server_name, backup_name, backup_type,remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "null":
-            backup_dict['null_list'].append([server_name, backup_name, backup_type, ""])
+            backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "pgsql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['pgsql_list'].append([server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "mysql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['mysql_list'].append([server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
        # if backup_type == "sqlserver+ssh":
        #     db_name = row['db_name']
        #     backup_dict['sqlserver_list'].append([server_name, backup_name, backup_type, db_name])
@@ -250,49 +240,68 @@ def read_config():
        #     db_name = row['db_name']
        #     backup_dict['oracle_list'].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "xen-xva":
-            backup_dict['xva_list'].append([server_name, backup_name, backup_type, ""])
+            backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
        # if backup_type == "switch":
        #     backup_dict['switch_list'].append([server_name, backup_name, backup_type, ""])

     return backup_dict


-@app.route('/')
+@app.route("/")
 def backup_all():
     backup_dict = read_config()
-    return render_template('backups.html', backup_list = backup_dict)
+    return render_template("backups.html", backup_list=backup_dict)


-@app.route('/config_number/')
-@app.route('/config_number/<int:id>')
+@app.route("/config_number/")
+@app.route("/config_number/<int:id>")
 def set_config_number(id=None):
     if id is not None and len(CONFIG) > id:
         global config_number
-        config_number=id
+        config_number = id
         read_config()
-    return jsonify(configs=CONFIG,config_number=config_number)
+    return jsonify(configs=CONFIG, config_number=config_number)


-@app.route('/all_json')
+@app.route("/all_json")
 def backup_all_json():
     backup_dict = read_all_configs(BASE_DIR)
-    return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_btrfs_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list'])
-    #+ backup_dict['switch_list'])+backup_dict['sqlserver_list']
+    return json.dumps(
+        backup_dict["rsync_list"]
+        + backup_dict["rsync_btrfs_list"]
+        + backup_dict["rsync_ssh_list"]
+        + backup_dict["pgsql_list"]
+        + backup_dict["mysql_list"]
+        + backup_dict["xva_list"]
+        + backup_dict["null_list"]
+        + backup_dict["metadata_list"]
+    )
+    # + backup_dict['switch_list'])+backup_dict['sqlserver_list']


-@app.route('/json')
+@app.route("/json")
 def backup_json():
     backup_dict = read_config()
-    return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_btrfs_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list'])
-    #+ backup_dict['switch_list'])+backup_dict['sqlserver_list']
+    return json.dumps(
+        backup_dict["rsync_list"]
+        + backup_dict["rsync_btrfs_list"]
+        + backup_dict["rsync_ssh_list"]
+        + backup_dict["pgsql_list"]
+        + backup_dict["mysql_list"]
+        + backup_dict["xva_list"]
+        + backup_dict["null_list"]
+        + backup_dict["metadata_list"]
+    )
+    # + backup_dict['switch_list'])+backup_dict['sqlserver_list']


 def check_usb_disk():
     """This method returns the mounts point of FIRST external disk"""
     # disk_name = []
     usb_disk_list = []
-    for name in glob.glob('/dev/sd[a-z]'):
+    for name in glob.glob("/dev/sd[a-z]"):
         for line in os.popen("udevadm info -q env -n %s" % name):
             if re.match("ID_PATH=.*usb.*", line):
-                usb_disk_list += [ name ]
+                usb_disk_list += [name]

     if len(usb_disk_list) == 0:
         raise_error("Cannot find any external usb disk", "You should plug the usb hard drive into the server")
@@ -301,20 +310,23 @@ def check_usb_disk():
     usb_partition_list = []
     for usb_disk in usb_disk_list:
-        cmd = "udevadm info -q path -n %s" % usb_disk + '1'
+        cmd = "udevadm info -q path -n %s" % usb_disk + "1"
         output = os.popen(cmd).read()
         print("cmd : " + cmd)
         print("output : " + output)
-        if '/devices/pci' in output:
-            #flash("partition found: %s1" % usb_disk)
+        if "/devices/pci" in output:
+            # flash("partition found: %s1" % usb_disk)
             usb_partition_list.append(usb_disk + "1")

     print(usb_partition_list)
-    if len(usb_partition_list) ==0:
-        raise_error("The drive %s has no partition" % (usb_disk_list[0] ), "You should initialize the usb drive and format an ext4 partition with TISBACKUP label")
-        return ""
+    if len(usb_partition_list) == 0:
+        raise_error(
+            "The drive %s has no partition" % (usb_disk_list[0]),
+            "You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
+        )
+        return ""

     tisbackup_partition_list = []
     for usb_partition in usb_partition_list:
@@ -322,133 +334,139 @@ def check_usb_disk():
             flash("tisbackup backup partition found: %s" % usb_partition)
             tisbackup_partition_list.append(usb_partition)

     print(tisbackup_partition_list)
-    if len(tisbackup_partition_list) ==0:
-        raise_error("No tisbackup partition exist on disk %s" % (usb_disk_list[0] ), "You should initialize the usb drive and format an ext4 partition with TISBACKUP label")
+    if len(tisbackup_partition_list) == 0:
+        raise_error(
+            "No tisbackup partition exist on disk %s" % (usb_disk_list[0]),
+            "You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
+        )
         return ""

     if len(tisbackup_partition_list) > 1:
         raise_error("There are many usb disk", "You should plug remove one of them")
         return ""

     return tisbackup_partition_list[0]


-def check_already_mount(partition_name,refresh):
-    with open('/proc/mounts') as f:
+def check_already_mount(partition_name, refresh):
+    with open("/proc/mounts") as f:
         mount_point = ""
         for line in f.readlines():
             if line.startswith(partition_name):
-                mount_point = line.split(' ')[1]
+                mount_point = line.split(" ")[1]
                 if not refresh:
                     run_command("/bin/umount %s" % mount_point)
                     os.rmdir(mount_point)
     return mount_point


 def run_command(cmd, info=""):
-    flash("Executing: %s"% cmd)
+    flash("Executing: %s" % cmd)
     from subprocess import CalledProcessError, check_output

-    result =""
+    result = ""
     try:
-        result = check_output(cmd, stderr=subprocess.STDOUT,shell=True)
+        result = check_output(cmd, stderr=subprocess.STDOUT, shell=True)
     except CalledProcessError:
-        raise_error(result,info)
+        raise_error(result, info)
     return result


 def check_mount_disk(partition_name, refresh):
     mount_point = check_already_mount(partition_name, refresh)
     if not refresh:
-        mount_point = "/mnt/TISBACKUP-" +str(time.time())
+        mount_point = "/mnt/TISBACKUP-" + str(time.time())
         os.mkdir(mount_point)
-        flash("must mount " + partition_name )
+        flash("must mount " + partition_name)
         cmd = "mount %s %s" % (partition_name, mount_point)
-        if run_command(cmd,"You should manualy mount the usb drive") != "":
+        if run_command(cmd, "You should manualy mount the usb drive") != "":
             flash("Remove directory: %s" % mount_point)
             os.rmdir(mount_point)
             return ""

     return mount_point


-@app.route('/status.json')
+@app.route("/status.json")
 def export_backup_status():
     exports = dbstat.query('select * from stats where TYPE="EXPORT" and backup_start>="%s"' % mindate)
     error = ""
-    finish=not runnings_backups()
+    finish = not runnings_backups()
     if get_task() is not None and finish:
         status = get_task().get()
         if status != "ok":
-            error = "Export failing with error: "+status
+            error = "Export failing with error: " + status

-    return jsonify(data=exports,finish=finish,error=error)
+    return jsonify(data=exports, finish=finish, error=error)


 def runnings_backups():
     task = get_task()
-    is_runnig = (task is not None)
-    finish = ( is_runnig and task.get() is not None)
+    is_runnig = task is not None
+    finish = is_runnig and task.get() is not None
     return is_runnig and not finish


-@app.route('/backups.json')
+@app.route("/backups.json")
 def last_backup_json():
     exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC ')
-    return Response(response=json.dumps(exports),
-                    status=200,
-                    mimetype="application/json")
+    return Response(response=json.dumps(exports), status=200, mimetype="application/json")


-@app.route('/last_backups')
+@app.route("/last_backups")
 def last_backup():
     exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC LIMIT 20 ')
     return render_template("last_backups.html", backups=exports)


-@app.route('/export_backup')
+@app.route("/export_backup")
 def export_backup():
     raise_error("", "")
     backup_dict = read_config()
     sections = []
     backup_sections = []
     for backup_types in backup_dict:
         if backup_types == "null_list":
             continue
         for section in backup_dict[backup_types]:
-            #if section.count > 0:
+            # if section.count > 0:
             if len(section) > 0:
                 sections.append(section[1])

-    noJobs = (not runnings_backups())
+    noJobs = not runnings_backups()
     if "start" in list(request.args.keys()) or not noJobs:
-        start=True
+        start = True
         if "sections" in list(request.args.keys()):
-            backup_sections = request.args.getlist('sections')
+            backup_sections = request.args.getlist("sections")
     else:
-        start=False
+        start = False
     cp.read(tisbackup_config_file)

     partition_name = check_usb_disk()
     if partition_name:
         if noJobs:
-            mount_point = check_mount_disk( partition_name, False)
+            mount_point = check_mount_disk(partition_name, False)
         else:
-            mount_point = check_mount_disk( partition_name, True)
+            mount_point = check_mount_disk(partition_name, True)
     if noJobs:
         global mindate
         mindate = datetime2isodate(datetime.datetime.now())
         if not error and start:
             print(tisbackup_config_file)
-            task = run_export_backup(base=backup_base_dir, config_file=CONFIG[config_number], mount_point=mount_point, backup_sections=",".join([str(x) for x in backup_sections]))
+            task = run_export_backup(
+                base=backup_base_dir,
+                config_file=CONFIG[config_number],
+                mount_point=mount_point,
+                backup_sections=",".join([str(x) for x in backup_sections]),
+            )
             set_task(task)

     return render_template("export_backup.html", error=error, start=start, info=info, email=ADMIN_EMAIL, sections=sections)
@@ -458,9 +476,10 @@ def raise_error(strError, strInfo):
     info = strInfo


 if __name__ == "__main__":
     read_config()
     from os import environ
-    if 'WINGDB_ACTIVE' in environ:
+
+    if "WINGDB_ACTIVE" in environ:
         app.debug = False
-    app.run(host= '0.0.0.0',port=8080)
+    app.run(host="0.0.0.0", port=8080)
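
Run sketch for the GUI (module name assumed as `tisbackup_gui.py`, matching the `/etc/tis/tisbackup_gui.ini` config it reads at import time; it serves on 0.0.0.0:8080 per the `app.run` call above):

    python tisbackup_gui.py
    # then e.g. http://<host>:8080/last_backups for the 20 most recent backups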