Compare commits

...

9 Commits

SHA1 Message Date
aa8a68aa80 EOF & whitespace 2024-11-29 00:54:31 +01:00
7fcc5afc64 EOF & whitespace 2024-11-29 00:54:09 +01:00
e7e98d0b47 few fixes and lint compatible 2024-11-29 00:48:59 +01:00
8479c378ee fix basic 2024-11-29 00:32:39 +01:00
274e1e2e59 requirements.txt 2024-11-29 00:02:24 +01:00
eb0bdaedbd fix import 2024-11-28 23:59:02 +01:00
99dc6e0abf fix import 2024-11-28 23:46:48 +01:00
e8ba6df102 fix first pass - .gitignore 2024-11-28 23:21:26 +01:00
ffd9bf3d39 fix first pass 2024-11-28 23:20:19 +01:00
81 changed files with 4779 additions and 4628 deletions


@@ -19,7 +19,6 @@ jobs:
      - run: pip install ruff
      - run: |
          ruff check .
          ruff fix .
      # - uses: stefanzweifel/git-auto-commit-action@v4
      #   with:
      #     commit_message: 'style fixes by ruff'
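
A note on the removed second command: current ruff has no `fix` subcommand (autofixing is spelled `ruff check --fix`), which is presumably why this step failed in CI and the line was dropped. The same pair of invocations, sketched from Python for illustration (assumes ruff is on PATH):

```python
# Illustrative only: the lint step above, driven via subprocess.
# `ruff check --fix .` is the current autofix spelling that replaces
# the removed `ruff fix .`.
import subprocess

subprocess.run(["ruff", "check", "."], check=False)           # report violations
subprocess.run(["ruff", "check", "--fix", "."], check=False)  # apply safe fixes
```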

.gitignore vendored (2 changed lines)

@@ -2,11 +2,13 @@
*.swp
*~
*.pyc
__pycache__/*
/tasks.sqlite
/tasks.sqlite-wal
/srvinstallation
/tasks.sqlite-shm
.idea
.ruff_cache/*
/deb/builddir
/deb/*.deb
/lib
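
Two of the added entries, /tasks.sqlite-wal and /tasks.sqlite-shm, follow from the huey change in config.py further down: huey's SqliteStorage opens tasks.sqlite in WAL journal mode, and SQLite then keeps -wal/-shm sidecar files next to the database, which should never be committed. A standalone sketch of the behaviour:

```python
# Sketch only: why tasks.sqlite-wal / tasks.sqlite-shm appear on disk.
import os
import sqlite3

con = sqlite3.connect("tasks.sqlite")
con.execute("PRAGMA journal_mode=WAL")   # the mode huey's SqliteStorage uses
con.execute("CREATE TABLE IF NOT EXISTS demo (id INTEGER PRIMARY KEY)")
con.commit()
# While a WAL connection is open, the sidecar files sit alongside the db:
print(sorted(f for f in os.listdir(".") if f.startswith("tasks.sqlite")))
con.close()
```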

.hadolint.yml Normal file (13 changed lines)

@@ -0,0 +1,13 @@
failure-threshold: warning
format: tty
ignored:
  - DL3007
override:
  error:
    - DL3015
  warning:
    - DL3015
  info:
    - DL3008
  style:
    - DL3015

.pre-commit-config.yaml Normal file (7 changed lines)

@@ -0,0 +1,7 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
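
These three hooks are the likely source of the "EOF & whitespace" commits listed above. A hedged sketch of driving them from Python; the usual CLI equivalents are `pre-commit install` followed by `pre-commit run --all-files`:

```python
# Assumes the pre-commit package is installed and cwd is the repo root.
import subprocess

result = subprocess.run(["pre-commit", "run", "--all-files"],
                        capture_output=True, text=True)
print(result.stdout)
print("clean" if result.returncode == 0 else "hooks modified files or failed")
```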

Dockerfile Executable file (20 changed lines)

@@ -0,0 +1,20 @@
FROM python:3.12-slim

WORKDIR /opt/tisbackup

COPY entrypoint.sh /entrypoint.sh
COPY . /opt/tisbackup

RUN apt-get update \
    && apt-get install --no-install-recommends -y rsync ssh cron \
    && rm -rf /var/lib/apt/lists/* \
    && /usr/local/bin/python3.12 -m pip install --no-cache-dir -r requirements.txt \
    && mkdir -p /var/spool/cron/crontabs \
    && echo '59 03 * * * root /bin/bash /opt/tisbackup/backup.sh' > /etc/crontab \
    && echo '' >> /etc/crontab \
    && crontab /etc/crontab

EXPOSE 8080

ENTRYPOINT ["/entrypoint.sh"]
CMD ["/usr/local/bin/python3.12","/opt/tisbackup/tisbackup_gui.py"]

compose.yml Executable file (41 changed lines)

@@ -0,0 +1,41 @@
services:
  tisbackup_gui:
    container_name: tisbackup_gui
    image: "tisbackup:latest"
    build: .
    volumes:
      - ./config/:/etc/tis/
      - ./backup/:/backup/
      - /etc/timezone:/etc/timezone:ro
      - /etc/localtime:/etc/localtime:ro
    restart: unless-stopped
    ports:
      - 9980:8080
    deploy:
      resources:
        limits:
          cpus: 0.50
          memory: 512M
        reservations:
          cpus: 0.25
          memory: 128M
  tisbackup_cron:
    container_name: tisbackup_cron
    image: "tisbackup:latest"
    build: .
    volumes:
      - ./config/:/etc/tis/
      - ./ssh/:/config_ssh/
      - ./backup/:/backup/
      - /etc/timezone:/etc/timezone:ro
      - /etc/localtime:/etc/localtime:ro
    restart: always
    command: "/bin/bash /opt/tisbackup/cron.sh"
    deploy:
      resources:
        limits:
          cpus: 0.50
          memory: 512M
        reservations:
          cpus: 0.25
          memory: 128M

config.py Normal file → Executable file (13 changed lines)

@@ -1,10 +1,9 @@
import os,sys
from huey.backends.sqlite_backend import SqliteQueue,SqliteDataStore
from huey.api import Huey, create_task
import os
import sys
from huey.contrib.sql_huey import SqlHuey
from huey.storage import SqliteStorage
tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
tasks_db = os.path.join(tisbackup_root_dir,"tasks.sqlite")
queue = SqliteQueue('tisbackups',tasks_db)
result_store = SqliteDataStore('tisbackups',tasks_db)
huey = Huey(queue,result_store,always_eager=False)
tasks_db = os.path.join(tisbackup_root_dir, "tasks.sqlite")
huey = SqlHuey(name="tisbackups",filename=tasks_db,always_eager=False,storage_class=SqliteStorage)
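
The pre-2.x huey API (SqliteQueue/SqliteDataStore) no longer exists; the replacement wires a SqlHuey instance to SqliteStorage over the same tasks.sqlite file. A minimal hedged sketch of how code consumes this instance (`add` is an invented example; the project's real tasks live in tasks.py, and a separate huey consumer process executes them):

```python
# Minimal sketch, not project code: declaring and enqueuing a task
# against the huey instance defined in config.py.
from config import huey

@huey.task()
def add(a, b):
    return a + b

result = add(2, 3)   # enqueued; a huey consumer runs it out of process
# result.get() is non-blocking and returns None until the task has run;
# result.get(blocking=True, timeout=30) waits for the value instead.
```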

cron.sh Executable file (4 changed lines)

@@ -0,0 +1,4 @@
#!/bin/bash
set -x
echo "Starting cron job for TIS Backup"
cron -f -l 2


@@ -52,5 +52,3 @@ The documentation for tisbackup is here: [tisbackup doc](https://tisbackup.readt
dpkg --force-all --purge tis-tisbackup
apt autoremove
```


@@ -7,4 +7,3 @@ Depends: unzip, ssh, rsync, python3-paramiko, python3-pyvmomi, python3-pexpect,
Maintainer: Tranquil-IT <technique@tranquil.it>
Description: TISBackup backup management
Homepage: https://www.tranquil.it


@@ -32,5 +32,3 @@ rsync -aP ../samples/tisbackup-config.ini.sample ./builddir/etc/tis/tisbackup-c
chmod 755 ./builddir/opt/tisbackup/tisbackup.py
dpkg-deb --build builddir tis-tisbackup-1-${VERSION}.deb


@@ -293,5 +293,3 @@ function splitQuery(query) {
}
return result;
}

entrypoint.sh Executable file (7 changed lines)

@@ -0,0 +1,7 @@
#!/bin/sh

env >> /etc/environment

# execute CMD
echo "$@"
exec "$@"


@@ -55,11 +55,12 @@
# --------------------------------------------------------------------
import gettext
import six.moves.xmlrpc_client as xmlrpclib
import six.moves.http_client as httplib
import socket
import sys
import six.moves.http_client as httplib
import six.moves.xmlrpc_client as xmlrpclib
translation = gettext.translation('xen-xm', fallback = True)
API_VERSION_1_1 = '1.1'
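
Most of the Python hunks below repeat this same change: the lint pass sorts imports isort-style, and since the compare view prints both sides, each file shows its import block twice (old order, then new). The convention in miniature (a sketch; six stands in for any of the project's third-party dependencies):

```python
# 1) standard library, alphabetized
import os
import sys

# 2) third-party packages
import six

# 3) local packages come last, e.g. `from libtisbackup.common import *`
print(os.sep, sys.platform, six.__version__)
```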


@@ -15,4 +15,3 @@
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------


@@ -21,6 +21,7 @@
import sys
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
    import paramiko
@@ -32,6 +33,7 @@ sys.stderr = sys.__stderr__
from libtisbackup.common import *
class backup_mysql(backup_generic):
    """Backup a mysql database as gzipped sql file through ssh"""
    type = 'mysql+ssh'


@@ -18,8 +18,9 @@
#
# -----------------------------------------------------------------------
import os
import datetime
import os
from .common import *
@@ -48,4 +49,3 @@ register_driver(backup_null)
if __name__=='__main__':
    pass


@@ -18,6 +18,7 @@
#
# -----------------------------------------------------------------------
import sys
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
    import paramiko
@@ -27,12 +28,14 @@ except ImportError as e:
sys.stderr = sys.__stderr__
import datetime
import base64
import datetime
import os
from libtisbackup.common import *
import re
from libtisbackup.common import *
class backup_oracle(backup_generic):
    """Backup a oracle database as zipped file through ssh"""
    type = 'oracle+ssh'


@@ -18,6 +18,7 @@
#
# -----------------------------------------------------------------------
import sys
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
    import paramiko
@@ -29,6 +30,7 @@ sys.stderr = sys.__stderr__
from .common import *
class backup_pgsql(backup_generic):
    """Backup a postgresql database as gzipped sql file through ssh"""
    type = 'pgsql+ssh'


@@ -18,14 +18,14 @@
#
# -----------------------------------------------------------------------
import os
import datetime
from libtisbackup.common import *
import time
import logging
import re
import os
import os.path
import datetime
import re
import time
from libtisbackup.common import *
class backup_rsync(backup_generic):
@@ -341,4 +341,3 @@ if __name__=='__main__':
    b.read_config(cp)
    b.process_backup()
    print((b.checknagios()))


@@ -18,14 +18,13 @@
#
# -----------------------------------------------------------------------
import os
import datetime
from .common import *
import time
import logging
import re
import os
import os.path
import datetime
import re
import time
from .common import *
@@ -359,4 +358,3 @@ if __name__=='__main__':
    b.read_config(cp)
    b.process_backup()
    print((b.checknagios()))


@@ -21,6 +21,7 @@
import sys
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
    import paramiko
@@ -32,6 +33,7 @@ sys.stderr = sys.__stderr__
from .common import *
class backup_samba4(backup_generic):
    """Backup a samba4 databases as gzipped tdbs file through ssh"""
    type = 'samba4'


@@ -21,6 +21,7 @@
import sys
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
    import paramiko
@@ -30,11 +31,13 @@ except ImportError as e:
sys.stderr = sys.__stderr__
import datetime
import base64
import datetime
import os
from .common import *
class backup_sqlserver(backup_generic):
    """Backup a SQLSERVER database as gzipped sql file through ssh"""
    type = 'sqlserver+ssh'


@@ -18,23 +18,26 @@
#
# -----------------------------------------------------------------------
import os
import datetime
from .common import *
from . import XenAPI
import time
import logging
import re
import os.path
import datetime
import select
import urllib.request, urllib.error, urllib.parse, urllib.request, urllib.parse, urllib.error
import base64
import datetime
import logging
import os
import os.path
import re
import select
import socket
import requests
import pexpect
import time
import urllib.error
import urllib.parse
import urllib.request
from stat import *
import pexpect
import requests
from . import XenAPI
from .common import *
class backup_switch(backup_generic):
    """Backup a startup-config on a switch"""
@@ -259,4 +262,3 @@ if __name__=='__main__':
    cp.read('/opt/tisbackup/configtest.ini')
    b = backup_xva()
    b.read_config(cp)


@@ -18,25 +18,25 @@
#
# -----------------------------------------------------------------------
from .common import *
import pyVmomi
from pyVmomi import vim
from pyVmomi import vmodl
from pyVim.connect import SmartConnect, Disconnect
from datetime import datetime, date, timedelta
import atexit
import getpass
from datetime import date, datetime, timedelta
import pyVmomi
import requests
from pyVim.connect import Disconnect, SmartConnect
from pyVmomi import vim, vmodl
# Disable HTTPS verification warnings.
from requests.packages import urllib3
from .common import *
urllib3.disable_warnings()
import os
import time
import tarfile
import re
import tarfile
import time
import xml.etree.ElementTree as ET
from stat import *
@@ -279,4 +279,3 @@ class backup_vmdk(backup_generic):
register_driver(backup_vmdk)


@@ -20,9 +20,11 @@
from .common import *
import paramiko
from .common import *
class backup_xcp_metadata(backup_generic):
    """Backup metatdata of a xcp pool using xe pool-dump-database"""
    type = 'xcp-dump-metadata'


@@ -18,20 +18,23 @@
#
# -----------------------------------------------------------------------
import logging
import re
import os
import datetime
import urllib.request, urllib.parse, urllib.error
import socket
import tarfile
import hashlib
from stat import *
import logging
import os
import re
import socket
import ssl
import tarfile
import urllib.error
import urllib.parse
import urllib.request
from stat import *
import requests
from .common import *
from . import XenAPI
from .common import *
if hasattr(ssl, '_create_unverified_context'):
    ssl._create_default_https_context = ssl._create_unverified_context


@@ -18,19 +18,19 @@
#
# -----------------------------------------------------------------------
from abc import ABC, abstractmethod
import os
import subprocess
import re
import logging
import datetime
import time
from iniparse import ConfigParser
import sqlite3
import shutil
import logging
import os
import re
import select
import shutil
import sqlite3
import subprocess
import sys
import time
from abc import ABC, abstractmethod
from iniparse import ConfigParser
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko


@@ -18,22 +18,24 @@
#
# -----------------------------------------------------------------------
import os
import datetime
from .common import *
from . import XenAPI
import time
import logging
import re
import os.path
import os
import datetime
import select
import urllib.request, urllib.error, urllib.parse
import base64
import datetime
import logging
import os
import os.path
import re
import select
import socket
from stat import *
import ssl
import time
import urllib.error
import urllib.parse
import urllib.request
from stat import *
from . import XenAPI
from .common import *
if hasattr(ssl, '_create_unverified_context'):
    ssl._create_default_https_context = ssl._create_unverified_context


@@ -3,18 +3,16 @@
# Copyright (c) 2007 Tim Lauridsen <tla@rasmil.dk>
# All Rights Reserved. See LICENSE-PSF & LICENSE for details.
from .ini import INIConfig, change_comment_syntax
from .compat import ConfigParser, RawConfigParser, SafeConfigParser
from .config import BasicConfig, ConfigNamespace
from .compat import RawConfigParser, ConfigParser, SafeConfigParser
from .configparser import (DEFAULTSECT, MAX_INTERPOLATION_DEPTH,
DuplicateSectionError, InterpolationDepthError,
InterpolationMissingOptionError,
InterpolationSyntaxError, NoOptionError,
NoSectionError)
from .ini import INIConfig, change_comment_syntax
from .utils import tidy
from .configparser import DuplicateSectionError, \
NoSectionError, NoOptionError, \
InterpolationMissingOptionError, \
InterpolationDepthError, \
InterpolationSyntaxError, \
DEFAULTSECT, MAX_INTERPOLATION_DEPTH
__all__ = [
'BasicConfig', 'ConfigNamespace',
'INIConfig', 'tidy', 'change_comment_syntax',


@@ -12,21 +12,18 @@ The underlying INIConfig object can be accessed as cfg.data
"""
import re
from .configparser import DuplicateSectionError, \
NoSectionError, NoOptionError, \
InterpolationMissingOptionError, \
InterpolationDepthError, \
InterpolationSyntaxError, \
DEFAULTSECT, MAX_INTERPOLATION_DEPTH
# These are imported only for compatiability.
# The code below does not reference them directly.
from .configparser import Error, InterpolationError, \
MissingSectionHeaderError, ParsingError
import six
from . import ini
# These are imported only for compatiability.
# The code below does not reference them directly.
from .configparser import (DEFAULTSECT, MAX_INTERPOLATION_DEPTH,
DuplicateSectionError, Error,
InterpolationDepthError, InterpolationError,
InterpolationMissingOptionError,
InterpolationSyntaxError, MissingSectionHeaderError,
NoOptionError, NoSectionError, ParsingError)
class RawConfigParser(object):


@@ -1,6 +1,6 @@
try:
    from ConfigParser import *
    # not all objects get imported with __all__
    from ConfigParser import *
    from ConfigParser import Error, InterpolationMissingOptionError
except ImportError:
    from configparser import *


@@ -42,11 +42,11 @@ Example:
# Backward-compatiable with ConfigParser
import re
from .configparser import DEFAULTSECT, ParsingError, MissingSectionHeaderError
import six
from . import config
from .configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
class LineType(object):


@@ -1,5 +1,5 @@
from . import compat
from .ini import LineContainer, EmptyLine
from .ini import EmptyLine, LineContainer
def tidy(cfg):

pyproject.toml Normal file (10 changed lines)

@@ -0,0 +1,10 @@
[tool.black]
line-length = 140

[tool.ruff]
# Allow lines to be as long as 120.
line-length = 140
indent-width = 4

[tool.ruff.lint]
ignore = ["F401","F403","F405","E402"]
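
The four ignored rules map directly onto this codebase: F401/F403/F405 tolerate the pervasive `from libtisbackup.common import *` wildcard imports, and E402 tolerates the sys.path manipulation that tisbackup.py performs before its imports. An illustration (not project code) of what each ignored rule would normally flag:

```python
# Each statement below trips one of the rules this config ignores.
import os            # F401: imported but unused
from glob import *   # F403: wildcard import

print(glob("*.py"))  # F405: `glob` may be undefined, it came from a star import

import sys           # E402: module-level import not at the top of the file
print(sys.version)
```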

requirements.txt Normal file → Executable file (7 changed lines)

@@ -1,3 +1,10 @@
six
requests
paramiko
pexpect
flask
simplejson
huey
iniparse
redis
peewee
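
The three new entries at the bottom track the queueing changes elsewhere in this compare: huey is the task queue itself, redis backs the RedisHuey instance in tasks.py, and peewee is pulled in by huey's contrib sql_huey module imported in config.py. A quick sanity-check sketch (illustrative, not project code):

```python
# Verify the named runtime dependencies resolve in the current environment.
import importlib.util

for mod in ("six", "requests", "paramiko", "pexpect", "flask",
            "simplejson", "huey", "iniparse", "redis", "peewee"):
    print(f"{mod}: {'ok' if importlib.util.find_spec(mod) else 'MISSING'}")
```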


@@ -12,4 +12,3 @@ echo $VERSION > __VERSION__
rpmbuild -bb --buildroot $PWD/builddir -v --clean tis-tisbackup.spec
cp RPMS/*/*.rpm .


@@ -14,5 +14,3 @@ else
sleep 3
fi
echo $(date +%Y-%m-%d\ %H:%M:%S) : Fin Export TISBackup sur Disque USB : $target >> /var/log/tisbackup.log


@@ -95,4 +95,3 @@ maximum_backup_age=30
;type=xcp-dump-metadata
;server_name=srvxen1
;private_key=/root/.ssh/id_rsa


@@ -18,4 +18,3 @@ password_file=/home/homes/ssamson/tisbackup-pra/xen_passwd
network_name=net-test
#start_vm=no
#max_copies=3


@@ -4,4 +4,3 @@
# m h dom mon dow user command
30 22 * * * root /opt/tisbackup/tisbackup.py -c /etc/tis/tisbackup-config.ini backup >> /var/log/tisbackup.log 2>&1
30 12 * * * root /opt/tisbackup/tisbackup.py -c /etc/tis/tisbackup-config.ini cleanup >> /var/log/tisbackup.log 2>&1


@@ -95,4 +95,3 @@ case "$1" in
esac
exit 0


@@ -95,4 +95,3 @@ case "$1" in
esac
exit 0


@@ -14948,4 +14948,3 @@
}));
}(window, document));


@@ -1,6 +1,8 @@
from huey import RedisHuey
import os
import logging
import os
from huey import RedisHuey
from tisbackup import tis_backup
huey = RedisHuey('tisbackup', host='localhost')


@@ -98,4 +98,3 @@
</script>
</body></html>


@@ -18,41 +18,43 @@
#
# -----------------------------------------------------------------------
import datetime
import subprocess
import os,sys
import os
import sys
from os.path import isfile, join
tisbackup_root_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0,os.path.join(tisbackup_root_dir,'lib'))
sys.path.insert(0,os.path.join(tisbackup_root_dir,'libtisbackup'))
sys.path.insert(0, os.path.join(tisbackup_root_dir, "lib"))
sys.path.insert(0, os.path.join(tisbackup_root_dir, "libtisbackup"))
from iniparse import ini,ConfigParser
from optparse import OptionParser
import re
import getopt
import os.path
import logging
import errno
from libtisbackup.common import *
import logging
import os.path
from optparse import OptionParser
from iniparse import ConfigParser, ini
from libtisbackup.backup_mysql import backup_mysql
from libtisbackup.backup_rsync import backup_rsync
from libtisbackup.backup_rsync import backup_rsync_ssh
#from libtisbackup.backup_oracle import backup_oracle
from libtisbackup.backup_rsync_btrfs import backup_rsync_btrfs
from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh
from libtisbackup.backup_pgsql import backup_pgsql
from libtisbackup.backup_xva import backup_xva
#from libtisbackup.backup_vmdk import backup_vmdk
#from libtisbackup.backup_switch import backup_switch
# from libtisbackup.backup_vmdk import backup_vmdk
# from libtisbackup.backup_switch import backup_switch
from libtisbackup.backup_null import backup_null
from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
from libtisbackup.copy_vm_xcp import copy_vm_xcp
#from libtisbackup.backup_sqlserver import backup_sqlserver
from libtisbackup.backup_pgsql import backup_pgsql
from libtisbackup.backup_rsync import backup_rsync, backup_rsync_ssh
# from libtisbackup.backup_oracle import backup_oracle
from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh, backup_rsync_btrfs
# from libtisbackup.backup_sqlserver import backup_sqlserver
from libtisbackup.backup_samba4 import backup_samba4
from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
from libtisbackup.backup_xva import backup_xva
from libtisbackup.common import *
from libtisbackup.copy_vm_xcp import copy_vm_xcp
__version__="2.0"
__version__ = "2.0"
usage="""\
usage = """\
%prog -c configfile action
TIS Files Backup system.
@@ -67,52 +69,75 @@ action is either :
exportbackup : copy lastest OK backups from local to location defned by --exportdir parameter
register_existing : scan backup directories and add missing backups to database"""
version="VERSION"
version = "VERSION"
parser = OptionParser(usage=usage, version="%prog " + version)
parser.add_option(
"-c", "--config", dest="config", default="/etc/tis/tisbackup-config.ini", help="Config file full path (default: %default)"
)
parser.add_option("-d", "--dry-run", dest="dry_run", default=False, action="store_true", help="Dry run (default: %default)")
parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true", help="More information (default: %default)")
parser.add_option(
"-s", "--sections", dest="sections", default="", help="Comma separated list of sections (backups) to process (default: All)"
)
parser.add_option(
"-l",
"--loglevel",
dest="loglevel",
default="info",
type="choice",
choices=["debug", "warning", "info", "error", "critical"],
metavar="LOGLEVEL",
help="Loglevel (default: %default)",
)
parser.add_option("-n", "--len", dest="statscount", default=30, type="int", help="Number of lines to list for dumpstat (default: %default)")
parser.add_option(
"-b",
"--backupdir",
dest="backup_base_dir",
default="",
help="Base directory for all backups (default: [global] backup_base_dir in config file)",
)
parser.add_option(
"-x", "--exportdir", dest="exportdir", default="", help="Directory where to export latest backups with exportbackup (nodefault)"
)
parser=OptionParser(usage=usage,version="%prog " + version)
parser.add_option("-c","--config", dest="config", default='/etc/tis/tisbackup-config.ini', help="Config file full path (default: %default)")
parser.add_option("-d","--dry-run", dest="dry_run", default=False, action='store_true', help="Dry run (default: %default)")
parser.add_option("-v","--verbose", dest="verbose", default=False, action='store_true', help="More information (default: %default)")
parser.add_option("-s","--sections", dest="sections", default='', help="Comma separated list of sections (backups) to process (default: All)")
parser.add_option("-l","--loglevel", dest="loglevel", default='info', type='choice', choices=['debug','warning','info','error','critical'], metavar='LOGLEVEL',help="Loglevel (default: %default)")
parser.add_option("-n","--len", dest="statscount", default=30, type='int', help="Number of lines to list for dumpstat (default: %default)")
parser.add_option("-b","--backupdir", dest="backup_base_dir", default='', help="Base directory for all backups (default: [global] backup_base_dir in config file)")
parser.add_option("-x","--exportdir", dest="exportdir", default='', help="Directory where to export latest backups with exportbackup (nodefault)")
class tis_backup:
logger = logging.getLogger('tisbackup')
logger = logging.getLogger("tisbackup")
def __init__(self,dry_run=False,verbose=False,backup_base_dir=''):
def __init__(self, dry_run=False, verbose=False, backup_base_dir=""):
self.dry_run = dry_run
self.verbose = verbose
self.backup_base_dir = backup_base_dir
self.backup_base_dir = ''
self.backup_base_dir = ""
self.backup_list = []
self.dry_run = dry_run
self.verbose=False
self.verbose = False
def read_ini_file(self,filename):
def read_ini_file(self, filename):
ini.change_comment_syntax()
cp = ConfigParser()
cp.read(filename)
if not self.backup_base_dir:
self.backup_base_dir = cp.get('global','backup_base_dir')
self.backup_base_dir = cp.get("global", "backup_base_dir")
if not os.path.isdir(self.backup_base_dir):
self.logger.info('Creating backup directory %s' % self.backup_base_dir)
self.logger.info("Creating backup directory %s" % self.backup_base_dir)
os.makedirs(self.backup_base_dir)
self.logger.debug("backup directory : " + self.backup_base_dir)
self.dbstat = BackupStat(os.path.join(self.backup_base_dir,'log','tisbackup.sqlite'))
self.dbstat = BackupStat(os.path.join(self.backup_base_dir, "log", "tisbackup.sqlite"))
for section in cp.sections():
if (section != 'global'):
if section != "global":
self.logger.debug("reading backup config " + section)
backup_item = None
type = cp.get(section,'type')
type = cp.get(section, "type")
backup_item = backup_drivers[type](backup_name=section,
backup_dir=os.path.join(self.backup_base_dir,section),dbstat=self.dbstat,dry_run=self.dry_run)
backup_item = backup_drivers[type](
backup_name=section, backup_dir=os.path.join(self.backup_base_dir, section), dbstat=self.dbstat, dry_run=self.dry_run
)
backup_item.read_config(cp)
backup_item.verbose = self.verbose
@@ -122,35 +147,34 @@ class tis_backup:
# TODO socket.gethostbyaddr('64.236.16.20')
# TODO limit backup to one backup on the command line
def checknagios(self,sections=[]):
def checknagios(self, sections=[]):
try:
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.debug('Start of check nagios for %s' % (','.join(sections),))
self.logger.debug("Start of check nagios for %s" % (",".join(sections),))
try:
worst_nagiosstatus = None
ok = []
warning = []
critical = []
unknown = []
nagiosoutput = ''
nagiosoutput = ""
for backup_item in self.backup_list:
if not sections or backup_item.backup_name in sections:
(nagiosstatus,log) = backup_item.checknagios()
(nagiosstatus, log) = backup_item.checknagios()
if nagiosstatus == nagiosStateCritical:
critical.append((backup_item.backup_name,log))
elif nagiosstatus == nagiosStateWarning :
warning.append((backup_item.backup_name,log))
critical.append((backup_item.backup_name, log))
elif nagiosstatus == nagiosStateWarning:
warning.append((backup_item.backup_name, log))
elif nagiosstatus == nagiosStateOk:
ok.append((backup_item.backup_name,log))
ok.append((backup_item.backup_name, log))
else:
unknown.append((backup_item.backup_name,log))
self.logger.debug('[%s] nagios:"%i" log: %s',backup_item.backup_name,nagiosstatus,log)
unknown.append((backup_item.backup_name, log))
self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)
if not ok and not critical and not unknown and not warning:
self.logger.debug('Nothing processed')
self.logger.debug("Nothing processed")
worst_nagiosstatus = nagiosStateUnknown
nagiosoutput = 'UNKNOWN : Unknown backup sections "%s"' % sections
@@ -159,156 +183,154 @@ class tis_backup:
if unknown:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateUnknown
nagiosoutput = 'UNKNOWN status backups %s' % (','.join([b[0] for b in unknown]))
nagiosoutput = "UNKNOWN status backups %s" % (",".join([b[0] for b in unknown]))
globallog.extend(unknown)
if critical:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateCritical
nagiosoutput = 'CRITICAL backups %s' % (','.join([b[0] for b in critical]))
nagiosoutput = "CRITICAL backups %s" % (",".join([b[0] for b in critical]))
globallog.extend(critical)
if warning:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateWarning
nagiosoutput = 'WARNING backups %s' % (','.join([b[0] for b in warning]))
nagiosoutput = "WARNING backups %s" % (",".join([b[0] for b in warning]))
globallog.extend(warning)
if ok:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateOk
nagiosoutput = 'OK backups %s' % (','.join([b[0] for b in ok]))
nagiosoutput = "OK backups %s" % (",".join([b[0] for b in ok]))
globallog.extend(ok)
if worst_nagiosstatus == nagiosStateOk:
nagiosoutput = 'ALL backups OK %s' % (','.join(sections))
nagiosoutput = "ALL backups OK %s" % (",".join(sections))
except BaseException as e:
worst_nagiosstatus = nagiosStateCritical
nagiosoutput = 'EXCEPTION',"Critical : %s" % str(e)
nagiosoutput = "EXCEPTION", "Critical : %s" % str(e)
raise
finally:
self.logger.debug('worst nagios status :"%i"',worst_nagiosstatus)
print('%s (tisbackup V%s)' %(nagiosoutput,version))
print('\n'.join(["[%s]:%s" % (l[0],l[1]) for l in globallog]))
self.logger.debug('worst nagios status :"%i"', worst_nagiosstatus)
print("%s (tisbackup V%s)" % (nagiosoutput, version))
print("\n".join(["[%s]:%s" % (log_elem[0], log_elem[1]) for log_elem in globallog]))
sys.exit(worst_nagiosstatus)
def process_backup(self,sections=[]):
def process_backup(self, sections=[]):
processed = []
errors = []
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Processing backup for %s' % (','.join(sections)) )
self.logger.info("Processing backup for %s" % (",".join(sections)))
for backup_item in self.backup_list:
if not sections or backup_item.backup_name in sections:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing [%s]',(backup_item.backup_name))
assert isinstance(backup_item, backup_generic)
self.logger.info("Processing [%s]", (backup_item.backup_name))
stats = backup_item.process_backup()
processed.append((backup_item.backup_name,stats))
processed.append((backup_item.backup_name, stats))
except BaseException as e:
self.logger.critical('Backup [%s] processed with error : %s',backup_item.backup_name,e)
errors.append((backup_item.backup_name,str(e)))
self.logger.critical("Backup [%s] processed with error : %s", backup_item.backup_name, e)
errors.append((backup_item.backup_name, str(e)))
if not processed and not errors:
self.logger.critical('No backup properly finished or processed')
self.logger.critical("No backup properly finished or processed")
else:
if processed:
self.logger.info('Backup processed : %s' , ",".join([b[0] for b in processed]))
self.logger.info("Backup processed : %s", ",".join([b[0] for b in processed]))
if errors:
self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))
def export_backups(self,sections=[],exportdir=''):
def export_backups(self, sections=[], exportdir=""):
processed = []
errors = []
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Exporting OK backups for %s to %s' % (','.join(sections),exportdir) )
self.logger.info("Exporting OK backups for %s to %s" % (",".join(sections), exportdir))
for backup_item in self.backup_list:
if backup_item.backup_name in sections:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing [%s]',(backup_item.backup_name))
assert isinstance(backup_item, backup_generic)
self.logger.info("Processing [%s]", (backup_item.backup_name))
stats = backup_item.export_latestbackup(destdir=exportdir)
processed.append((backup_item.backup_name,stats))
processed.append((backup_item.backup_name, stats))
except BaseException as e:
self.logger.critical('Export Backup [%s] processed with error : %s',backup_item.backup_name,e)
errors.append((backup_item.backup_name,str(e)))
self.logger.critical("Export Backup [%s] processed with error : %s", backup_item.backup_name, e)
errors.append((backup_item.backup_name, str(e)))
if not processed and not errors:
self.logger.critical('No export backup properly finished or processed')
self.logger.critical("No export backup properly finished or processed")
else:
if processed:
self.logger.info('Export Backups processed : %s' , ",".join([b[0] for b in processed]))
self.logger.info("Export Backups processed : %s", ",".join([b[0] for b in processed]))
if errors:
self.logger.error('Export Backups processed with errors: %s' , ",".join([b[0] for b in errors]))
self.logger.error("Export Backups processed with errors: %s", ",".join([b[0] for b in errors]))
def retry_failed_backups(self,maxage_hours=30):
def retry_failed_backups(self, maxage_hours=30):
processed = []
errors = []
# before mindate, backup is too old
mindate = datetime2isodate((datetime.datetime.now() - datetime.timedelta(hours=maxage_hours)))
failed_backups = self.dbstat.query("""\
failed_backups = self.dbstat.query(
"""\
select distinct backup_name as bname
from stats
where status="OK" and backup_start>=?""",(mindate,))
defined_backups = list(map(lambda f:f.backup_name, [ x for x in self.backup_list if not isinstance(x, backup_null) ]))
failed_backups_names = set(defined_backups) - set([b['bname'] for b in failed_backups if b['bname'] in defined_backups])
where status="OK" and backup_start>=?""",
(mindate,),
)
defined_backups = list(map(lambda f: f.backup_name, [x for x in self.backup_list if not isinstance(x, backup_null)]))
failed_backups_names = set(defined_backups) - set([b["bname"] for b in failed_backups if b["bname"] in defined_backups])
if failed_backups_names:
self.logger.info('Processing backup for %s',','.join(failed_backups_names))
self.logger.info("Processing backup for %s", ",".join(failed_backups_names))
for backup_item in self.backup_list:
if backup_item.backup_name in failed_backups_names:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing [%s]',(backup_item.backup_name))
assert isinstance(backup_item, backup_generic)
self.logger.info("Processing [%s]", (backup_item.backup_name))
stats = backup_item.process_backup()
processed.append((backup_item.backup_name,stats))
processed.append((backup_item.backup_name, stats))
except BaseException as e:
self.logger.critical('Backup [%s] not processed, error : %s',backup_item.backup_name,e)
errors.append((backup_item.backup_name,str(e)))
self.logger.critical("Backup [%s] not processed, error : %s", backup_item.backup_name, e)
errors.append((backup_item.backup_name, str(e)))
if not processed and not errors:
self.logger.critical('No backup properly finished or processed')
self.logger.critical("No backup properly finished or processed")
else:
if processed:
self.logger.info('Backup processed : %s' , ",".join([b[0] for b in errors]))
self.logger.info("Backup processed : %s", ",".join([b[0] for b in errors]))
if errors:
self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))
else:
self.logger.info('No recent failed backups found in database')
self.logger.info("No recent failed backups found in database")
def cleanup_backup_section(self,sections = []):
log = ''
def cleanup_backup_section(self, sections=[]):
processed = False
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Processing cleanup for %s' % (','.join(sections)) )
self.logger.info("Processing cleanup for %s" % (",".join(sections)))
for backup_item in self.backup_list:
if backup_item.backup_name in sections:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing cleanup of [%s]',(backup_item.backup_name))
assert isinstance(backup_item, backup_generic)
self.logger.info("Processing cleanup of [%s]", (backup_item.backup_name))
backup_item.cleanup_backup()
processed = True
except BaseException as e:
self.logger.critical('Cleanup of [%s] not processed, error : %s',backup_item.backup_name,e)
self.logger.critical("Cleanup of [%s] not processed, error : %s", backup_item.backup_name, e)
if not processed:
self.logger.critical('No cleanup properly finished or processed')
self.logger.critical("No cleanup properly finished or processed")
def register_existingbackups(self,sections = []):
def register_existingbackups(self, sections=[]):
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Append existing backups to database...')
self.logger.info("Append existing backups to database...")
for backup_item in self.backup_list:
if backup_item.backup_name in sections:
backup_item.register_existingbackups()
@@ -316,26 +338,26 @@ class tis_backup:
def html_report(self):
for backup_item in self.backup_list:
if not section or section == backup_item.backup_name:
assert(isinstance(backup_item,backup_generic))
assert isinstance(backup_item, backup_generic)
if not maxage_hours:
maxage_hours = backup_item.maximum_backup_age
(nagiosstatus,log) = backup_item.checknagios(maxage_hours=maxage_hours)
globallog.append('[%s] %s' % (backup_item.backup_name,log))
self.logger.debug('[%s] nagios:"%i" log: %s',backup_item.backup_name,nagiosstatus,log)
processed = True
if nagiosstatus >= worst_nagiosstatus:
worst_nagiosstatus = nagiosstatus
(nagiosstatus, log) = backup_item.checknagios(maxage_hours=maxage_hours)
globallog.append("[%s] %s" % (backup_item.backup_name, log))
self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)
# processed = True
# if nagiosstatus >= worst_nagiosstatus:
# worst_nagiosstatus = nagiosstatus
def main():
(options,args)=parser.parse_args()
(options, args) = parser.parse_args()
if len(args) != 1:
print("ERROR : You must provide one action to perform")
parser.print_usage()
sys.exit(2)
backup_start_date = datetime.datetime.now().strftime('%Y%m%d-%Hh%Mm%S')
backup_start_date = datetime.datetime.now().strftime("%Y%m%d-%Hh%Mm%S")
# options
action = args[0]
@@ -344,23 +366,23 @@ def main():
print(backup_drivers[t].get_help())
sys.exit(0)
config_file =options.config
config_file = options.config
dry_run = options.dry_run
verbose = options.verbose
loglevel = options.loglevel
# setup Logger
logger = logging.getLogger('tisbackup')
logger = logging.getLogger("tisbackup")
hdlr = logging.StreamHandler()
hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(hdlr)
# set loglevel
if loglevel in ('debug','warning','info','error','critical'):
if loglevel in ("debug", "warning", "info", "error", "critical"):
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
raise ValueError("Invalid log level: %s" % loglevel)
logger.setLevel(numeric_level)
# Config file
@@ -371,36 +393,36 @@ def main():
cp = ConfigParser()
cp.read(config_file)
backup_base_dir = options.backup_base_dir or cp.get('global','backup_base_dir')
log_dir = os.path.join(backup_base_dir,'log')
backup_base_dir = options.backup_base_dir or cp.get("global", "backup_base_dir")
log_dir = os.path.join(backup_base_dir, "log")
if not os.path.exists(log_dir):
os.makedirs(log_dir)
# if we run the nagios check, we don't create log file, everything is piped to stdout
if action!='checknagios':
if action != "checknagios":
try:
hdlr = logging.FileHandler(os.path.join(log_dir,'tisbackup_%s.log' % (backup_start_date)))
hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
hdlr = logging.FileHandler(os.path.join(log_dir, "tisbackup_%s.log" % (backup_start_date)))
hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(hdlr)
except IOError as e:
if action == 'cleanup' and e.errno == errno.ENOSPC:
if action == "cleanup" and e.errno == errno.ENOSPC:
logger.warning("No space left on device, disabling file logging.")
else:
raise e
# Main
backup = tis_backup(dry_run=dry_run,verbose=verbose,backup_base_dir=backup_base_dir)
backup = tis_backup(dry_run=dry_run, verbose=verbose, backup_base_dir=backup_base_dir)
backup.read_ini_file(config_file)
backup_sections = options.sections.split(',') if options.sections else []
backup_sections = options.sections.split(",") if options.sections else []
all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
if not backup_sections:
backup_sections = all_sections
else:
for b in backup_sections:
if not b in all_sections:
raise Exception('Section %s is not defined in config file' % b)
if b not in all_sections:
raise Exception("Section %s is not defined in config file" % b)
if dry_run:
logger.warning("WARNING : DRY RUN, nothing will be done, just printing on screen...")
@@ -409,23 +431,22 @@ def main():
backup.process_backup(backup_sections)
elif action == "exportbackup":
if not options.exportdir:
raise Exception('No export directory supplied dor exportbackup action')
backup.export_backups(backup_sections,options.exportdir)
raise Exception("No export directory supplied dor exportbackup action")
backup.export_backups(backup_sections, options.exportdir)
elif action == "cleanup":
backup.cleanup_backup_section(backup_sections)
elif action == "checknagios":
backup.checknagios(backup_sections)
elif action == "dumpstat":
for s in backup_sections:
backup.dbstat.last_backups(s,count=options.statscount)
backup.dbstat.last_backups(s, count=options.statscount)
elif action == "retryfailed":
backup.retry_failed_backups()
elif action == "register_existing":
backup.register_existingbackups(backup_sections)
else:
logger.error('Unhandled action "%s", quitting...',action)
logger.error('Unhandled action "%s", quitting...', action)
sys.exit(1)
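
One behavioural detail of checknagios() above is worth spelling out: the worst status is chosen by a fixed cascade in which UNKNOWN outranks CRITICAL, then WARNING, then OK, and that state becomes the process exit code. A compact model (a sketch, not project code; the constant names mirror libtisbackup.common, and 0/1/2/3 are the standard Nagios values):

```python
# Sketch: the precedence implemented by the if-cascade in
# tis_backup.checknagios(), whose result feeds sys.exit().
nagiosStateOk, nagiosStateWarning, nagiosStateCritical, nagiosStateUnknown = 0, 1, 2, 3

def worst_status(statuses):
    for state in (nagiosStateUnknown, nagiosStateCritical, nagiosStateWarning):
        if state in statuses:
            return state
    return nagiosStateOk

assert worst_status([nagiosStateOk, nagiosStateWarning]) == nagiosStateWarning
assert worst_status([nagiosStateCritical, nagiosStateUnknown]) == nagiosStateUnknown
```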


@@ -17,89 +17,89 @@
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os,sys
import os
import sys
from os.path import isfile, join
tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
sys.path.append(os.path.join(tisbackup_root_dir,'lib'))
sys.path.append(os.path.join(tisbackup_root_dir,'libtisbackup'))
sys.path.append(os.path.join(tisbackup_root_dir, "lib"))
sys.path.append(os.path.join(tisbackup_root_dir, "libtisbackup"))
from shutil import *
from iniparse import ConfigParser,RawConfigParser
from libtisbackup.common import *
import time
from flask import request, Flask, session, g, appcontext_pushed, redirect, url_for, abort, render_template, flash, jsonify, Response
from urllib.parse import urlparse
import json
import glob
import time
from config import huey
from tasks import run_export_backup, get_task, set_task
from tisbackup import tis_backup
import json
import logging
import re
import time
from shutil import *
from urllib.parse import urlparse
from flask import Flask, Response, abort, appcontext_pushed, flash, g, jsonify, redirect, render_template, request, session, url_for
from iniparse import ConfigParser, RawConfigParser
from config import huey
from libtisbackup.common import *
from tasks import get_task, run_export_backup, set_task
from tisbackup import tis_backup
cp = ConfigParser()
cp.read("/etc/tis/tisbackup_gui.ini")
CONFIG = cp.get('general','config_tisbackup').split(",")
SECTIONS = cp.get('general','sections')
ADMIN_EMAIL = cp.get('general','ADMIN_EMAIL')
BASE_DIR = cp.get('general','base_config_dir')
CONFIG = cp.get("general", "config_tisbackup").split(",")
SECTIONS = cp.get("general", "sections")
ADMIN_EMAIL = cp.get("general", "ADMIN_EMAIL")
BASE_DIR = cp.get("general", "base_config_dir")
tisbackup_config_file= CONFIG[0]
config_number=0
tisbackup_config_file = CONFIG[0]
config_number = 0
cp = ConfigParser()
cp.read(tisbackup_config_file)
backup_base_dir = cp.get('global','backup_base_dir')
dbstat = BackupStat(os.path.join(backup_base_dir,'log','tisbackup.sqlite'))
backup_base_dir = cp.get("global", "backup_base_dir")
dbstat = BackupStat(os.path.join(backup_base_dir, "log", "tisbackup.sqlite"))
mindate = None
error = None
info = None
app = Flask(__name__)
app.secret_key = 'fsiqefiuqsefARZ4Zfesfe34234dfzefzfe'
app.config['PROPAGATE_EXCEPTIONS'] = True
app.secret_key = "fsiqefiuqsefARZ4Zfesfe34234dfzefzfe"
app.config["PROPAGATE_EXCEPTIONS"] = True
tasks_db = os.path.join(tisbackup_root_dir,"tasks.sqlite")
tasks_db = os.path.join(tisbackup_root_dir, "tasks.sqlite")
def read_all_configs(base_dir):
raw_configs = []
list_config = []
config_base_dir = base_dir
# config_base_dir = base_dir
for file in os.listdir(base_dir):
if isfile(join(base_dir,file)):
raw_configs.append(join(base_dir,file))
if isfile(join(base_dir, file)):
raw_configs.append(join(base_dir, file))
for elem in raw_configs:
line = open(elem).readline()
if 'global' in line:
if "global" in line:
list_config.append(elem)
backup_dict = {}
backup_dict['rsync_ssh_list'] = []
backup_dict['rsync_btrfs_list'] = []
backup_dict['rsync_list'] = []
backup_dict['null_list'] = []
backup_dict['pgsql_list'] = []
backup_dict['mysql_list'] = []
#backup_dict['sqlserver_list'] = []
backup_dict['xva_list'] = []
backup_dict['metadata_list'] = []
#backup_dict['switch_list'] = []
#backup_dict['oracle_list'] = []
backup_dict["rsync_ssh_list"] = []
backup_dict["rsync_btrfs_list"] = []
backup_dict["rsync_list"] = []
backup_dict["null_list"] = []
backup_dict["pgsql_list"] = []
backup_dict["mysql_list"] = []
# backup_dict['sqlserver_list'] = []
backup_dict["xva_list"] = []
backup_dict["metadata_list"] = []
# backup_dict['switch_list'] = []
# backup_dict['oracle_list'] = []
result = []
cp = ConfigParser()
for config_file in list_config:
cp.read(config_file)
backup_base_dir = cp.get('global', 'backup_base_dir')
backup_base_dir = cp.get("global", "backup_base_dir")
backup = tis_backup(backup_base_dir=backup_base_dir)
backup.read_ini_file(config_file)
@@ -110,11 +110,12 @@ def read_all_configs(base_dir):
backup_sections = all_sections
else:
for b in backup_sections:
if not b in all_sections:
raise Exception('Section %s is not defined in config file' % b)
if b not in all_sections:
raise Exception("Section %s is not defined in config file" % b)
if not backup_sections:
sections = [backup_item.backup_name for backup_item in backup.backup_list]
# never used..
# if not backup_sections:
# sections = [backup_item.backup_name for backup_item in backup.backup_list]
for backup_item in backup.backup_list:
if backup_item.backup_name in backup_sections:
@@ -125,35 +126,28 @@ def read_all_configs(base_dir):
result.append(b)
for row in result:
backup_name = row['backup_name']
server_name = row['server_name']
backup_type = row['type']
backup_name = row["backup_name"]
server_name = row["server_name"]
backup_type = row["type"]
if backup_type == "xcp-dump-metadata":
backup_dict['metadata_list'].append(
[server_name, backup_name, backup_type, ""])
backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
if backup_type == "rsync+ssh":
remote_dir = row['remote_dir']
backup_dict['rsync_ssh_list'].append(
[server_name, backup_name, backup_type, remote_dir])
remote_dir = row["remote_dir"]
backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
if backup_type == "rsync+btrfs+ssh":
remote_dir = row['remote_dir']
backup_dict['rsync_btrfs_list'].append(
[server_name, backup_name, backup_type, remote_dir])
remote_dir = row["remote_dir"]
backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
if backup_type == "rsync":
remote_dir = row['remote_dir']
backup_dict['rsync_list'].append(
[server_name, backup_name, backup_type, remote_dir])
remote_dir = row["remote_dir"]
backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
if backup_type == "null":
backup_dict['null_list'].append(
[server_name, backup_name, backup_type, ""])
backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
if backup_type == "pgsql+ssh":
db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
backup_dict['pgsql_list'].append(
[server_name, backup_name, backup_type, db_name])
db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
if backup_type == "mysql+ssh":
db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
backup_dict['mysql_list'].append(
[server_name, backup_name, backup_type, db_name])
db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
# if backup_type == "sqlserver+ssh":
# db_name = row['db_name']
# backup_dict['sqlserver_list'].append(
@@ -163,8 +157,7 @@ def read_all_configs(base_dir):
# backup_dict['oracle_list'].append(
# [server_name, backup_name, backup_type, db_name])
if backup_type == "xen-xva":
backup_dict['xva_list'].append(
[server_name, backup_name, backup_type, ""])
backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
# if backup_type == "switch":
# backup_dict['switch_list'].append(
# [server_name, backup_name, backup_type, ""])
@@ -177,7 +170,7 @@ def read_config():
cp = ConfigParser()
cp.read(config_file)
backup_base_dir = cp.get('global','backup_base_dir')
backup_base_dir = cp.get("global", "backup_base_dir")
backup = tis_backup(backup_base_dir=backup_base_dir)
backup.read_ini_file(config_file)
@@ -188,56 +181,58 @@ def read_config():
backup_sections = all_sections
else:
for b in backup_sections:
if not b in all_sections:
raise Exception('Section %s is not defined in config file' % b)
if b not in all_sections:
raise Exception("Section %s is not defined in config file" % b)
result = []
if not backup_sections:
sections = [backup_item.backup_name for backup_item in backup.backup_list]
# not used ...
# if not backup_sections:
# sections = [backup_item.backup_name for backup_item in backup.backup_list]
for backup_item in backup.backup_list:
if backup_item.backup_name in backup_sections:
b = {}
for attrib_name in backup_item.required_params+backup_item.optional_params:
if hasattr(backup_item,attrib_name):
b[attrib_name] = getattr(backup_item,attrib_name)
for attrib_name in backup_item.required_params + backup_item.optional_params:
if hasattr(backup_item, attrib_name):
b[attrib_name] = getattr(backup_item, attrib_name)
result.append(b)
backup_dict = {}
backup_dict['rsync_ssh_list'] = []
backup_dict['rsync_btrfs_list'] = []
backup_dict['rsync_list'] = []
backup_dict['null_list'] = []
backup_dict['pgsql_list'] = []
backup_dict['mysql_list'] = []
#backup_dict['sqlserver_list'] = []
backup_dict['xva_list'] = []
backup_dict['metadata_list'] = []
#backup_dict['switch_list'] = []
#backup_dict['oracle_list'] = []
backup_dict["rsync_ssh_list"] = []
backup_dict["rsync_btrfs_list"] = []
backup_dict["rsync_list"] = []
backup_dict["null_list"] = []
backup_dict["pgsql_list"] = []
backup_dict["mysql_list"] = []
# backup_dict['sqlserver_list'] = []
backup_dict["xva_list"] = []
backup_dict["metadata_list"] = []
# backup_dict['switch_list'] = []
# backup_dict['oracle_list'] = []
for row in result:
backup_name = row['backup_name']
server_name = row['server_name']
backup_type = row['type']
backup_name = row["backup_name"]
server_name = row["server_name"]
backup_type = row["type"]
if backup_type == "xcp-dump-metadata":
backup_dict['metadata_list'].append([server_name, backup_name, backup_type, ""])
backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
if backup_type == "rsync+ssh":
remote_dir = row['remote_dir']
backup_dict['rsync_ssh_list'].append([server_name, backup_name, backup_type,remote_dir])
remote_dir = row["remote_dir"]
backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
if backup_type == "rsync+btrfs+ssh":
remote_dir = row['remote_dir']
backup_dict['rsync_btrfs_list'].append([server_name, backup_name, backup_type,remote_dir])
remote_dir = row["remote_dir"]
backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
if backup_type == "rsync":
remote_dir = row['remote_dir']
backup_dict['rsync_list'].append([server_name, backup_name, backup_type,remote_dir])
remote_dir = row["remote_dir"]
backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
if backup_type == "null":
backup_dict['null_list'].append([server_name, backup_name, backup_type, ""])
backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
if backup_type == "pgsql+ssh":
db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
backup_dict['pgsql_list'].append([server_name, backup_name, backup_type, db_name])
db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
if backup_type == "mysql+ssh":
db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
backup_dict['mysql_list'].append([server_name, backup_name, backup_type, db_name])
db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
# if backup_type == "sqlserver+ssh":
# db_name = row['db_name']
# backup_dict['sqlserver_list'].append([server_name, backup_name, backup_type, db_name])
@@ -245,49 +240,68 @@ def read_config():
# db_name = row['db_name']
# backup_dict['oracle_list'].append([server_name, backup_name, backup_type, db_name])
if backup_type == "xen-xva":
backup_dict['xva_list'].append([server_name, backup_name, backup_type, ""])
backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
# if backup_type == "switch":
# backup_dict['switch_list'].append([server_name, backup_name, backup_type, ""])
return backup_dict
@app.route('/')
@app.route("/")
def backup_all():
backup_dict = read_config()
return render_template('backups.html', backup_list = backup_dict)
return render_template("backups.html", backup_list=backup_dict)
@app.route('/config_number/')
@app.route('/config_number/<int:id>')
@app.route("/config_number/")
@app.route("/config_number/<int:id>")
def set_config_number(id=None):
if id != None and len(CONFIG) > id:
if id is not None and len(CONFIG) > id:
global config_number
config_number=id
config_number = id
read_config()
return jsonify(configs=CONFIG,config_number=config_number)
return jsonify(configs=CONFIG, config_number=config_number)
@app.route('/all_json')
@app.route("/all_json")
def backup_all_json():
backup_dict = read_all_configs(BASE_DIR)
return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_btrfs_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list'])
#+ backup_dict['switch_list'])+backup_dict['sqlserver_list']
return json.dumps(
backup_dict["rsync_list"]
+ backup_dict["rsync_btrfs_list"]
+ backup_dict["rsync_ssh_list"]
+ backup_dict["pgsql_list"]
+ backup_dict["mysql_list"]
+ backup_dict["xva_list"]
+ backup_dict["null_list"]
+ backup_dict["metadata_list"]
)
# + backup_dict['switch_list'])+backup_dict['sqlserver_list']
@app.route('/json')
@app.route("/json")
def backup_json():
backup_dict = read_config()
return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_btrfs_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list'])
#+ backup_dict['switch_list'])+backup_dict['sqlserver_list']
return json.dumps(
backup_dict["rsync_list"]
+ backup_dict["rsync_btrfs_list"]
+ backup_dict["rsync_ssh_list"]
+ backup_dict["pgsql_list"]
+ backup_dict["mysql_list"]
+ backup_dict["xva_list"]
+ backup_dict["null_list"]
+ backup_dict["metadata_list"]
)
# + backup_dict['switch_list'])+backup_dict['sqlserver_list']
def check_usb_disk():
"""This method returns the mounts point of FIRST external disk"""
# disk_name = []
# disk_name = []
usb_disk_list = []
for name in glob.glob('/dev/sd[a-z]'):
for name in glob.glob("/dev/sd[a-z]"):
for line in os.popen("udevadm info -q env -n %s" % name):
if re.match("ID_PATH=.*usb.*", line):
usb_disk_list += [ name ]
usb_disk_list += [name]
if len(usb_disk_list) == 0:
raise_error("Cannot find any external usb disk", "You should plug the usb hard drive into the server")
@@ -296,19 +310,22 @@ def check_usb_disk():
usb_partition_list = []
for usb_disk in usb_disk_list:
cmd = "udevadm info -q path -n %s" % usb_disk + '1'
output = os.popen(cmd).read()
cmd = "udevadm info -q path -n %s" % usb_disk + "1"
output = os.popen(cmd).read()
print("cmd : " + cmd)
print("output : " + output)
if '/devices/pci' in output:
#flash("partition found: %s1" % usb_disk)
if "/devices/pci" in output:
# flash("partition found: %s1" % usb_disk)
usb_partition_list.append(usb_disk + "1")
print(usb_partition_list)
if len(usb_partition_list) ==0:
raise_error("The drive %s has no partition" % (usb_disk_list[0] ), "You should initialize the usb drive and format an ext4 partition with TISBACKUP label")
if len(usb_partition_list) == 0:
raise_error(
"The drive %s has no partition" % (usb_disk_list[0]),
"You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
)
return ""
tisbackup_partition_list = []
@@ -319,131 +336,137 @@ def check_usb_disk():
print(tisbackup_partition_list)
if len(tisbackup_partition_list) ==0:
raise_error("No tisbackup partition exist on disk %s" % (usb_disk_list[0] ), "You should initialize the usb drive and format an ext4 partition with TISBACKUP label")
if len(tisbackup_partition_list) == 0:
raise_error(
"No tisbackup partition exist on disk %s" % (usb_disk_list[0]),
"You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
)
return ""
if len(tisbackup_partition_list) > 1:
if len(tisbackup_partition_list) > 1:
raise_error("There are many usb disk", "You should plug remove one of them")
return ""
return tisbackup_partition_list[0]
def check_already_mount(partition_name,refresh):
with open('/proc/mounts') as f:
def check_already_mount(partition_name, refresh):
with open("/proc/mounts") as f:
mount_point = ""
for line in f.readlines():
if line.startswith(partition_name):
mount_point = line.split(' ')[1]
mount_point = line.split(" ")[1]
if not refresh:
run_command("/bin/umount %s" % mount_point)
os.rmdir(mount_point)
return mount_point
def run_command(cmd, info=""):
flash("Executing: %s"% cmd)
flash("Executing: %s" % cmd)
from subprocess import CalledProcessError, check_output
result =""
result = ""
try:
result = check_output(cmd, stderr=subprocess.STDOUT,shell=True)
except CalledProcessError as e:
raise_error(result,info)
result = check_output(cmd, stderr=subprocess.STDOUT, shell=True)
except CalledProcessError:
raise_error(result, info)
return result
def check_mount_disk(partition_name, refresh):
mount_point = check_already_mount(partition_name, refresh)
mount_point = check_already_mount(partition_name, refresh)
if not refresh:
mount_point = "/mnt/TISBACKUP-" +str(time.time())
mount_point = "/mnt/TISBACKUP-" + str(time.time())
os.mkdir(mount_point)
flash("must mount " + partition_name )
flash("must mount " + partition_name)
cmd = "mount %s %s" % (partition_name, mount_point)
if run_command(cmd,"You should manualy mount the usb drive") != "":
if run_command(cmd, "You should manualy mount the usb drive") != "":
flash("Remove directory: %s" % mount_point)
os.rmdir(mount_point)
return ""
return mount_point
@app.route('/status.json')
@app.route("/status.json")
def export_backup_status():
exports = dbstat.query('select * from stats where TYPE="EXPORT" and backup_start>="%s"' % mindate)
error = ""
finish=not runnings_backups()
if get_task() != None and finish:
finish = not runnings_backups()
if get_task() is not None and finish:
status = get_task().get()
if status != "ok":
error = "Export failing with error: "+status
error = "Export failing with error: " + status
return jsonify(data=exports, finish=finish, error=error)
return jsonify(data=exports,finish=finish,error=error)
def runnings_backups():
task = get_task()
is_runnig = (task != None)
finish = ( is_runnig and task.get() != None)
task = get_task()
is_runnig = task is not None
finish = is_runnig and task.get() is not None
return is_runnig and not finish
@app.route('/backups.json')
@app.route("/backups.json")
def last_backup_json():
exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC ')
return Response(response=json.dumps(exports),
status=200,
mimetype="application/json")
return Response(response=json.dumps(exports), status=200, mimetype="application/json")
@app.route('/last_backups')
@app.route("/last_backups")
def last_backup():
exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC LIMIT 20 ')
return render_template("last_backups.html", backups=exports)
@app.route('/export_backup')
@app.route("/export_backup")
def export_backup():
raise_error("", "")
backup_dict = read_config()
sections = []
backup_sections = []
for backup_types in backup_dict:
for backup_types in backup_dict:
if backup_types == "null_list":
continue
for section in backup_dict[backup_types]:
#if section.count > 0:
# if section.count > 0:
if len(section) > 0:
sections.append(section[1])
noJobs = (not runnings_backups())
noJobs = not runnings_backups()
if "start" in list(request.args.keys()) or not noJobs:
start=True
start = True
if "sections" in list(request.args.keys()):
backup_sections = request.args.getlist('sections')
backup_sections = request.args.getlist("sections")
else:
start=False
start = False
cp.read(tisbackup_config_file)
partition_name = check_usb_disk()
if partition_name:
if noJobs:
mount_point = check_mount_disk( partition_name, False)
mount_point = check_mount_disk(partition_name, False)
else:
mount_point = check_mount_disk( partition_name, True)
mount_point = check_mount_disk(partition_name, True)
if noJobs:
global mindate
mindate = datetime2isodate(datetime.datetime.now())
mindate = datetime2isodate(datetime.datetime.now())
if not error and start:
print(tisbackup_config_file)
task = run_export_backup(base=backup_base_dir, config_file=CONFIG[config_number], mount_point=mount_point, backup_sections=",".join([str(x) for x in backup_sections]))
task = run_export_backup(
base=backup_base_dir,
config_file=CONFIG[config_number],
mount_point=mount_point,
backup_sections=",".join([str(x) for x in backup_sections]),
)
set_task(task)
return render_template("export_backup.html", error=error, start=start, info=info, email=ADMIN_EMAIL, sections=sections)
@@ -456,6 +479,7 @@ def raise_error(strError, strInfo):
if __name__ == "__main__":
read_config()
from os import environ
if 'WINGDB_ACTIVE' in environ:
if "WINGDB_ACTIVE" in environ:
app.debug = False
app.run(host= '0.0.0.0',port=8080)
app.run(host="0.0.0.0", port=8080)
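
A closing note on the task-tracking idiom in tisbackup_gui.py above: runnings_backups() and export_backup_status() lean on huey Result handles, whose non-blocking .get() returns None until a consumer has stored a value, so `task.get() is not None` doubles as an is-finished test. A reduced model (illustrative names; assumes a huey consumer is running against the queue from config.py):

```python
# Sketch, not project code: the pending/finished states the GUI polls.
from config import huey

@huey.task()
def export_stub():
    return "ok"   # run_export_backup likewise reports "ok" on success

task = export_stub()                               # enqueue, like run_export_backup(...)
is_running = task is not None
finished = is_running and task.get() is not None   # non-blocking poll
print("still running" if is_running and not finished else "finished")
```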