fix iniparse

fix code passing ruff linter
pre-commit ruff
pre-commit ruff format

Author: k3nny, 2024-11-29 22:54:39 +01:00
Commit: 737f9bea38 (parent: aa8a68aa80)
CI: all checks were successful (lint / docker (push), 9m14s)
27 changed files with 2375 additions and 2016 deletions


@@ -1,7 +1,16 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v5.0.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
       - id: check-yaml
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.8.1
+    hooks:
+      # Run the linter.
+      - id: ruff
+      # Run the formatter.
+      - id: ruff-format

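The two new hooks wire ruff's linter and formatter into the existing pre-commit setup. As a minimal sketch, they can be exercised locally the same way the lint CI job does, assuming pre-commit is installed and the config above sits at the repository root (the invocation below is standard pre-commit usage, not part of this commit):

# Minimal sketch: run every configured hook against the whole tree.
# Assumes the `pre-commit` executable is on PATH.
import subprocess

subprocess.run(["pre-commit", "run", "--all-files"], check=True)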

@@ -6,4 +6,4 @@ from huey.storage import SqliteStorage
 tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
 tasks_db = os.path.join(tisbackup_root_dir, "tasks.sqlite")
-huey = SqlHuey(name="tisbackups",filename=tasks_db,always_eager=False,storage_class=SqliteStorage)
+huey = SqlHuey(name="tisbackups", filename=tasks_db, always_eager=False, storage_class=SqliteStorage)

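For context, the `huey` object configured above is what task modules decorate against. A minimal sketch of defining and enqueuing a task with it, using huey's documented decorator API (the task name and body are illustrative, not from this commit):

# Hypothetical task using the SqlHuey instance configured above;
# `run_backup` is an illustrative name, not part of this commit.
@huey.task()
def run_backup(backup_name):
    print("backing up %s" % backup_name)
    return backup_name

# Calling the decorated function enqueues it for the consumer
# process (always_eager=False) and returns a Result handle.
result = run_backup("srv1-home")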

@@ -30,50 +30,50 @@
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.doctest',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.todo',
-    'sphinx.ext.viewcode',
-    'sphinx.ext.githubpages',
+    "sphinx.ext.doctest",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.githubpages",
 ]

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
 # source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ".rst"

 # The encoding of source files.
 #
 # source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = 'TISBackup'
-copyright = '2020, Tranquil IT'
-author = 'Tranquil IT'
+project = "TISBackup"
+copyright = "2020, Tranquil IT"
+author = "Tranquil IT"

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = '1.8'
+version = "1.8"
 # The full version, including alpha/beta/rc tags.
-release = '1.8.2'
+release = "1.8.2"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = 'en'
-locale_dirs = ['locale/']
+language = "en"
+locale_dirs = ["locale/"]
 gettext_compact = False

 # There are two options for replacing |today|: either, you set today to some

@@ -110,7 +110,7 @@ exclude_patterns = []
 # show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -126,18 +126,19 @@ todo_include_todos = True
 try:
     import sphinx_rtd_theme
+
     html_theme = "sphinx_rtd_theme"
     html_favicon = "_static/favicon.ico"
     html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
     html_context = {
-        'css_files': [
-            '_static/css/custom.css',  # overrides for wide tables in RTD theme
-            '_static/css/ribbon.css',
-            '_static/theme_overrides.css',  # override wide tables in RTD theme
+        "css_files": [
+            "_static/css/custom.css",  # overrides for wide tables in RTD theme
+            "_static/css/ribbon.css",
+            "_static/theme_overrides.css",  # override wide tables in RTD theme
         ],
     }
-except ImportError as e:
-    html_theme = 'alabaster'
+except ImportError as e:  # noqa: F841
+    html_theme = "alabaster"
     html_theme_path = []
@@ -178,7 +179,7 @@ except ImportError as e:
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied

@@ -258,15 +259,13 @@ html_static_path = ['_static']
 # html_search_scorer = 'scorer.js'

 # Output file base name for HTML help builder.
-htmlhelp_basename = 'tisbackupdoc'
+htmlhelp_basename = "tisbackupdoc"

 # -- Linkcheck -------------------
 # make linkcheck
 # URL patterns to ignore
-linkcheck_ignore = [r'http.*://.*mydomain.lan.*',
-                    r'http.*://.*host_fqdn.*',
-                    r'http://user:pwd@host_fqdn:port']
+linkcheck_ignore = [r"http.*://.*mydomain.lan.*", r"http.*://.*host_fqdn.*", r"http://user:pwd@host_fqdn:port"]

 # -- Options for LaTeX output ---------------------------------------------

@@ -279,23 +278,20 @@ linkcheck_ignore = [r'http.*://.*mydomain.lan.*',
 # > \setlength\paperwidth {15.59cm}}

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #
     # 'papersize': 'letterpaper',
-    'papersize': 'lulupaper',
-
-    # The font size ('10pt', '11pt' or '12pt').
-    #
-    'pointsize': '9pt',
-
-    # Additional stuff for the LaTeX preamble.
-    #
-    'preamble': r'\batchmode',
-
-    # Latex figure (float) alignment
-    #
-    # 'figure_align': 'htbp',
-    'sphinxsetup': 'hmargin={1.5cm,1.5cm}, vmargin={3cm,3cm}, marginpar=1cm',
+    "papersize": "lulupaper",
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # "pointsize": "9pt",
+    # Additional stuff for the LaTeX preamble.
+    #
+    "preamble": r"\batchmode",
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
+    "sphinxsetup": "hmargin={1.5cm,1.5cm}, vmargin={3cm,3cm}, marginpar=1cm",
 }

@@ -303,7 +299,7 @@ latex_elements = {
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'tisbackup.tex', 'TISBackup Documentation', 'Tranquil IT', 'manual'),
+    (master_doc, "tisbackup.tex", "TISBackup Documentation", "Tranquil IT", "manual"),
 ]

 # The name of an image file (relative to this directory) to place at the top of

@@ -343,10 +339,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'tisbackup', 'TISBackup Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, "tisbackup", "TISBackup Documentation", [author], 1)]

 # If true, show URL addresses after external links.
 #

@@ -359,9 +352,15 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'tisbackup', 'TISBackup Documentation',
-     author, 'Tranquil IT', 'The objective of TISbackup is to benefit from file backups and centralized alert feedback on "reasonable" data volumes.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "tisbackup",
+        "TISBackup Documentation",
+        author,
+        "Tranquil IT",
+        'The objective of TISbackup is to benefit from file backups and centralized alert feedback on "reasonable" data volumes.',
+        "Miscellaneous",
+    ),
 ]

 # Documents to append as an appendix to all manuals.

@@ -382,7 +381,7 @@ texinfo_documents = [
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'https://docs.python.org/': None}
+intersphinx_mapping = {"https://docs.python.org/": None}

 # -- Options for Epub output ----------------------------------------------

@@ -438,7 +437,7 @@ epub_copyright = copyright
 # epub_post_files = []

 # A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
+epub_exclude_files = ["search.html"]

 # The depth of the table of contents in toc.ncx.
 #

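One caveat worth noting alongside the intersphinx hunk above: the `{url: None}` form of `intersphinx_mapping` kept by this commit is deprecated in recent Sphinx releases, which expect named entries. A sketch of the modern equivalent (the `"python"` key is the conventional name, not taken from this commit):

# Named-mapping form expected by newer Sphinx versions.
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}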

@@ -61,10 +61,11 @@ import sys
 import six.moves.http_client as httplib
 import six.moves.xmlrpc_client as xmlrpclib

-translation = gettext.translation('xen-xm', fallback = True)
+translation = gettext.translation("xen-xm", fallback=True)

-API_VERSION_1_1 = '1.1'
-API_VERSION_1_2 = '1.2'
+API_VERSION_1_1 = "1.1"
+API_VERSION_1_2 = "1.2"

 class Failure(Exception):
     def __init__(self, details):

@@ -79,41 +80,48 @@ class Failure(Exception):
         return msg

     def _details_map(self):
-        return dict([(str(i), self.details[i])
-                     for i in range(len(self.details))])
+        return dict([(str(i), self.details[i]) for i in range(len(self.details))])

 # Just a "constant" that we use to decide whether to retry the RPC
 _RECONNECT_AND_RETRY = object()

 class UDSHTTPConnection(httplib.HTTPConnection):
-    """HTTPConnection subclass to allow HTTP over Unix domain sockets. """
+    """HTTPConnection subclass to allow HTTP over Unix domain sockets."""

     def connect(self):
         path = self.host.replace("_", "/")
         self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
         self.sock.connect(path)

 class UDSHTTP(httplib.HTTPConnection):
     _connection_class = UDSHTTPConnection

 class UDSTransport(xmlrpclib.Transport):
     def __init__(self, use_datetime=0):
         self._use_datetime = use_datetime
-        self._extra_headers=[]
+        self._extra_headers = []
         self._connection = (None, None)

     def add_extra_header(self, key, value):
-        self._extra_headers += [ (key,value) ]
+        self._extra_headers += [(key, value)]

     def make_connection(self, host):
         # Python 2.4 compatibility
         if sys.version_info[0] <= 2 and sys.version_info[1] < 7:
             return UDSHTTP(host)
         else:
             return UDSHTTPConnection(host)

     def send_request(self, connection, handler, request_body):
         connection.putrequest("POST", handler)
         for key, value in self._extra_headers:
             connection.putheader(key, value)

 class Session(xmlrpclib.ServerProxy):
     """A server proxy and session manager for communicating with xapi using
     the Xen-API.

@@ -126,32 +134,27 @@ class Session(xmlrpclib.ServerProxy):
         session.xenapi.session.logout()
     """

-    def __init__(self, uri, transport=None, encoding=None, verbose=0,
-                 allow_none=1, ignore_ssl=False):
+    def __init__(self, uri, transport=None, encoding=None, verbose=0, allow_none=1, ignore_ssl=False):
         # Fix for CA-172901 (+ Python 2.4 compatibility)
         # Fix for context=ctx ( < Python 2.7.9 compatibility)
-        if not (sys.version_info[0] <= 2 and sys.version_info[1] <= 7 and sys.version_info[2] <= 9 ) \
-                and ignore_ssl:
+        if not (sys.version_info[0] <= 2 and sys.version_info[1] <= 7 and sys.version_info[2] <= 9) and ignore_ssl:
             import ssl
             ctx = ssl._create_unverified_context()
-            xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding,
-                                           verbose, allow_none, context=ctx)
+            xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose, allow_none, context=ctx)
         else:
-            xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding,
-                                           verbose, allow_none)
+            xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose, allow_none)
         self.transport = transport
         self._session = None
         self.last_login_method = None
         self.last_login_params = None
         self.API_version = API_VERSION_1_1

     def xenapi_request(self, methodname, params):
-        if methodname.startswith('login'):
+        if methodname.startswith("login"):
             self._login(methodname, params)
             return None
-        elif methodname == 'logout' or methodname == 'session.logout':
+        elif methodname == "logout" or methodname == "session.logout":
             self._logout()
             return None
         else:

@@ -162,29 +165,25 @@ class Session(xmlrpclib.ServerProxy):
             if result is _RECONNECT_AND_RETRY:
                 retry_count += 1
                 if self.last_login_method:
-                    self._login(self.last_login_method,
-                                self.last_login_params)
+                    self._login(self.last_login_method, self.last_login_params)
                 else:
-                    raise xmlrpclib.Fault(401, 'You must log in')
+                    raise xmlrpclib.Fault(401, "You must log in")
             else:
                 return result
-        raise xmlrpclib.Fault(
-            500, 'Tried 3 times to get a valid session, but failed')
+        raise xmlrpclib.Fault(500, "Tried 3 times to get a valid session, but failed")

     def _login(self, method, params):
         try:
-            result = _parse_result(
-                getattr(self, 'session.%s' % method)(*params))
+            result = _parse_result(getattr(self, "session.%s" % method)(*params))
             if result is _RECONNECT_AND_RETRY:
-                raise xmlrpclib.Fault(
-                    500, 'Received SESSION_INVALID when logging in')
+                raise xmlrpclib.Fault(500, "Received SESSION_INVALID when logging in")
             self._session = result
             self.last_login_method = method
             self.last_login_params = params
             self.API_version = self._get_api_version()
         except socket.error as e:
             if e.errno == socket.errno.ETIMEDOUT:
-                raise xmlrpclib.Fault(504, 'The connection timed out')
+                raise xmlrpclib.Fault(504, "The connection timed out")
             else:
                 raise e

@@ -205,41 +204,41 @@ class Session(xmlrpclib.ServerProxy):
         host = self.xenapi.pool.get_master(pool)
         major = self.xenapi.host.get_API_version_major(host)
         minor = self.xenapi.host.get_API_version_minor(host)
-        return "%s.%s"%(major,minor)
+        return "%s.%s" % (major, minor)

     def __getattr__(self, name):
-        if name == 'handle':
+        if name == "handle":
             return self._session
-        elif name == 'xenapi':
+        elif name == "xenapi":
             return _Dispatcher(self.API_version, self.xenapi_request, None)
-        elif name.startswith('login') or name.startswith('slave_local'):
+        elif name.startswith("login") or name.startswith("slave_local"):
             return lambda *params: self._login(name, params)
-        elif name == 'logout':
+        elif name == "logout":
             return _Dispatcher(self.API_version, self.xenapi_request, "logout")
         else:
             return xmlrpclib.ServerProxy.__getattr__(self, name)

 def xapi_local():
     return Session("http://_var_lib_xcp_xapi/", transport=UDSTransport())

 def _parse_result(result):
-    if type(result) != dict or 'Status' not in result:
-        raise xmlrpclib.Fault(500, 'Missing Status in response from server' + result)
+    if not isinstance(result, dict) or "Status" not in result:
+        raise xmlrpclib.Fault(500, "Missing Status in response from server" + result)
-    if result['Status'] == 'Success':
-        if 'Value' in result:
-            return result['Value']
+    if result["Status"] == "Success":
+        if "Value" in result:
+            return result["Value"]
         else:
-            raise xmlrpclib.Fault(500,
-                                  'Missing Value in response from server')
+            raise xmlrpclib.Fault(500, "Missing Value in response from server")
     else:
-        if 'ErrorDescription' in result:
-            if result['ErrorDescription'][0] == 'SESSION_INVALID':
+        if "ErrorDescription" in result:
+            if result["ErrorDescription"][0] == "SESSION_INVALID":
                 return _RECONNECT_AND_RETRY
             else:
-                raise Failure(result['ErrorDescription'])
+                raise Failure(result["ErrorDescription"])
         else:
-            raise xmlrpclib.Fault(
-                500, 'Missing ErrorDescription in response from server')
+            raise xmlrpclib.Fault(500, "Missing ErrorDescription in response from server")

 # Based upon _Method from xmlrpclib.

@@ -251,9 +250,9 @@ class _Dispatcher:
     def __repr__(self):
         if self.__name:
-            return '<XenAPI._Dispatcher for %s>' % self.__name
+            return "<XenAPI._Dispatcher for %s>" % self.__name
         else:
-            return '<XenAPI._Dispatcher>'
+            return "<XenAPI._Dispatcher>"

     def __getattr__(self, name):
         if self.__name is None:

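The Session class reformatted above is used in the usual XenAPI login/call/logout pattern, which its own docstring hints at. A minimal sketch (host URL and credentials are placeholders, not from this commit):

# Hypothetical usage of the Session class above; URL and credentials are placeholders.
session = Session("https://xenserver.example.lan/", ignore_ssl=True)
session.xenapi.login_with_password("root", "secret")
try:
    # List the name labels of all VMs known to the pool.
    for vm in session.xenapi.VM.get_all():
        print(session.xenapi.VM.get_name_label(vm))
finally:
    session.xenapi.session.logout()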

@@ -19,11 +19,10 @@
 # -----------------------------------------------------------------------

 import sys

 try:
-    sys.stderr = open('/dev/null')  # Silence silly warnings from paramiko
+    sys.stderr = open("/dev/null")  # Silence silly warnings from paramiko
     import paramiko
 except ImportError as e:
     print(("Error : can not load paramiko library %s" % e))

@@ -36,19 +35,19 @@ from libtisbackup.common import *

 class backup_mysql(backup_generic):
     """Backup a mysql database as gzipped sql file through ssh"""

-    type = 'mysql+ssh'
-    required_params = backup_generic.required_params + ['db_user','db_passwd','private_key']
-    optional_params = backup_generic.optional_params + ['db_name']
-
-    db_name=''
-    db_user=''
-    db_passwd=''
+    type = "mysql+ssh"
+    required_params = backup_generic.required_params + ["db_user", "db_passwd", "private_key"]
+    optional_params = backup_generic.optional_params + ["db_name"]

+    db_name = ""
+    db_user = ""
+    db_passwd = ""
     dest_dir = ""

-    def do_backup(self,stats):
-        self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date)
+    def do_backup(self, stats):
+        self.dest_dir = os.path.join(self.backup_dir, self.backup_start_date)

         if not os.path.isdir(self.dest_dir):
             if not self.dry_run:

@@ -56,126 +55,145 @@ class backup_mysql(backup_generic):
             else:
                 print(('mkdir "%s"' % self.dest_dir))
         else:
-            raise Exception('backup destination directory already exists : %s' % self.dest_dir)
+            raise Exception("backup destination directory already exists : %s" % self.dest_dir)

-        self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key)
+        self.logger.debug("[%s] Connecting to %s with user root and key %s", self.backup_name, self.server_name, self.private_key)
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            # mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
             mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

         self.ssh = paramiko.SSHClient()
         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        self.ssh.connect(self.server_name,username='root',pkey = mykey, port=self.ssh_port)
+        self.ssh.connect(self.server_name, username="root", pkey=mykey, port=self.ssh_port)

-        self.db_passwd=self.db_passwd.replace('$','\$')
+        self.db_passwd = self.db_passwd.replace("$", "\$")
         if not self.db_name:
-            stats['log']= "Successfully backuping processed to the following databases :"
-            stats['status']='List'
-            cmd = 'mysql -N -B -p -e "SHOW DATABASES;" -u ' + self.db_user +' -p' + self.db_passwd + ' 2> /dev/null'
-            self.logger.debug('[%s] List databases: %s',self.backup_name,cmd)
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            stats["log"] = "Successfully backuping processed to the following databases :"
+            stats["status"] = "List"
+            cmd = 'mysql -N -B -p -e "SHOW DATABASES;" -u ' + self.db_user + " -p" + self.db_passwd + " 2> /dev/null"
+            self.logger.debug("[%s] List databases: %s", self.backup_name, cmd)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
-            databases = output.split('\n')
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))
+            databases = output.split("\n")
             for database in databases:
                 if database != "":
                     self.db_name = database.rstrip()
                     self.do_mysqldump(stats)
         else:
-            stats['log']= "Successfully backup processed to the following database :"
+            stats["log"] = "Successfully backup processed to the following database :"
             self.do_mysqldump(stats)

-    def do_mysqldump(self,stats):
+    def do_mysqldump(self, stats):
         t = datetime.datetime.now()
-        backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
+        backup_start_date = t.strftime("%Y%m%d-%Hh%Mm%S")

         # dump db
-        stats['status']='Dumping'
-        cmd = 'mysqldump --single-transaction -u' + self.db_user +' -p' + self.db_passwd + ' ' + self.db_name + ' > /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
-        self.logger.debug('[%s] Dump DB : %s',self.backup_name,cmd)
+        stats["status"] = "Dumping"
+        cmd = (
+            "mysqldump --single-transaction -u"
+            + self.db_user
+            + " -p"
+            + self.db_passwd
+            + " "
+            + self.db_name
+            + " > /tmp/"
+            + self.db_name
+            + "-"
+            + backup_start_date
+            + ".sql"
+        )
+        self.logger.debug("[%s] Dump DB : %s", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
             print(output)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))

         # zip the file
-        stats['status']='Zipping'
-        cmd = 'gzip /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
-        self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd)
+        stats["status"] = "Zipping"
+        cmd = "gzip /tmp/" + self.db_name + "-" + backup_start_date + ".sql"
+        self.logger.debug("[%s] Compress backup : %s", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))

         # get the file
-        stats['status']='SFTP'
-        filepath = '/tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
-        localpath = os.path.join(self.dest_dir , self.db_name + '.sql.gz')
-        self.logger.debug('[%s] Get gz backup with sftp on %s from %s to %s',self.backup_name,self.server_name,filepath,localpath)
+        stats["status"] = "SFTP"
+        filepath = "/tmp/" + self.db_name + "-" + backup_start_date + ".sql.gz"
+        localpath = os.path.join(self.dest_dir, self.db_name + ".sql.gz")
+        self.logger.debug("[%s] Get gz backup with sftp on %s from %s to %s", self.backup_name, self.server_name, filepath, localpath)
         if not self.dry_run:
             transport = self.ssh.get_transport()
             sftp = paramiko.SFTPClient.from_transport(transport)
             sftp.get(filepath, localpath)
             sftp.close()

         if not self.dry_run:
-            stats['total_files_count']=1 + stats.get('total_files_count', 0)
-            stats['written_files_count']=1 + stats.get('written_files_count', 0)
-            stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0)
-            stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0)
-        stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name)
-        stats['backup_location'] = self.dest_dir
+            stats["total_files_count"] = 1 + stats.get("total_files_count", 0)
+            stats["written_files_count"] = 1 + stats.get("written_files_count", 0)
+            stats["total_bytes"] = os.stat(localpath).st_size + stats.get("total_bytes", 0)
+            stats["written_bytes"] = os.stat(localpath).st_size + stats.get("written_bytes", 0)
+        stats["log"] = '%s "%s"' % (stats["log"], self.db_name)
+        stats["backup_location"] = self.dest_dir

-        stats['status']='RMTemp'
-        cmd = 'rm -f /tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
-        self.logger.debug('[%s] Remove temp gzip : %s',self.backup_name,cmd)
+        stats["status"] = "RMTemp"
+        cmd = "rm -f /tmp/" + self.db_name + "-" + backup_start_date + ".sql.gz"
+        self.logger.debug("[%s] Remove temp gzip : %s", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
-        stats['status']='OK'
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))
+        stats["status"] = "OK"

     def register_existingbackups(self):
         """scan backup dir and insert stats in database"""
-        registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))]
+        registered = [
+            b["backup_location"]
+            for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
+        ]

         filelist = os.listdir(self.backup_dir)
         filelist.sort()
-        p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
+        p = re.compile("^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
         for item in filelist:
             if p.match(item):
-                dir_name = os.path.join(self.backup_dir,item)
-                if not dir_name in registered:
-                    start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat()
-                    if fileisodate(dir_name)>start:
+                dir_name = os.path.join(self.backup_dir, item)
+                if dir_name not in registered:
+                    start = datetime.datetime.strptime(item, "%Y%m%d-%Hh%Mm%S").isoformat()
+                    if fileisodate(dir_name) > start:
                         stop = fileisodate(dir_name)
                     else:
                         stop = start
-                    self.logger.info('Registering %s started on %s',dir_name,start)
-                    self.logger.debug('  Disk usage %s','du -sb "%s"' % dir_name)
+                    self.logger.info("Registering %s started on %s", dir_name, start)
+                    self.logger.debug("  Disk usage %s", 'du -sb "%s"' % dir_name)
                     if not self.dry_run:
-                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0])
+                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
                     else:
                         size_bytes = 0
-                    self.logger.debug('  Size in bytes : %i',size_bytes)
+                    self.logger.debug("  Size in bytes : %i", size_bytes)
                     if not self.dry_run:
-                        self.dbstat.add(self.backup_name,self.server_name,'',\
-                                        backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name)
+                        self.dbstat.add(
+                            self.backup_name,
+                            self.server_name,
+                            "",
+                            backup_start=start,
+                            backup_end=stop,
+                            status="OK",
+                            total_bytes=size_bytes,
+                            backup_location=dir_name,
+                        )
                 else:
-                    self.logger.info('Skipping %s, already registered',dir_name)
+                    self.logger.info("Skipping %s, already registered", dir_name)

 register_driver(backup_mysql)

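Each driver module above and below ends with a `register_driver(...)` call. The helper itself lives in `libtisbackup.common` (imported via the star imports shown) and is not part of this diff, but the pattern it implies is a simple type-to-class registry. A sketch under that assumption, not the actual implementation:

# Assumed shape of the driver registry in libtisbackup.common (not shown in this commit).
backup_drivers = []

def register_driver(driverclass):
    # Each driver class declares a `type` string such as "mysql+ssh" or "rsync".
    if driverclass not in backup_drivers:
        backup_drivers.append(driverclass)

def driver_for(backup_type):
    # Hypothetical lookup helper, for illustration only.
    for driver in backup_drivers:
        if driver.type == backup_type:
            return driver
    raise KeyError(backup_type)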

@@ -27,25 +27,32 @@ from .common import *

 class backup_null(backup_generic):
     """Null backup to register servers which don't need any backups
     but we still want to know they are taken in account"""

-    type = 'null'
-    required_params = ['type','server_name','backup_name']
+    type = "null"
+    required_params = ["type", "server_name", "backup_name"]
     optional_params = []

-    def do_backup(self,stats):
+    def do_backup(self, stats):
         pass

     def process_backup(self):
         pass

     def cleanup_backup(self):
         pass

     def register_existingbackups(self):
         pass

-    def export_latestbackup(self,destdir):
+    def export_latestbackup(self, destdir):
         return {}

-    def checknagios(self,maxage_hours=30):
-        return (nagiosStateOk,"No backups needs to be performed")
+    def checknagios(self, maxage_hours=30):
+        return (nagiosStateOk, "No backups needs to be performed")

 register_driver(backup_null)

-if __name__=='__main__':
+if __name__ == "__main__":
     pass


@@ -20,7 +20,7 @@

 import sys

 try:
-    sys.stderr = open('/dev/null')  # Silence silly warnings from paramiko
+    sys.stderr = open("/dev/null")  # Silence silly warnings from paramiko
     import paramiko
 except ImportError as e:
     print(("Error : can not load paramiko library %s" % e))
@@ -38,140 +38,158 @@ from libtisbackup.common import *

 class backup_oracle(backup_generic):
     """Backup a oracle database as zipped file through ssh"""

-    type = 'oracle+ssh'
-    required_params = backup_generic.required_params + ['db_name','private_key', 'userid']
-    optional_params = ['username', 'remote_backup_dir', 'ignore_error_oracle_code']
-    db_name=''
-    username='oracle'
-    remote_backup_dir = r'/home/oracle/backup'
-    ignore_error_oracle_code = [ ]
+    type = "oracle+ssh"
+    required_params = backup_generic.required_params + ["db_name", "private_key", "userid"]
+    optional_params = ["username", "remote_backup_dir", "ignore_error_oracle_code"]
+    db_name = ""
+    username = "oracle"
+    remote_backup_dir = r"/home/oracle/backup"
+    ignore_error_oracle_code = []

-    def do_backup(self,stats):
-        self.logger.debug('[%s] Connecting to %s with user %s and key %s',self.backup_name,self.server_name,self.username,self.private_key)
+    def do_backup(self, stats):
+        self.logger.debug(
+            "[%s] Connecting to %s with user %s and key %s", self.backup_name, self.server_name, self.username, self.private_key
+        )
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            # mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
             mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

         self.ssh = paramiko.SSHClient()
         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        self.ssh.connect(self.server_name,username=self.username,pkey = mykey,port=self.ssh_port)
+        self.ssh.connect(self.server_name, username=self.username, pkey=mykey, port=self.ssh_port)

         t = datetime.datetime.now()
-        self.backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
-        dumpfile= self.remote_backup_dir + '/' + self.db_name + '_' + self.backup_start_date+'.dmp'
-        dumplog = self.remote_backup_dir + '/' + self.db_name + '_' + self.backup_start_date+'.log'
+        self.backup_start_date = t.strftime("%Y%m%d-%Hh%Mm%S")
+        dumpfile = self.remote_backup_dir + "/" + self.db_name + "_" + self.backup_start_date + ".dmp"
+        dumplog = self.remote_backup_dir + "/" + self.db_name + "_" + self.backup_start_date + ".log"

-        self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date)
+        self.dest_dir = os.path.join(self.backup_dir, self.backup_start_date)
         if not os.path.isdir(self.dest_dir):
             if not self.dry_run:
                 os.makedirs(self.dest_dir)
             else:
                 print(('mkdir "%s"' % self.dest_dir))
         else:
-            raise Exception('backup destination directory already exists : %s' % self.dest_dir)
+            raise Exception("backup destination directory already exists : %s" % self.dest_dir)

         # dump db
-        stats['status']='Dumping'
-        cmd = "exp '%s' file='%s' grants=y log='%s'"% (self.userid,dumpfile, dumplog)
-        self.logger.debug('[%s] Dump DB : %s',self.backup_name,cmd)
+        stats["status"] = "Dumping"
+        cmd = "exp '%s' file='%s' grants=y log='%s'" % (self.userid, dumpfile, dumplog)
+        self.logger.debug("[%s] Dump DB : %s", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                localpath = os.path.join(self.dest_dir , self.db_name + '.log')
-                self.logger.debug('[%s] Get log file with sftp on %s from %s to %s',self.backup_name,self.server_name,dumplog,localpath)
+                localpath = os.path.join(self.dest_dir, self.db_name + ".log")
+                self.logger.debug("[%s] Get log file with sftp on %s from %s to %s", self.backup_name, self.server_name, dumplog, localpath)
                 transport = self.ssh.get_transport()
                 sftp = paramiko.SFTPClient.from_transport(transport)
                 sftp.get(dumplog, localpath)
                 sftp.close()
                 file = open(localpath)
                 for line in file:
-                    if re.search('EXP-[0-9]+:', line) and not re.match('EXP-[0-9]+:', line).group(0).replace(':','') in self.ignore_error_oracle_code:
-                        stats['status']='RMTemp'
-                        self.clean_dumpfiles(dumpfile,dumplog)
-                        raise Exception('Aborting, Not null exit code (%s) for "%s"' % (re.match('EXP-[0-9]+:', line).group(0).replace(':',''),cmd))
+                    if (
+                        re.search("EXP-[0-9]+:", line)
+                        and re.match("EXP-[0-9]+:", line).group(0).replace(":", "") not in self.ignore_error_oracle_code
+                    ):
+                        stats["status"] = "RMTemp"
+                        self.clean_dumpfiles(dumpfile, dumplog)
+                        raise Exception(
+                            'Aborting, Not null exit code (%s) for "%s"' % (re.match("EXP-[0-9]+:", line).group(0).replace(":", ""), cmd)
+                        )
                 file.close()

         # zip the file
-        stats['status']='Zipping'
-        cmd = 'gzip %s' % dumpfile
-        self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd)
+        stats["status"] = "Zipping"
+        cmd = "gzip %s" % dumpfile
+        self.logger.debug("[%s] Compress backup : %s", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))

         # get the file
-        stats['status']='SFTP'
-        filepath = dumpfile + '.gz'
-        localpath = os.path.join(self.dest_dir , self.db_name + '.dmp.gz')
-        self.logger.debug('[%s] Get gz backup with sftp on %s from %s to %s',self.backup_name,self.server_name,filepath,localpath)
+        stats["status"] = "SFTP"
+        filepath = dumpfile + ".gz"
+        localpath = os.path.join(self.dest_dir, self.db_name + ".dmp.gz")
+        self.logger.debug("[%s] Get gz backup with sftp on %s from %s to %s", self.backup_name, self.server_name, filepath, localpath)
         if not self.dry_run:
             transport = self.ssh.get_transport()
             sftp = paramiko.SFTPClient.from_transport(transport)
             sftp.get(filepath, localpath)
             sftp.close()

         if not self.dry_run:
-            stats['total_files_count']=1
-            stats['written_files_count']=1
-            stats['total_bytes']=os.stat(localpath).st_size
-            stats['written_bytes']=os.stat(localpath).st_size
-        stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,self.db_name, stats['written_bytes'], localpath)
-        stats['backup_location'] = self.dest_dir
-        stats['status']='RMTemp'
-        self.clean_dumpfiles(dumpfile,dumplog)
-        stats['status']='OK'
+            stats["total_files_count"] = 1
+            stats["written_files_count"] = 1
+            stats["total_bytes"] = os.stat(localpath).st_size
+            stats["written_bytes"] = os.stat(localpath).st_size
+        stats["log"] = "gzip dump of DB %s:%s (%d bytes) to %s" % (self.server_name, self.db_name, stats["written_bytes"], localpath)
+        stats["backup_location"] = self.dest_dir
+        stats["status"] = "RMTemp"
+        self.clean_dumpfiles(dumpfile, dumplog)
+        stats["status"] = "OK"

     def register_existingbackups(self):
         """scan backup dir and insert stats in database"""
-        registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))]
+        registered = [
+            b["backup_location"]
+            for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
+        ]

         filelist = os.listdir(self.backup_dir)
         filelist.sort()
-        p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
+        p = re.compile("^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
         for item in filelist:
             if p.match(item):
-                dir_name = os.path.join(self.backup_dir,item)
-                if not dir_name in registered:
-                    start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat()
-                    if fileisodate(dir_name)>start:
+                dir_name = os.path.join(self.backup_dir, item)
+                if dir_name not in registered:
+                    start = datetime.datetime.strptime(item, "%Y%m%d-%Hh%Mm%S").isoformat()
+                    if fileisodate(dir_name) > start:
                         stop = fileisodate(dir_name)
                     else:
                         stop = start
-                    self.logger.info('Registering %s started on %s',dir_name,start)
-                    self.logger.debug('  Disk usage %s','du -sb "%s"' % dir_name)
+                    self.logger.info("Registering %s started on %s", dir_name, start)
+                    self.logger.debug("  Disk usage %s", 'du -sb "%s"' % dir_name)
                     if not self.dry_run:
-                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0])
+                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
                     else:
                         size_bytes = 0
-                    self.logger.debug('  Size in bytes : %i',size_bytes)
+                    self.logger.debug("  Size in bytes : %i", size_bytes)
                     if not self.dry_run:
-                        self.dbstat.add(self.backup_name,self.server_name,'',\
-                                        backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name)
+                        self.dbstat.add(
+                            self.backup_name,
+                            self.server_name,
+                            "",
+                            backup_start=start,
+                            backup_end=stop,
+                            status="OK",
+                            total_bytes=size_bytes,
+                            backup_location=dir_name,
+                        )
                 else:
-                    self.logger.info('Skipping %s, already registered',dir_name)
+                    self.logger.info("Skipping %s, already registered", dir_name)

-    def clean_dumpfiles(self,dumpfile,dumplog):
-        cmd = 'rm -f "%s.gz" "%s"' %( dumpfile , dumplog)
-        self.logger.debug('[%s] Remove temp gzip : %s',self.backup_name,cmd)
+    def clean_dumpfiles(self, dumpfile, dumplog):
+        cmd = 'rm -f "%s.gz" "%s"' % (dumpfile, dumplog)
+        self.logger.debug("[%s] Remove temp gzip : %s", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
-        cmd = 'rm -f '+self.remote_backup_dir + '/' + self.db_name + '_' + self.backup_start_date+'.dmp'
-        self.logger.debug('[%s] Remove temp dump : %s',self.backup_name,cmd)
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))
+        cmd = "rm -f " + self.remote_backup_dir + "/" + self.db_name + "_" + self.backup_start_date + ".dmp"
+        self.logger.debug("[%s] Remove temp dump : %s", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))

 register_driver(backup_oracle)

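The error-scanning condition rewritten above is dense; its effect is easier to see on a sample exp(1) log line. In this sketch the EXP code is illustrative, not taken from the commit:

import re

# Illustrative Oracle export log line; EXP-00056 is a sample code.
line = "EXP-00056: ORACLE error 1017 encountered"
m = re.match("EXP-[0-9]+:", line)
code = m.group(0).replace(":", "")  # -> "EXP-00056"
# The backup aborts unless this code appears in ignore_error_oracle_code.
print(code)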

@@ -20,7 +20,7 @@

 import sys

 try:
-    sys.stderr = open('/dev/null')  # Silence silly warnings from paramiko
+    sys.stderr = open("/dev/null")  # Silence silly warnings from paramiko
     import paramiko
 except ImportError as e:
     print(("Error : can not load paramiko library %s" % e))
@@ -33,16 +33,17 @@ from .common import *

 class backup_pgsql(backup_generic):
     """Backup a postgresql database as gzipped sql file through ssh"""

-    type = 'pgsql+ssh'
-    required_params = backup_generic.required_params + ['private_key']
-    optional_params = backup_generic.optional_params + ['db_name','tmp_dir','encoding']
+    type = "pgsql+ssh"
+    required_params = backup_generic.required_params + ["private_key"]
+    optional_params = backup_generic.optional_params + ["db_name", "tmp_dir", "encoding"]

-    db_name = ''
-    tmp_dir = '/tmp'
-    encoding = 'UTF8'
+    db_name = ""
+    tmp_dir = "/tmp"
+    encoding = "UTF8"

-    def do_backup(self,stats):
-        self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date)
+    def do_backup(self, stats):
+        self.dest_dir = os.path.join(self.backup_dir, self.backup_start_date)

         if not os.path.isdir(self.dest_dir):
             if not self.dry_run:

@@ -50,117 +51,127 @@ class backup_pgsql(backup_generic):
             else:
                 print(('mkdir "%s"' % self.dest_dir))
         else:
-            raise Exception('backup destination directory already exists : %s' % self.dest_dir)
+            raise Exception("backup destination directory already exists : %s" % self.dest_dir)

         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            # mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
             mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

-        self.logger.debug('[%s] Trying to connect to "%s" with username root and key "%s"',self.backup_name,self.server_name,self.private_key)
+        self.logger.debug(
+            '[%s] Trying to connect to "%s" with username root and key "%s"', self.backup_name, self.server_name, self.private_key
+        )
         self.ssh = paramiko.SSHClient()
         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        self.ssh.connect(self.server_name,username='root',pkey = mykey,port=self.ssh_port)
+        self.ssh.connect(self.server_name, username="root", pkey=mykey, port=self.ssh_port)

         if self.db_name:
-            stats['log']= "Successfully backup processed to the following database :"
+            stats["log"] = "Successfully backup processed to the following database :"
             self.do_pgsqldump(stats)
         else:
-            stats['log']= "Successfully backuping processed to the following databases :"
-            stats['status']='List'
+            stats["log"] = "Successfully backuping processed to the following databases :"
+            stats["status"] = "List"
             cmd = """su - postgres -c 'psql -A -t -c "SELECT datname FROM pg_database WHERE datistemplate = false;"' 2> /dev/null"""
-            self.logger.debug('[%s] List databases: %s',self.backup_name,cmd)
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            self.logger.debug("[%s] List databases: %s", self.backup_name, cmd)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
-            databases = output.split('\n')
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))
+            databases = output.split("\n")
             for database in databases:
                 if database.strip() not in ("", "template0", "template1"):
                     self.db_name = database.strip()
                     self.do_pgsqldump(stats)

-        stats['status']='OK'
+        stats["status"] = "OK"

-    def do_pgsqldump(self,stats):
+    def do_pgsqldump(self, stats):
         t = datetime.datetime.now()
-        backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
+        backup_start_date = t.strftime("%Y%m%d-%Hh%Mm%S")
         params = {
-            'encoding':self.encoding,
-            'db_name':self.db_name,
-            'tmp_dir':self.tmp_dir,
-            'dest_dir':self.dest_dir,
-            'backup_start_date':backup_start_date}
+            "encoding": self.encoding,
+            "db_name": self.db_name,
+            "tmp_dir": self.tmp_dir,
+            "dest_dir": self.dest_dir,
+            "backup_start_date": backup_start_date,
+        }

         # dump db
-        filepath = '%(tmp_dir)s/%(db_name)s-%(backup_start_date)s.sql.gz' % params
+        filepath = "%(tmp_dir)s/%(db_name)s-%(backup_start_date)s.sql.gz" % params
         cmd = "su - postgres -c 'pg_dump -E %(encoding)s -Z9 %(db_name)s'" % params
-        cmd += ' > ' + filepath
-        self.logger.debug('[%s] %s ',self.backup_name,cmd)
+        cmd += " > " + filepath
+        self.logger.debug("[%s] %s ", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))

         # get the file
-        localpath = '%(dest_dir)s/%(db_name)s-%(backup_start_date)s.sql.gz' % params
-        self.logger.debug('[%s] get the file using sftp from "%s" to "%s" ',self.backup_name,filepath,localpath)
+        localpath = "%(dest_dir)s/%(db_name)s-%(backup_start_date)s.sql.gz" % params
+        self.logger.debug('[%s] get the file using sftp from "%s" to "%s" ', self.backup_name, filepath, localpath)
         if not self.dry_run:
             transport = self.ssh.get_transport()
             sftp = paramiko.SFTPClient.from_transport(transport)
             sftp.get(filepath, localpath)
             sftp.close()

         if not self.dry_run:
-            stats['total_files_count']=1 + stats.get('total_files_count', 0)
-            stats['written_files_count']=1 + stats.get('written_files_count', 0)
-            stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0)
-            stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0)
-        stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name)
-        stats['backup_location'] = self.dest_dir
+            stats["total_files_count"] = 1 + stats.get("total_files_count", 0)
+            stats["written_files_count"] = 1 + stats.get("written_files_count", 0)
+            stats["total_bytes"] = os.stat(localpath).st_size + stats.get("total_bytes", 0)
+            stats["written_bytes"] = os.stat(localpath).st_size + stats.get("written_bytes", 0)
+        stats["log"] = '%s "%s"' % (stats["log"], self.db_name)
+        stats["backup_location"] = self.dest_dir

-        cmd = 'rm -f %(tmp_dir)s/%(db_name)s-%(backup_start_date)s.sql.gz' % params
-        self.logger.debug('[%s] %s ',self.backup_name,cmd)
+        cmd = "rm -f %(tmp_dir)s/%(db_name)s-%(backup_start_date)s.sql.gz" % params
+        self.logger.debug("[%s] %s ", self.backup_name, cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))

     def register_existingbackups(self):
         """scan backup dir and insert stats in database"""
-        registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))]
+        registered = [
+            b["backup_location"]
+            for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
+        ]

         filelist = os.listdir(self.backup_dir)
         filelist.sort()
-        p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
+        p = re.compile("^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
         for item in filelist:
             if p.match(item):
-                dir_name = os.path.join(self.backup_dir,item)
-                if not dir_name in registered:
-                    start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat()
-                    if fileisodate(dir_name)>start:
+                dir_name = os.path.join(self.backup_dir, item)
+                if dir_name not in registered:
+                    start = datetime.datetime.strptime(item, "%Y%m%d-%Hh%Mm%S").isoformat()
+                    if fileisodate(dir_name) > start:
                         stop = fileisodate(dir_name)
                     else:
                         stop = start
-                    self.logger.info('Registering %s started on %s',dir_name,start)
-                    self.logger.debug('  Disk usage %s','du -sb "%s"' % dir_name)
+                    self.logger.info("Registering %s started on %s", dir_name, start)
+                    self.logger.debug("  Disk usage %s", 'du -sb "%s"' % dir_name)
                     if not self.dry_run:
-                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0])
+                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
                     else:
                         size_bytes = 0
-                    self.logger.debug('  Size in bytes : %i',size_bytes)
+                    self.logger.debug("  Size in bytes : %i", size_bytes)
                     if not self.dry_run:
-                        self.dbstat.add(self.backup_name,self.server_name,'',\
-                                        backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name)
+                        self.dbstat.add(
+                            self.backup_name,
+                            self.server_name,
+                            "",
+                            backup_start=start,
+                            backup_end=stop,
+                            status="OK",
+                            total_bytes=size_bytes,
+                            backup_location=dir_name,
+                        )
                 else:
-                    self.logger.info('Skipping %s, already registered',dir_name)
+                    self.logger.info("Skipping %s, already registered", dir_name)

 register_driver(backup_pgsql)

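The `%(name)s` dictionary formatting used by do_pgsqldump expands as follows; the values below are illustrative samples, not from the commit:

# Illustrative expansion of the params-based templates in do_pgsqldump.
params = {
    "encoding": "UTF8",
    "db_name": "appdb",  # sample database name
    "tmp_dir": "/tmp",
    "backup_start_date": "20241129-22h54m39",  # sample timestamp
}
cmd = "su - postgres -c 'pg_dump -E %(encoding)s -Z9 %(db_name)s'" % params
cmd += " > " + "%(tmp_dir)s/%(db_name)s-%(backup_start_date)s.sql.gz" % params
print(cmd)
# su - postgres -c 'pg_dump -E UTF8 -Z9 appdb' > /tmp/appdb-20241129-22h54m39.sql.gz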

@ -30,78 +30,84 @@ from libtisbackup.common import *
class backup_rsync(backup_generic): class backup_rsync(backup_generic):
"""Backup a directory on remote server with rsync and rsync protocol (requires running remote rsync daemon)""" """Backup a directory on remote server with rsync and rsync protocol (requires running remote rsync daemon)"""
type = 'rsync'
required_params = backup_generic.required_params + ['remote_user','remote_dir','rsync_module','password_file']
optional_params = backup_generic.optional_params + ['compressionlevel','compression','bwlimit','exclude_list','protect_args','overload_args']
remote_user='root' type = "rsync"
remote_dir='' required_params = backup_generic.required_params + ["remote_user", "remote_dir", "rsync_module", "password_file"]
optional_params = backup_generic.optional_params + [
"compressionlevel",
"compression",
"bwlimit",
"exclude_list",
"protect_args",
"overload_args",
]
exclude_list='' remote_user = "root"
rsync_module='' remote_dir = ""
password_file = ''
compression = '' exclude_list = ""
rsync_module = ""
password_file = ""
compression = ""
bwlimit = 0 bwlimit = 0
protect_args = '1' protect_args = "1"
overload_args = None overload_args = None
compressionlevel = 0 compressionlevel = 0
def read_config(self, iniconf):
assert isinstance(iniconf, ConfigParser)
backup_generic.read_config(self, iniconf)
if not self.bwlimit and iniconf.has_option("global", "bw_limit"):
self.bwlimit = iniconf.getint("global", "bw_limit")
if not self.compressionlevel and iniconf.has_option("global", "compression_level"):
self.compressionlevel = iniconf.getint("global", "compression_level")
def do_backup(self, stats):
def read_config(self,iniconf):
assert(isinstance(iniconf,ConfigParser))
backup_generic.read_config(self,iniconf)
if not self.bwlimit and iniconf.has_option('global','bw_limit'):
self.bwlimit = iniconf.getint('global','bw_limit')
if not self.compressionlevel and iniconf.has_option('global','compression_level'):
self.compressionlevel = iniconf.getint('global','compression_level')
def do_backup(self,stats):
if not self.set_lock(): if not self.set_lock():
self.logger.error("[%s] a lock file is set, a backup maybe already running!!",self.backup_name) self.logger.error("[%s] a lock file is set, a backup maybe already running!!", self.backup_name)
return False return False
try: try:
try: try:
backup_source = 'undefined' backup_source = "undefined"
dest_dir = os.path.join(self.backup_dir,self.backup_start_date+'.rsync/') dest_dir = os.path.join(self.backup_dir, self.backup_start_date + ".rsync/")
if not os.path.isdir(dest_dir): if not os.path.isdir(dest_dir):
if not self.dry_run: if not self.dry_run:
os.makedirs(dest_dir) os.makedirs(dest_dir)
else: else:
print(('mkdir "%s"' % dest_dir)) print('mkdir "%s"' % dest_dir)
else: else:
raise Exception('backup destination directory already exists : %s' % dest_dir) raise Exception("backup destination directory already exists : %s" % dest_dir)
options = ['-rt','--stats','--delete-excluded','--numeric-ids','--delete-after'] options = ["-rt", "--stats", "--delete-excluded", "--numeric-ids", "--delete-after"]
if self.logger.level: if self.logger.level:
options.append('-P') options.append("-P")
if self.dry_run: if self.dry_run:
options.append('-d') options.append("-d")
if self.overload_args != None: if self.overload_args is not None:
options.append(self.overload_args) options.append(self.overload_args)
elif not "cygdrive" in self.remote_dir: elif "cygdrive" not in self.remote_dir:
# we don't preserve owner, group, links, hardlinks, perms for windows/cygwin as it is not reliable nor useful # we don't preserve owner, group, links, hardlinks, perms for windows/cygwin as it is neither reliable nor useful
options.append('-lpgoD') options.append("-lpgoD")
# the protect-args option is not available in all rsync version # the protect-args option is not available in all rsync versions
if not self.protect_args.lower() in ('false','no','0'): if self.protect_args.lower() not in ("false", "no", "0"):
options.append('--protect-args') options.append("--protect-args")
if self.compression.lower() in ('true','yes','1'): if self.compression.lower() in ("true", "yes", "1"):
options.append('-z') options.append("-z")
if self.compressionlevel: if self.compressionlevel:
options.append('--compress-level=%s' % self.compressionlevel) options.append("--compress-level=%s" % self.compressionlevel)
if self.bwlimit: if self.bwlimit:
options.append('--bwlimit %s' % self.bwlimit) options.append("--bwlimit %s" % self.bwlimit)
latest = self.get_latest_backup(self.backup_start_date) latest = self.get_latest_backup(self.backup_start_date)
if latest: if latest:
options.extend(['--link-dest="%s"' % os.path.join('..',b,'') for b in latest]) options.extend(['--link-dest="%s"' % os.path.join("..", b, "") for b in latest])
def strip_quotes(s): def strip_quotes(s):
if s[0] == '"': if s[0] == '"':
@@ -113,173 +119,193 @@ class backup_rsync(backup_generic):
# Add excludes # Add excludes
if "--exclude" in self.exclude_list: if "--exclude" in self.exclude_list:
# old settings with exclude_list=--exclude toto --exclude=titi # old settings with exclude_list=--exclude toto --exclude=titi
excludes = [strip_quotes(s).strip() for s in self.exclude_list.replace('--exclude=','').replace('--exclude ','').split()] excludes = [
strip_quotes(s).strip() for s in self.exclude_list.replace("--exclude=", "").replace("--exclude ", "").split()
]
else: else:
try: try:
# newsettings with exclude_list='too','titi', parsed as a str python list content # new settings with exclude_list='toto','titi', parsed as a str python list content
excludes = eval('[%s]' % self.exclude_list) excludes = eval("[%s]" % self.exclude_list)
except Exception as e: except Exception as e:
raise Exception('Error reading exclude list : value %s, eval error %s (don\'t forget quotes and comma...)' % (self.exclude_list,e)) raise Exception(
"Error reading exclude list : value %s, eval error %s (don't forget quotes and comma...)"
% (self.exclude_list, e)
)
options.extend(['--exclude="%s"' % x for x in excludes]) options.extend(['--exclude="%s"' % x for x in excludes])
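The two exclude_list formats handled above can be exercised in isolation. A hedged sketch follows, with sample values invented for illustration and ast.literal_eval swapped in for the driver's eval as a safer way to parse the list form:

import ast

def parse_excludes(exclude_list):
    """Return the exclude patterns from either historical config format."""
    if "--exclude" in exclude_list:
        # old style: exclude_list=--exclude toto --exclude=titi
        cleaned = exclude_list.replace("--exclude=", "").replace("--exclude ", "")
        return [s.strip().strip('"') for s in cleaned.split()]
    # new style: exclude_list='toto','titi' (the body of a Python list)
    return list(ast.literal_eval("[%s]" % exclude_list))

print(parse_excludes("--exclude toto --exclude=titi"))  # ['toto', 'titi']
print(parse_excludes("'toto','titi'"))                  # ['toto', 'titi']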
if (self.rsync_module and not self.password_file): if self.rsync_module and not self.password_file:
raise Exception('You must specify a password file if you specify a rsync module') raise Exception("You must specify a password file if you specify a rsync module")
if (not self.rsync_module and not self.private_key): if not self.rsync_module and not self.private_key:
raise Exception('If you don''t use SSH, you must specify a rsync module') raise Exception("If you don't use SSH, you must specify a rsync module")
#rsync_re = re.compile('(?P<server>[^:]*)::(?P<export>[^/]*)/(?P<path>.*)') # rsync_re = re.compile('(?P<server>[^:]*)::(?P<export>[^/]*)/(?P<path>.*)')
#ssh_re = re.compile('((?P<user>.*)@)?(?P<server>[^:]*):(?P<path>/.*)') # ssh_re = re.compile('((?P<user>.*)@)?(?P<server>[^:]*):(?P<path>/.*)')
# Add ssh connection params # Add ssh connection params
if self.rsync_module: if self.rsync_module:
# Case of rsync exports # Case of rsync exports
if self.password_file: if self.password_file:
options.append('--password-file="%s"' % self.password_file) options.append('--password-file="%s"' % self.password_file)
backup_source = '%s@%s::%s%s' % (self.remote_user, self.server_name, self.rsync_module, self.remote_dir) backup_source = "%s@%s::%s%s" % (self.remote_user, self.server_name, self.rsync_module, self.remote_dir)
else: else:
# case of rsync + ssh # case of rsync + ssh
ssh_params = ['-o StrictHostKeyChecking=no'] ssh_params = ["-o StrictHostKeyChecking=no"]
ssh_params.append('-o BatchMode=yes') ssh_params.append("-o BatchMode=yes")
if self.private_key: if self.private_key:
ssh_params.append('-i %s' % self.private_key) ssh_params.append("-i %s" % self.private_key)
if self.cipher_spec: if self.cipher_spec:
ssh_params.append('-c %s' % self.cipher_spec) ssh_params.append("-c %s" % self.cipher_spec)
if self.ssh_port != 22: if self.ssh_port != 22:
ssh_params.append('-p %i' % self.ssh_port) ssh_params.append("-p %i" % self.ssh_port)
options.append('-e "/usr/bin/ssh %s"' % (" ".join(ssh_params))) options.append('-e "/usr/bin/ssh %s"' % (" ".join(ssh_params)))
backup_source = '%s@%s:%s' % (self.remote_user,self.server_name,self.remote_dir) backup_source = "%s@%s:%s" % (self.remote_user, self.server_name, self.remote_dir)
# ensure there is a slash at end # ensure there is a slash at end
if backup_source[-1] != '/': if backup_source[-1] != "/":
backup_source += '/' backup_source += "/"
options_params = " ".join(options) options_params = " ".join(options)
cmd = '/usr/bin/rsync %s %s %s 2>&1' % (options_params,backup_source,dest_dir) cmd = "/usr/bin/rsync %s %s %s 2>&1" % (options_params, backup_source, dest_dir)
self.logger.debug("[%s] rsync : %s",self.backup_name,cmd) self.logger.debug("[%s] rsync : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
self.line = '' self.line = ""
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
def ondata(data,context):
def ondata(data, context):
if context.verbose: if context.verbose:
print(data) print(data)
context.logger.debug(data) context.logger.debug(data)
log = monitor_stdout(process,ondata,self) log = monitor_stdout(process, ondata, self)
reg_total_files = re.compile('Number of files: (?P<file>\d+)') reg_total_files = re.compile(r"Number of files: (?P<file>\d+)")
reg_transferred_files = re.compile('Number of .*files transferred: (?P<file>\d+)') reg_transferred_files = re.compile(r"Number of .*files transferred: (?P<file>\d+)")
for l in log.splitlines(): for raw_line in log.splitlines():
line = l.replace(',','') line = raw_line.replace(",", "")
m = reg_total_files.match(line) m = reg_total_files.match(line)
if m: if m:
stats['total_files_count'] += int(m.groupdict()['file']) stats["total_files_count"] += int(m.groupdict()["file"])
m = reg_transferred_files.match(line) m = reg_transferred_files.match(line)
if m: if m:
stats['written_files_count'] += int(m.groupdict()['file']) stats["written_files_count"] += int(m.groupdict()["file"])
if line.startswith('Total file size:'): if line.startswith("Total file size:"):
stats['total_bytes'] += int(line.split(':')[1].split()[0]) stats["total_bytes"] += int(line.split(":")[1].split()[0])
if line.startswith('Total transferred file size:'): if line.startswith("Total transferred file size:"):
stats['written_bytes'] += int(line.split(':')[1].split()[0]) stats["written_bytes"] += int(line.split(":")[1].split()[0])
returncode = process.returncode returncode = process.returncode
## deal with exit code 24 (file vanished) ## deal with exit code 24 (file vanished)
if (returncode == 24): if returncode == 24:
self.logger.warning("[" + self.backup_name + "] Note: some files vanished before transfer") self.logger.warning("[" + self.backup_name + "] Note: some files vanished before transfer")
elif (returncode == 23): elif returncode == 23:
self.logger.warning("[" + self.backup_name + "] unable so set uid on some files") self.logger.warning("[" + self.backup_name + "] unable so set uid on some files")
elif (returncode != 0): elif returncode != 0:
self.logger.error("[" + self.backup_name + "] shell program exited with error code " + str(returncode)) self.logger.error("[" + self.backup_name + "] shell program exited with error code " + str(returncode))
raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd, log[-512:]) raise Exception(
"[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd, log[-512:]
)
else: else:
print(cmd) print(cmd)
#we suppress the .rsync suffix if everything went well # we suppress the .rsync suffix if everything went well
finaldest = os.path.join(self.backup_dir,self.backup_start_date) finaldest = os.path.join(self.backup_dir, self.backup_start_date)
self.logger.debug("[%s] renaming target directory from %s to %s" ,self.backup_name,dest_dir,finaldest) self.logger.debug("[%s] renaming target directory from %s to %s", self.backup_name, dest_dir, finaldest)
if not self.dry_run: if not self.dry_run:
os.rename(dest_dir, finaldest) os.rename(dest_dir, finaldest)
self.logger.debug("[%s] touching datetime of target directory %s" ,self.backup_name,finaldest) self.logger.debug("[%s] touching datetime of target directory %s", self.backup_name, finaldest)
print((os.popen('touch "%s"' % finaldest).read())) print(os.popen('touch "%s"' % finaldest).read())
else: else:
print(("mv" ,dest_dir,finaldest)) print(("mv", dest_dir, finaldest))
stats['backup_location'] = finaldest stats["backup_location"] = finaldest
stats['status']='OK' stats["status"] = "OK"
stats['log']='ssh+rsync backup from %s OK, %d bytes written for %d changed files' % (backup_source,stats['written_bytes'],stats['written_files_count']) stats["log"] = "ssh+rsync backup from %s OK, %d bytes written for %d changed files" % (
backup_source,
stats["written_bytes"],
stats["written_files_count"],
)
except BaseException as e: except BaseException as e:
stats['status']='ERROR' stats["status"] = "ERROR"
stats['log']=str(e) stats["log"] = str(e)
raise raise
finally: finally:
self.remove_lock() self.remove_lock()
def get_latest_backup(self,current): def get_latest_backup(self, current):
result = [] result = []
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
filelist.reverse() filelist.reverse()
full = '' # full = ''
r_full = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$') r_full = re.compile(r"^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
r_partial = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}.rsync$') r_partial = re.compile(r"^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}\.rsync$")
# we take all latest partials younger than the latest full and the latest full # we take all latest partials younger than the latest full and the latest full
for item in filelist: for item in filelist:
if r_partial.match(item) and item<current: if r_partial.match(item) and item < current:
result.append(item) result.append(item)
elif r_full.match(item) and item<current: elif r_full.match(item) and item < current:
result.append(item) result.append(item)
break break
return result return result
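A quick worked example of the selection rule above: walking the directory list newest first, every partial (.rsync) run younger than the most recent full backup is collected, then the full backup itself, and the results become --link-dest references. The directory names below are invented:

import re

def latest_backups(filelist, current):
    result = []
    r_full = re.compile(r"^\d{8}-\d{2}h\d{2}m\d{2}$")
    r_partial = re.compile(r"^\d{8}-\d{2}h\d{2}m\d{2}\.rsync$")
    for item in sorted(filelist, reverse=True):  # newest first
        if r_partial.match(item) and item < current:
            result.append(item)
        elif r_full.match(item) and item < current:
            result.append(item)
            break  # stop at the most recent full backup
    return result

dirs = ["20240101-01h00m00", "20240102-01h00m00.rsync", "20240103-01h00m00.rsync"]
print(latest_backups(dirs, "20240104-01h00m00"))
# ['20240103-01h00m00.rsync', '20240102-01h00m00.rsync', '20240101-01h00m00']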
def register_existingbackups(self): def register_existingbackups(self):
"""scan backup dir and insert stats in database""" """scan backup dir and insert stats in database"""
registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))] registered = [
b["backup_location"]
for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
]
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$') p = re.compile(r"^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
for item in filelist: for item in filelist:
if p.match(item): if p.match(item):
dir_name = os.path.join(self.backup_dir,item) dir_name = os.path.join(self.backup_dir, item)
if not dir_name in registered: if dir_name not in registered:
start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat() start = datetime.datetime.strptime(item, "%Y%m%d-%Hh%Mm%S").isoformat()
if fileisodate(dir_name)>start: if fileisodate(dir_name) > start:
stop = fileisodate(dir_name) stop = fileisodate(dir_name)
else: else:
stop = start stop = start
self.logger.info('Registering %s started on %s',dir_name,start) self.logger.info("Registering %s started on %s", dir_name, start)
self.logger.debug(' Disk usage %s','du -sb "%s"' % dir_name) self.logger.debug(" Disk usage %s", 'du -sb "%s"' % dir_name)
if not self.dry_run: if not self.dry_run:
size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0]) size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
else: else:
size_bytes = 0 size_bytes = 0
self.logger.debug(' Size in bytes : %i',size_bytes) self.logger.debug(" Size in bytes : %i", size_bytes)
if not self.dry_run: if not self.dry_run:
self.dbstat.add(self.backup_name,self.server_name,'',\ self.dbstat.add(
backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name) self.backup_name,
self.server_name,
"",
backup_start=start,
backup_end=stop,
status="OK",
total_bytes=size_bytes,
backup_location=dir_name,
)
else: else:
self.logger.info('Skipping %s, already registered',dir_name) self.logger.info("Skipping %s, already registered", dir_name)
def is_pid_still_running(self, lockfile):
def is_pid_still_running(self,lockfile):
f = open(lockfile) f = open(lockfile)
lines = f.readlines() lines = f.readlines()
f.close() f.close()
if len(lines)==0 : if len(lines) == 0:
self.logger.info("[" + self.backup_name + "] empty lock file, removing...") self.logger.info("[" + self.backup_name + "] empty lock file, removing...")
return False return False
for line in lines: for line in lines:
if line.startswith('pid='): if line.startswith("pid="):
pid = line.split('=')[1].strip() pid = line.split("=")[1].strip()
if os.path.exists("/proc/" + pid): if os.path.exists("/proc/" + pid):
self.logger.info("[" + self.backup_name + "] process still there") self.logger.info("[" + self.backup_name + "] process still there")
return True return True
@@ -290,54 +316,63 @@ class backup_rsync(backup_generic):
self.logger.info("[" + self.backup_name + "] incorrrect lock file : no pid line") self.logger.info("[" + self.backup_name + "] incorrrect lock file : no pid line")
return False return False
def set_lock(self): def set_lock(self):
self.logger.debug("[" + self.backup_name + "] setting lock") self.logger.debug("[" + self.backup_name + "] setting lock")
#TODO: improve for race condition # TODO: improve for race condition
#TODO: also check if process is really there # TODO: also check if process is really there
if os.path.isfile(self.backup_dir + '/lock'): if os.path.isfile(self.backup_dir + "/lock"):
self.logger.debug("[" + self.backup_name + "] File " + self.backup_dir + '/lock already exist') self.logger.debug("[" + self.backup_name + "] File " + self.backup_dir + "/lock already exist")
if self.is_pid_still_running(self.backup_dir + '/lock')==False: if not self.is_pid_still_running(self.backup_dir + "/lock"):
self.logger.info("[" + self.backup_name + "] removing lock file " + self.backup_dir + '/lock') self.logger.info("[" + self.backup_name + "] removing lock file " + self.backup_dir + "/lock")
os.unlink(self.backup_dir + '/lock') os.unlink(self.backup_dir + "/lock")
else: else:
return False return False
lockfile = open(self.backup_dir + '/lock',"w") lockfile = open(self.backup_dir + "/lock", "w")
# Write all the lines at once: # Write all the lines at once:
lockfile.write('pid='+str(os.getpid())) lockfile.write("pid=" + str(os.getpid()))
lockfile.write('\nbackup_time=' + self.backup_start_date) lockfile.write("\nbackup_time=" + self.backup_start_date)
lockfile.close() lockfile.close()
return True return True
def remove_lock(self): def remove_lock(self):
self.logger.debug("[%s] removing lock",self.backup_name ) self.logger.debug("[%s] removing lock", self.backup_name)
os.unlink(self.backup_dir + '/lock') os.unlink(self.backup_dir + "/lock")
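The TODO above flags a race between checking for the lock file and creating it. One possible fix, shown only as a sketch and not what the driver currently does, is to let the kernel create the file atomically with O_EXCL:

import os

def try_set_lock(lock_path, backup_start_date):
    """Atomically create the lock file; return False if another process holds it."""
    try:
        # O_CREAT | O_EXCL fails with FileExistsError if the file already
        # exists, closing the check-then-create window.
        fd = os.open(lock_path, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
    except FileExistsError:
        return False
    with os.fdopen(fd, "w") as lockfile:
        lockfile.write("pid=%d\nbackup_time=%s" % (os.getpid(), backup_start_date))
    return True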
class backup_rsync_ssh(backup_rsync): class backup_rsync_ssh(backup_rsync):
"""Backup a directory on remote server with rsync and ssh protocol (requires rsync software on remote host)""" """Backup a directory on remote server with rsync and ssh protocol (requires rsync software on remote host)"""
type = 'rsync+ssh'
required_params = backup_generic.required_params + ['remote_user','remote_dir','private_key'] type = "rsync+ssh"
optional_params = backup_generic.optional_params + ['compression','bwlimit','ssh_port','exclude_list','protect_args','overload_args', 'cipher_spec'] required_params = backup_generic.required_params + ["remote_user", "remote_dir", "private_key"]
cipher_spec = '' optional_params = backup_generic.optional_params + [
"compression",
"bwlimit",
"ssh_port",
"exclude_list",
"protect_args",
"overload_args",
"cipher_spec",
]
cipher_spec = ""
register_driver(backup_rsync) register_driver(backup_rsync)
register_driver(backup_rsync_ssh) register_driver(backup_rsync_ssh)
if __name__=='__main__': if __name__ == "__main__":
logger = logging.getLogger('tisbackup') logger = logging.getLogger("tisbackup")
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
handler = logging.StreamHandler() handler = logging.StreamHandler()
handler.setFormatter(formatter) handler.setFormatter(formatter)
logger.addHandler(handler) logger.addHandler(handler)
cp = ConfigParser() cp = ConfigParser()
cp.read('/opt/tisbackup/configtest.ini') cp.read("/opt/tisbackup/configtest.ini")
dbstat = BackupStat('/backup/data/log/tisbackup.sqlite') dbstat = BackupStat("/backup/data/log/tisbackup.sqlite")
b = backup_rsync('htouvet','/backup/data/htouvet',dbstat) b = backup_rsync("htouvet", "/backup/data/htouvet", dbstat)
b.read_config(cp) b.read_config(cp)
b.process_backup() b.process_backup()
print((b.checknagios())) print(b.checknagios())
View File
@@ -30,86 +30,90 @@ from .common import *
class backup_rsync_btrfs(backup_generic): class backup_rsync_btrfs(backup_generic):
"""Backup a directory on remote server with rsync and btrfs protocol (requires running remote rsync daemon)""" """Backup a directory on remote server with rsync and btrfs protocol (requires running remote rsync daemon)"""
type = 'rsync+btrfs'
required_params = backup_generic.required_params + ['remote_user','remote_dir','rsync_module','password_file']
optional_params = backup_generic.optional_params + ['compressionlevel','compression','bwlimit','exclude_list','protect_args','overload_args']
remote_user='root' type = "rsync+btrfs"
remote_dir='' required_params = backup_generic.required_params + ["remote_user", "remote_dir", "rsync_module", "password_file"]
optional_params = backup_generic.optional_params + [
"compressionlevel",
"compression",
"bwlimit",
"exclude_list",
"protect_args",
"overload_args",
]
exclude_list='' remote_user = "root"
rsync_module='' remote_dir = ""
password_file = ''
compression = '' exclude_list = ""
rsync_module = ""
password_file = ""
compression = ""
bwlimit = 0 bwlimit = 0
protect_args = '1' protect_args = "1"
overload_args = None overload_args = None
compressionlevel = 0 compressionlevel = 0
def read_config(self, iniconf):
assert isinstance(iniconf, ConfigParser)
backup_generic.read_config(self, iniconf)
if not self.bwlimit and iniconf.has_option("global", "bw_limit"):
self.bwlimit = iniconf.getint("global", "bw_limit")
if not self.compressionlevel and iniconf.has_option("global", "compression_level"):
self.compressionlevel = iniconf.getint("global", "compression_level")
def do_backup(self, stats):
def read_config(self,iniconf):
assert(isinstance(iniconf,ConfigParser))
backup_generic.read_config(self,iniconf)
if not self.bwlimit and iniconf.has_option('global','bw_limit'):
self.bwlimit = iniconf.getint('global','bw_limit')
if not self.compressionlevel and iniconf.has_option('global','compression_level'):
self.compressionlevel = iniconf.getint('global','compression_level')
def do_backup(self,stats):
if not self.set_lock(): if not self.set_lock():
self.logger.error("[%s] a lock file is set, a backup maybe already running!!",self.backup_name) self.logger.error("[%s] a lock file is set, a backup maybe already running!!", self.backup_name)
return False return False
try: try:
try: try:
backup_source = 'undefined' backup_source = "undefined"
dest_dir = os.path.join(self.backup_dir,'last_backup') dest_dir = os.path.join(self.backup_dir, "last_backup")
if not os.path.isdir(dest_dir): if not os.path.isdir(dest_dir):
if not self.dry_run: if not self.dry_run:
cmd = "/bin/btrfs subvolume create %s"%dest_dir cmd = "/bin/btrfs subvolume create %s" % dest_dir
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
log = monitor_stdout(process,'',self) log = monitor_stdout(process, "", self)
returncode = process.returncode returncode = process.returncode
if (returncode != 0): if returncode != 0:
self.logger.error("[" + self.backup_name + "] shell program exited with error code: %s"%log) self.logger.error("[" + self.backup_name + "] shell program exited with error code: %s" % log)
raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd) raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd)
else: else:
self.logger.info("[" + self.backup_name + "] create btrs volume: %s"%dest_dir) self.logger.info("[" + self.backup_name + "] create btrs volume: %s" % dest_dir)
else: else:
print(('btrfs subvolume create "%s"' %dest_dir)) print('btrfs subvolume create "%s"' % dest_dir)
options = ["-rt", "--stats", "--delete-excluded", "--numeric-ids", "--delete-after"]
options = ['-rt','--stats','--delete-excluded','--numeric-ids','--delete-after']
if self.logger.level: if self.logger.level:
options.append('-P') options.append("-P")
if self.dry_run: if self.dry_run:
options.append('-d') options.append("-d")
if self.overload_args != None: if self.overload_args is not None:
options.append(self.overload_args) options.append(self.overload_args)
elif not "cygdrive" in self.remote_dir: elif "cygdrive" not in self.remote_dir:
# we don't preserve owner, group, links, hardlinks, perms for windows/cygwin as it is not reliable nor useful # we don't preserve owner, group, links, hardlinks, perms for windows/cygwin as it is neither reliable nor useful
options.append('-lpgoD') options.append("-lpgoD")
# the protect-args option is not available in all rsync version # the protect-args option is not available in all rsync versions
if not self.protect_args.lower() in ('false','no','0'): if self.protect_args.lower() not in ("false", "no", "0"):
options.append('--protect-args') options.append("--protect-args")
if self.compression.lower() in ('true','yes','1'): if self.compression.lower() in ("true", "yes", "1"):
options.append('-z') options.append("-z")
if self.compressionlevel: if self.compressionlevel:
options.append('--compress-level=%s' % self.compressionlevel) options.append("--compress-level=%s" % self.compressionlevel)
if self.bwlimit: if self.bwlimit:
options.append('--bwlimit %s' % self.bwlimit) options.append("--bwlimit %s" % self.bwlimit)
latest = self.get_latest_backup(self.backup_start_date) # latest = self.get_latest_backup(self.backup_start_date)
#remove link-dest replace by btrfs # remove link-dest replace by btrfs
#if latest: # if latest:
# options.extend(['--link-dest="%s"' % os.path.join('..',b,'') for b in latest]) # options.extend(['--link-dest="%s"' % os.path.join('..',b,'') for b in latest])
def strip_quotes(s): def strip_quotes(s):
@@ -122,181 +126,203 @@ class backup_rsync_btrfs(backup_generic):
# Add excludes # Add excludes
if "--exclude" in self.exclude_list: if "--exclude" in self.exclude_list:
# old settings with exclude_list=--exclude toto --exclude=titi # old settings with exclude_list=--exclude toto --exclude=titi
excludes = [strip_quotes(s).strip() for s in self.exclude_list.replace('--exclude=','').replace('--exclude ','').split()] excludes = [
strip_quotes(s).strip() for s in self.exclude_list.replace("--exclude=", "").replace("--exclude ", "").split()
]
else: else:
try: try:
# newsettings with exclude_list='too','titi', parsed as a str python list content # new settings with exclude_list='toto','titi', parsed as a str python list content
excludes = eval('[%s]' % self.exclude_list) excludes = eval("[%s]" % self.exclude_list)
except Exception as e: except Exception as e:
raise Exception('Error reading exclude list : value %s, eval error %s (don\'t forget quotes and comma...)' % (self.exclude_list,e)) raise Exception(
"Error reading exclude list : value %s, eval error %s (don't forget quotes and comma...)"
% (self.exclude_list, e)
)
options.extend(['--exclude="%s"' % x for x in excludes]) options.extend(['--exclude="%s"' % x for x in excludes])
if (self.rsync_module and not self.password_file): if self.rsync_module and not self.password_file:
raise Exception('You must specify a password file if you specify a rsync module') raise Exception("You must specify a password file if you specify a rsync module")
if (not self.rsync_module and not self.private_key): if not self.rsync_module and not self.private_key:
raise Exception('If you don''t use SSH, you must specify a rsync module') raise Exception("If you don't use SSH, you must specify a rsync module")
#rsync_re = re.compile('(?P<server>[^:]*)::(?P<export>[^/]*)/(?P<path>.*)') # rsync_re = re.compile('(?P<server>[^:]*)::(?P<export>[^/]*)/(?P<path>.*)')
#ssh_re = re.compile('((?P<user>.*)@)?(?P<server>[^:]*):(?P<path>/.*)') # ssh_re = re.compile('((?P<user>.*)@)?(?P<server>[^:]*):(?P<path>/.*)')
# Add ssh connection params # Add ssh connection params
if self.rsync_module: if self.rsync_module:
# Case of rsync exports # Case of rsync exports
if self.password_file: if self.password_file:
options.append('--password-file="%s"' % self.password_file) options.append('--password-file="%s"' % self.password_file)
backup_source = '%s@%s::%s%s' % (self.remote_user, self.server_name, self.rsync_module, self.remote_dir) backup_source = "%s@%s::%s%s" % (self.remote_user, self.server_name, self.rsync_module, self.remote_dir)
else: else:
# case of rsync + ssh # case of rsync + ssh
ssh_params = ['-o StrictHostKeyChecking=no'] ssh_params = ["-o StrictHostKeyChecking=no"]
if self.private_key: if self.private_key:
ssh_params.append('-i %s' % self.private_key) ssh_params.append("-i %s" % self.private_key)
if self.cipher_spec: if self.cipher_spec:
ssh_params.append('-c %s' % self.cipher_spec) ssh_params.append("-c %s" % self.cipher_spec)
if self.ssh_port != 22: if self.ssh_port != 22:
ssh_params.append('-p %i' % self.ssh_port) ssh_params.append("-p %i" % self.ssh_port)
options.append('-e "/usr/bin/ssh %s"' % (" ".join(ssh_params))) options.append('-e "/usr/bin/ssh %s"' % (" ".join(ssh_params)))
backup_source = '%s@%s:%s' % (self.remote_user,self.server_name,self.remote_dir) backup_source = "%s@%s:%s" % (self.remote_user, self.server_name, self.remote_dir)
# ensure there is a slash at end # ensure there is a slash at end
if backup_source[-1] != '/': if backup_source[-1] != "/":
backup_source += '/' backup_source += "/"
options_params = " ".join(options) options_params = " ".join(options)
cmd = '/usr/bin/rsync %s %s %s 2>&1' % (options_params,backup_source,dest_dir) cmd = "/usr/bin/rsync %s %s %s 2>&1" % (options_params, backup_source, dest_dir)
self.logger.debug("[%s] rsync : %s",self.backup_name,cmd) self.logger.debug("[%s] rsync : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
self.line = '' self.line = ""
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
def ondata(data,context):
def ondata(data, context):
if context.verbose: if context.verbose:
print(data) print(data)
context.logger.debug(data) context.logger.debug(data)
log = monitor_stdout(process,ondata,self) log = monitor_stdout(process, ondata, self)
reg_total_files = re.compile('Number of files: (?P<file>\d+)') reg_total_files = re.compile(r"Number of files: (?P<file>\d+)")
reg_transferred_files = re.compile('Number of .*files transferred: (?P<file>\d+)') reg_transferred_files = re.compile(r"Number of .*files transferred: (?P<file>\d+)")
for l in log.splitlines(): for raw_line in log.splitlines():
line = l.replace(',','') line = raw_line.replace(",", "")
m = reg_total_files.match(line) m = reg_total_files.match(line)
if m: if m:
stats['total_files_count'] += int(m.groupdict()['file']) stats["total_files_count"] += int(m.groupdict()["file"])
m = reg_transferred_files.match(line) m = reg_transferred_files.match(line)
if m: if m:
stats['written_files_count'] += int(m.groupdict()['file']) stats["written_files_count"] += int(m.groupdict()["file"])
if line.startswith('Total file size:'): if line.startswith("Total file size:"):
stats['total_bytes'] += int(line.split(':')[1].split()[0]) stats["total_bytes"] += int(line.split(":")[1].split()[0])
if line.startswith('Total transferred file size:'): if line.startswith("Total transferred file size:"):
stats['written_bytes'] += int(line.split(':')[1].split()[0]) stats["written_bytes"] += int(line.split(":")[1].split()[0])
returncode = process.returncode returncode = process.returncode
## deal with exit code 24 (file vanished) ## deal with exit code 24 (file vanished)
if (returncode == 24): if returncode == 24:
self.logger.warning("[" + self.backup_name + "] Note: some files vanished before transfer") self.logger.warning("[" + self.backup_name + "] Note: some files vanished before transfer")
elif (returncode == 23): elif returncode == 23:
self.logger.warning("[" + self.backup_name + "] unable so set uid on some files") self.logger.warning("[" + self.backup_name + "] unable so set uid on some files")
elif (returncode != 0): elif returncode != 0:
self.logger.error("[" + self.backup_name + "] shell program exited with error code ", str(returncode)) self.logger.error("[" + self.backup_name + "] shell program exited with error code ", str(returncode))
raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd, log[-512:]) raise Exception(
"[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd, log[-512:]
)
else: else:
print(cmd) print(cmd)
#we take a snapshot of last_backup if everything went well # we take a snapshot of last_backup if everything went well
finaldest = os.path.join(self.backup_dir,self.backup_start_date) finaldest = os.path.join(self.backup_dir, self.backup_start_date)
self.logger.debug("[%s] snapshoting last_backup directory from %s to %s" ,self.backup_name,dest_dir,finaldest) self.logger.debug("[%s] snapshoting last_backup directory from %s to %s", self.backup_name, dest_dir, finaldest)
if not os.path.isdir(finaldest): if not os.path.isdir(finaldest):
if not self.dry_run: if not self.dry_run:
cmd = "/bin/btrfs subvolume snapshot %s %s"%(dest_dir,finaldest) cmd = "/bin/btrfs subvolume snapshot %s %s" % (dest_dir, finaldest)
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
log = monitor_stdout(process,'',self) log = monitor_stdout(process, "", self)
returncode = process.returncode returncode = process.returncode
if (returncode != 0): if returncode != 0:
self.logger.error("[" + self.backup_name + "] shell program exited with error code " + str(returncode)) self.logger.error("[" + self.backup_name + "] shell program exited with error code " + str(returncode))
raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd, log[-512:]) raise Exception(
"[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd, log[-512:]
)
else: else:
self.logger.info("[" + self.backup_name + "] snapshot directory created %s"%finaldest) self.logger.info("[" + self.backup_name + "] snapshot directory created %s" % finaldest)
else: else:
print(("btrfs snapshot of %s to %s"%(dest_dir,finaldest))) print(("btrfs snapshot of %s to %s" % (dest_dir, finaldest)))
else: else:
raise Exception('snapshot directory already exists : %s' %finaldest) raise Exception("snapshot directory already exists : %s" % finaldest)
self.logger.debug("[%s] touching datetime of target directory %s" ,self.backup_name,finaldest) self.logger.debug("[%s] touching datetime of target directory %s", self.backup_name, finaldest)
print((os.popen('touch "%s"' % finaldest).read())) print(os.popen('touch "%s"' % finaldest).read())
stats['backup_location'] = finaldest stats["backup_location"] = finaldest
stats['status']='OK' stats["status"] = "OK"
stats['log']='ssh+rsync+btrfs backup from %s OK, %d bytes written for %d changed files' % (backup_source,stats['written_bytes'],stats['written_files_count']) stats["log"] = "ssh+rsync+btrfs backup from %s OK, %d bytes written for %d changed files" % (
backup_source,
stats["written_bytes"],
stats["written_files_count"],
)
except BaseException as e: except BaseException as e:
stats['status']='ERROR' stats["status"] = "ERROR"
stats['log']=str(e) stats["log"] = str(e)
raise raise
finally: finally:
self.remove_lock() self.remove_lock()
def get_latest_backup(self,current): def get_latest_backup(self, current):
result = [] result = []
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
filelist.reverse() filelist.reverse()
full = '' # full = ''
r_full = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$') r_full = re.compile(r"^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
r_partial = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}.rsync$') r_partial = re.compile(r"^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}\.rsync$")
# we take all latest partials younger than the latest full and the latest full # we take all latest partials younger than the latest full and the latest full
for item in filelist: for item in filelist:
if r_partial.match(item) and item<current: if r_partial.match(item) and item < current:
result.append(item) result.append(item)
elif r_full.match(item) and item<current: elif r_full.match(item) and item < current:
result.append(item) result.append(item)
break break
return result return result
def register_existingbackups(self): def register_existingbackups(self):
"""scan backup dir and insert stats in database""" """scan backup dir and insert stats in database"""
registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))] registered = [
b["backup_location"]
for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
]
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$') p = re.compile(r"^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
for item in filelist: for item in filelist:
if p.match(item): if p.match(item):
dir_name = os.path.join(self.backup_dir,item) dir_name = os.path.join(self.backup_dir, item)
if not dir_name in registered: if dir_name not in registered:
start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat() start = datetime.datetime.strptime(item, "%Y%m%d-%Hh%Mm%S").isoformat()
if fileisodate(dir_name)>start: if fileisodate(dir_name) > start:
stop = fileisodate(dir_name) stop = fileisodate(dir_name)
else: else:
stop = start stop = start
self.logger.info('Registering %s started on %s',dir_name,start) self.logger.info("Registering %s started on %s", dir_name, start)
self.logger.debug(' Disk usage %s','du -sb "%s"' % dir_name) self.logger.debug(" Disk usage %s", 'du -sb "%s"' % dir_name)
if not self.dry_run: if not self.dry_run:
size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0]) size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
else: else:
size_bytes = 0 size_bytes = 0
self.logger.debug(' Size in bytes : %i',size_bytes) self.logger.debug(" Size in bytes : %i", size_bytes)
if not self.dry_run: if not self.dry_run:
self.dbstat.add(self.backup_name,self.server_name,'',\ self.dbstat.add(
backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name) self.backup_name,
self.server_name,
"",
backup_start=start,
backup_end=stop,
status="OK",
total_bytes=size_bytes,
backup_location=dir_name,
)
else: else:
self.logger.info('Skipping %s, already registered',dir_name) self.logger.info("Skipping %s, already registered", dir_name)
def is_pid_still_running(self, lockfile):
def is_pid_still_running(self,lockfile):
f = open(lockfile) f = open(lockfile)
lines = f.readlines() lines = f.readlines()
f.close() f.close()
if len(lines)==0 : if len(lines) == 0:
self.logger.info("[" + self.backup_name + "] empty lock file, removing...") self.logger.info("[" + self.backup_name + "] empty lock file, removing...")
return False return False
for line in lines: for line in lines:
if line.startswith('pid='): if line.startswith("pid="):
pid = line.split('=')[1].strip() pid = line.split("=")[1].strip()
if os.path.exists("/proc/" + pid): if os.path.exists("/proc/" + pid):
self.logger.info("[" + self.backup_name + "] process still there") self.logger.info("[" + self.backup_name + "] process still there")
return True return True
@@ -307,54 +333,63 @@ class backup_rsync_btrfs(backup_generic):
self.logger.info("[" + self.backup_name + "] incorrrect lock file : no pid line") self.logger.info("[" + self.backup_name + "] incorrrect lock file : no pid line")
return False return False
def set_lock(self): def set_lock(self):
self.logger.debug("[" + self.backup_name + "] setting lock") self.logger.debug("[" + self.backup_name + "] setting lock")
#TODO: improve for race condition # TODO: improve for race condition
#TODO: also check if process is really there # TODO: also check if process is really there
if os.path.isfile(self.backup_dir + '/lock'): if os.path.isfile(self.backup_dir + "/lock"):
self.logger.debug("[" + self.backup_name + "] File " + self.backup_dir + '/lock already exist') self.logger.debug("[" + self.backup_name + "] File " + self.backup_dir + "/lock already exist")
if self.is_pid_still_running(self.backup_dir + '/lock')==False: if not self.is_pid_still_running(self.backup_dir + "/lock"):
self.logger.info("[" + self.backup_name + "] removing lock file " + self.backup_dir + '/lock') self.logger.info("[" + self.backup_name + "] removing lock file " + self.backup_dir + "/lock")
os.unlink(self.backup_dir + '/lock') os.unlink(self.backup_dir + "/lock")
else: else:
return False return False
lockfile = open(self.backup_dir + '/lock',"w") lockfile = open(self.backup_dir + "/lock", "w")
# Write all the lines at once: # Write all the lines at once:
lockfile.write('pid='+str(os.getpid())) lockfile.write("pid=" + str(os.getpid()))
lockfile.write('\nbackup_time=' + self.backup_start_date) lockfile.write("\nbackup_time=" + self.backup_start_date)
lockfile.close() lockfile.close()
return True return True
def remove_lock(self): def remove_lock(self):
self.logger.debug("[%s] removing lock",self.backup_name ) self.logger.debug("[%s] removing lock", self.backup_name)
os.unlink(self.backup_dir + '/lock') os.unlink(self.backup_dir + "/lock")
class backup_rsync__btrfs_ssh(backup_rsync_btrfs): class backup_rsync__btrfs_ssh(backup_rsync_btrfs):
"""Backup a directory on remote server with rsync,ssh and btrfs protocol (requires rsync software on remote host)""" """Backup a directory on remote server with rsync,ssh and btrfs protocol (requires rsync software on remote host)"""
type = 'rsync+btrfs+ssh'
required_params = backup_generic.required_params + ['remote_user','remote_dir','private_key'] type = "rsync+btrfs+ssh"
optional_params = backup_generic.optional_params + ['compression','bwlimit','ssh_port','exclude_list','protect_args','overload_args','cipher_spec'] required_params = backup_generic.required_params + ["remote_user", "remote_dir", "private_key"]
cipher_spec = '' optional_params = backup_generic.optional_params + [
"compression",
"bwlimit",
"ssh_port",
"exclude_list",
"protect_args",
"overload_args",
"cipher_spec",
]
cipher_spec = ""
register_driver(backup_rsync_btrfs) register_driver(backup_rsync_btrfs)
register_driver(backup_rsync__btrfs_ssh) register_driver(backup_rsync__btrfs_ssh)
if __name__=='__main__': if __name__ == "__main__":
logger = logging.getLogger('tisbackup') logger = logging.getLogger("tisbackup")
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
handler = logging.StreamHandler() handler = logging.StreamHandler()
handler.setFormatter(formatter) handler.setFormatter(formatter)
logger.addHandler(handler) logger.addHandler(handler)
cp = ConfigParser() cp = ConfigParser()
cp.read('/opt/tisbackup/configtest.ini') cp.read("/opt/tisbackup/configtest.ini")
dbstat = BackupStat('/backup/data/log/tisbackup.sqlite') dbstat = BackupStat("/backup/data/log/tisbackup.sqlite")
b = backup_rsync('htouvet','/backup/data/htouvet',dbstat) b = backup_rsync_btrfs("htouvet", "/backup/data/htouvet", dbstat)
b.read_config(cp) b.read_config(cp)
b.process_backup() b.process_backup()
print((b.checknagios())) print(b.checknagios())
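Stripped of logging, stats parsing and error handling, the btrfs variant above reduces to three shell steps: create a last_backup subvolume once, rsync into it, then snapshot it under a dated name so the --link-dest hard-link trick is no longer needed. A minimal sketch with illustrative paths and helper name:

import subprocess

def btrfs_cycle(backup_dir, backup_source, stamp):
    """One backup round: rsync into last_backup, then snapshot it."""
    last = "%s/last_backup" % backup_dir
    snap = "%s/%s" % (backup_dir, stamp)
    # One-time setup, kept here as a comment:
    # subprocess.run(["btrfs", "subvolume", "create", last], check=True)
    # backup_source is expected to end with "/" so contents are copied.
    subprocess.run(["rsync", "-rt", "--delete-after", backup_source, last], check=True)
    subprocess.run(["btrfs", "subvolume", "snapshot", last, snap], check=True)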
View File
@@ -19,11 +19,10 @@
# ----------------------------------------------------------------------- # -----------------------------------------------------------------------
import sys import sys
try: try:
sys.stderr = open('/dev/null') # Silence silly warnings from paramiko sys.stderr = open("/dev/null") # Silence silly warnings from paramiko
import paramiko import paramiko
except ImportError as e: except ImportError as e:
print("Error : can not load paramiko library %s" % e) print("Error : can not load paramiko library %s" % e)
@@ -36,14 +35,15 @@ from .common import *
class backup_samba4(backup_generic): class backup_samba4(backup_generic):
"""Backup a samba4 databases as gzipped tdbs file through ssh""" """Backup a samba4 databases as gzipped tdbs file through ssh"""
type = 'samba4'
required_params = backup_generic.required_params + ['private_key'] type = "samba4"
optional_params = backup_generic.optional_params + ['root_dir_samba'] required_params = backup_generic.required_params + ["private_key"]
optional_params = backup_generic.optional_params + ["root_dir_samba"]
root_dir_samba = "/var/lib/samba/" root_dir_samba = "/var/lib/samba/"
def do_backup(self,stats):
self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date)
def do_backup(self, stats):
self.dest_dir = os.path.join(self.backup_dir, self.backup_start_date)
if not os.path.isdir(self.dest_dir): if not os.path.isdir(self.dest_dir):
if not self.dry_run: if not self.dry_run:
@@ -51,118 +51,128 @@ class backup_samba4(backup_generic):
else: else:
print('mkdir "%s"' % self.dest_dir) print('mkdir "%s"' % self.dest_dir)
else: else:
raise Exception('backup destination directory already exists : %s' % self.dest_dir) raise Exception("backup destination directory already exists : %s" % self.dest_dir)
self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key) self.logger.debug("[%s] Connecting to %s with user root and key %s", self.backup_name, self.server_name, self.private_key)
try: try:
mykey = paramiko.RSAKey.from_private_key_file(self.private_key) mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
except paramiko.SSHException: except paramiko.SSHException:
#mykey = paramiko.DSSKey.from_private_key_file(self.private_key) # mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key) mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)
self.ssh = paramiko.SSHClient() self.ssh = paramiko.SSHClient()
self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.ssh.connect(self.server_name,username='root',pkey = mykey, port=self.ssh_port) self.ssh.connect(self.server_name, username="root", pkey=mykey, port=self.ssh_port)
stats['log']= "Successfully backuping processed to the following databases :" stats["log"] = "Successfully backuping processed to the following databases :"
stats['status']='List' stats["status"] = "List"
dir_ldbs = os.path.join(self.root_dir_samba+'/private/sam.ldb.d/') dir_ldbs = os.path.join(self.root_dir_samba + "/private/sam.ldb.d/")
cmd = 'ls %s/*.ldb 2> /dev/null' % dir_ldbs cmd = "ls %s/*.ldb 2> /dev/null" % dir_ldbs
self.logger.debug('[%s] List databases: %s',self.backup_name,cmd) self.logger.debug("[%s] List databases: %s", self.backup_name, cmd)
(error_code,output) = ssh_exec(cmd,ssh=self.ssh) (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, non-zero exit code (%i) for "%s"' % (error_code, cmd))
databases = output.split('\n') databases = output.split("\n")
for database in databases: for database in databases:
if database != "": if database != "":
self.db_name = database.rstrip() self.db_name = database.rstrip()
self.do_mysqldump(stats) self.do_mysqldump(stats)
def do_mysqldump(self, stats):
def do_mysqldump(self,stats): # t = datetime.datetime.now()
t = datetime.datetime.now() # backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
# dump db # dump db
stats['status']='Dumping' stats["status"] = "Dumping"
cmd = 'tdbbackup -s .tisbackup ' + self.db_name cmd = "tdbbackup -s .tisbackup " + self.db_name
self.logger.debug('[%s] Dump DB : %s',self.backup_name,cmd) self.logger.debug("[%s] Dump DB : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
(error_code,output) = ssh_exec(cmd,ssh=self.ssh) (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
print(output) print(output)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, non-zero exit code (%i) for "%s"' % (error_code, cmd))
# zip the file # zip the file
stats['status']='Zipping' stats["status"] = "Zipping"
cmd = 'gzip -f "%s.tisbackup"' % self.db_name cmd = 'gzip -f "%s.tisbackup"' % self.db_name
self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd) self.logger.debug("[%s] Compress backup : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
(error_code,output) = ssh_exec(cmd,ssh=self.ssh) (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, non-zero exit code (%i) for "%s"' % (error_code, cmd))
# get the file # get the file
stats['status']='SFTP' stats["status"] = "SFTP"
filepath = self.db_name + '.tisbackup.gz' filepath = self.db_name + ".tisbackup.gz"
localpath = os.path.join(self.dest_dir , os.path.basename(self.db_name) + '.tisbackup.gz') localpath = os.path.join(self.dest_dir, os.path.basename(self.db_name) + ".tisbackup.gz")
self.logger.debug('[%s] Get gz backup with sftp on %s from %s to %s',self.backup_name,self.server_name,filepath,localpath) self.logger.debug("[%s] Get gz backup with sftp on %s from %s to %s", self.backup_name, self.server_name, filepath, localpath)
if not self.dry_run: if not self.dry_run:
transport = self.ssh.get_transport() transport = self.ssh.get_transport()
sftp = paramiko.SFTPClient.from_transport(transport) sftp = paramiko.SFTPClient.from_transport(transport)
sftp.get(filepath, localpath) sftp.get(filepath, localpath)
sftp.close() sftp.close()
if not self.dry_run: if not self.dry_run:
stats['total_files_count']=1 + stats.get('total_files_count', 0) stats["total_files_count"] = 1 + stats.get("total_files_count", 0)
stats['written_files_count']=1 + stats.get('written_files_count', 0) stats["written_files_count"] = 1 + stats.get("written_files_count", 0)
stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0) stats["total_bytes"] = os.stat(localpath).st_size + stats.get("total_bytes", 0)
stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0) stats["written_bytes"] = os.stat(localpath).st_size + stats.get("written_bytes", 0)
stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name) stats["log"] = '%s "%s"' % (stats["log"], self.db_name)
stats['backup_location'] = self.dest_dir stats["backup_location"] = self.dest_dir
stats['status']='RMTemp' stats["status"] = "RMTemp"
cmd = 'rm -f "%s"' % filepath cmd = 'rm -f "%s"' % filepath
self.logger.debug('[%s] Remove temp gzip : %s',self.backup_name,cmd) self.logger.debug("[%s] Remove temp gzip : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
(error_code,output) = ssh_exec(cmd,ssh=self.ssh) (error_code, output) = ssh_exec(cmd, ssh=self.ssh)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, non-zero exit code (%i) for "%s"' % (error_code, cmd))
stats['status']='OK' stats["status"] = "OK"
def register_existingbackups(self): def register_existingbackups(self):
"""scan backup dir and insert stats in database""" """scan backup dir and insert stats in database"""
registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))] registered = [
b["backup_location"]
for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
]
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$') p = re.compile(r"^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$")
for item in filelist: for item in filelist:
if p.match(item): if p.match(item):
dir_name = os.path.join(self.backup_dir,item) dir_name = os.path.join(self.backup_dir, item)
if not dir_name in registered: if dir_name not in registered:
start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat() start = datetime.datetime.strptime(item, "%Y%m%d-%Hh%Mm%S").isoformat()
if fileisodate(dir_name)>start: if fileisodate(dir_name) > start:
stop = fileisodate(dir_name) stop = fileisodate(dir_name)
else: else:
stop = start stop = start
self.logger.info('Registering %s started on %s',dir_name,start) self.logger.info("Registering %s started on %s", dir_name, start)
self.logger.debug(' Disk usage %s','du -sb "%s"' % dir_name) self.logger.debug(" Disk usage %s", 'du -sb "%s"' % dir_name)
if not self.dry_run: if not self.dry_run:
size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0]) size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
else: else:
size_bytes = 0 size_bytes = 0
self.logger.debug(' Size in bytes : %i',size_bytes) self.logger.debug(" Size in bytes : %i", size_bytes)
if not self.dry_run: if not self.dry_run:
self.dbstat.add(self.backup_name,self.server_name,'',\ self.dbstat.add(
backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name) self.backup_name,
self.server_name,
"",
backup_start=start,
backup_end=stop,
status="OK",
total_bytes=size_bytes,
backup_location=dir_name,
)
else: else:
self.logger.info('Skipping %s, already registered',dir_name) self.logger.info("Skipping %s, already registered", dir_name)
register_driver(backup_samba4) register_driver(backup_samba4)
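Each ldb listed above runs through the same four remote stages: tdbbackup dump, gzip, SFTP download, then removal of the remote temp file. A condensed sketch of that pipeline; the helper below is illustrative and assumes an already-open paramiko SSH client and SFTP session:

def backup_one_ldb(ssh, sftp, db_path, dest_dir):
    """Dump, compress, fetch and clean up one remote samba4 database."""
    def run(cmd):
        # exec_command returns (stdin, stdout, stderr); wait for the exit code
        _, stdout, _ = ssh.exec_command(cmd)
        return stdout.channel.recv_exit_status()

    assert run("tdbbackup -s .tisbackup %s" % db_path) == 0   # dump
    assert run('gzip -f "%s.tisbackup"' % db_path) == 0       # compress
    local = "%s/%s.tisbackup.gz" % (dest_dir, db_path.rsplit("/", 1)[-1])
    sftp.get(db_path + ".tisbackup.gz", local)                # fetch
    assert run('rm -f "%s.tisbackup.gz"' % db_path) == 0      # clean up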
View File
@@ -19,11 +19,10 @@
# ----------------------------------------------------------------------- # -----------------------------------------------------------------------
import sys import sys
try: try:
sys.stderr = open('/dev/null') # Silence silly warnings from paramiko sys.stderr = open("/dev/null") # Silence silly warnings from paramiko
import paramiko import paramiko
except ImportError as e: except ImportError as e:
print("Error : can not load paramiko library %s" % e) print("Error : can not load paramiko library %s" % e)
@@ -40,122 +39,137 @@ from .common import *
class backup_sqlserver(backup_generic): class backup_sqlserver(backup_generic):
"""Backup a SQLSERVER database as gzipped sql file through ssh""" """Backup a SQLSERVER database as gzipped sql file through ssh"""
type = 'sqlserver+ssh'
required_params = backup_generic.required_params + ['db_name','private_key'] type = "sqlserver+ssh"
optional_params = ['username', 'remote_backup_dir', 'sqlserver_before_2005', 'db_server_name', 'db_user', 'db_password'] required_params = backup_generic.required_params + ["db_name", "private_key"]
db_name='' optional_params = ["username", "remote_backup_dir", "sqlserver_before_2005", "db_server_name", "db_user", "db_password"]
db_user='' db_name = ""
db_password='' db_user = ""
db_password = ""
userdb = "-E" userdb = "-E"
username='Administrateur' username = "Administrateur"
remote_backup_dir = r'c:/WINDOWS/Temp/' remote_backup_dir = r"c:/WINDOWS/Temp/"
sqlserver_before_2005 = False sqlserver_before_2005 = False
db_server_name = "localhost" db_server_name = "localhost"
def do_backup(self, stats):
def do_backup(self,stats):
try: try:
mykey = paramiko.RSAKey.from_private_key_file(self.private_key) mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
except paramiko.SSHException: except paramiko.SSHException:
#mykey = paramiko.DSSKey.from_private_key_file(self.private_key) # mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key) mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)
self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key) self.logger.debug("[%s] Connecting to %s with user root and key %s", self.backup_name, self.server_name, self.private_key)
ssh = paramiko.SSHClient() ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(self.server_name,username=self.username,pkey=mykey, port=self.ssh_port) ssh.connect(self.server_name, username=self.username, pkey=mykey, port=self.ssh_port)
t = datetime.datetime.now() t = datetime.datetime.now()
backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S') backup_start_date = t.strftime("%Y%m%d-%Hh%Mm%S")
backup_file = self.remote_backup_dir + '/' + self.db_name + '-' + backup_start_date + '.bak' backup_file = self.remote_backup_dir + "/" + self.db_name + "-" + backup_start_date + ".bak"
if not self.db_user == '': if not self.db_user == "":
self.userdb = '-U %s -P %s' % ( self.db_user, self.db_password ) self.userdb = "-U %s -P %s" % (self.db_user, self.db_password)
# dump db # dump db
stats['status']='Dumping' stats["status"] = "Dumping"
if self.sqlserver_before_2005: if self.sqlserver_before_2005:
cmd = """osql -E -Q "BACKUP DATABASE [%s] cmd = """osql -E -Q "BACKUP DATABASE [%s]
TO DISK='%s' TO DISK='%s'
WITH FORMAT" """ % ( self.db_name, backup_file ) WITH FORMAT" """ % (self.db_name, backup_file)
else: else:
cmd = """sqlcmd %s -S "%s" -d master -Q "BACKUP DATABASE [%s] cmd = """sqlcmd %s -S "%s" -d master -Q "BACKUP DATABASE [%s]
TO DISK = N'%s' TO DISK = N'%s'
WITH INIT, NOUNLOAD , WITH INIT, NOUNLOAD ,
NAME = N'Backup %s', NOSKIP ,STATS = 10, NOFORMAT" """ % (self.userdb, self.db_server_name, self.db_name, backup_file ,self.db_name ) NAME = N'Backup %s', NOSKIP ,STATS = 10, NOFORMAT" """ % (
self.logger.debug('[%s] Dump DB : %s',self.backup_name,cmd) self.userdb,
self.db_server_name,
self.db_name,
backup_file,
self.db_name,
)
self.logger.debug("[%s] Dump DB : %s", self.backup_name, cmd)
try: try:
if not self.dry_run: if not self.dry_run:
(error_code,output) = ssh_exec(cmd,ssh=ssh) (error_code, output) = ssh_exec(cmd, ssh=ssh)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))
# zip the file # zip the file
stats['status']='Zipping' stats["status"] = "Zipping"
cmd = 'gzip "%s"' % backup_file cmd = 'gzip "%s"' % backup_file
self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd) self.logger.debug("[%s] Compress backup : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
(error_code,output) = ssh_exec(cmd,ssh=ssh) (error_code, output) = ssh_exec(cmd, ssh=ssh)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))
# get the file # get the file
stats['status']='SFTP' stats["status"] = "SFTP"
filepath = backup_file + '.gz' filepath = backup_file + ".gz"
localpath = os.path.join(self.backup_dir , self.db_name + '-' + backup_start_date + '.bak.gz') localpath = os.path.join(self.backup_dir, self.db_name + "-" + backup_start_date + ".bak.gz")
self.logger.debug('[%s] Get gz backup with sftp on %s from %s to %s',self.backup_name,self.server_name,filepath,localpath) self.logger.debug("[%s] Get gz backup with sftp on %s from %s to %s", self.backup_name, self.server_name, filepath, localpath)
if not self.dry_run: if not self.dry_run:
transport = ssh.get_transport() transport = ssh.get_transport()
sftp = paramiko.SFTPClient.from_transport(transport) sftp = paramiko.SFTPClient.from_transport(transport)
sftp.get(filepath, localpath) sftp.get(filepath, localpath)
sftp.close() sftp.close()
if not self.dry_run: if not self.dry_run:
stats['total_files_count']=1 stats["total_files_count"] = 1
stats['written_files_count']=1 stats["written_files_count"] = 1
stats['total_bytes']=os.stat(localpath).st_size stats["total_bytes"] = os.stat(localpath).st_size
stats['written_bytes']=os.stat(localpath).st_size stats["written_bytes"] = os.stat(localpath).st_size
stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,self.db_name, stats['written_bytes'], localpath) stats["log"] = "gzip dump of DB %s:%s (%d bytes) to %s" % (self.server_name, self.db_name, stats["written_bytes"], localpath)
stats['backup_location'] = localpath stats["backup_location"] = localpath
finally: finally:
stats['status']='RMTemp' stats["status"] = "RMTemp"
cmd = 'rm -f "%s" "%s"' % ( backup_file + '.gz', backup_file ) cmd = 'rm -f "%s" "%s"' % (backup_file + ".gz", backup_file)
self.logger.debug('[%s] Remove temp gzip : %s',self.backup_name,cmd) self.logger.debug("[%s] Remove temp gzip : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
(error_code,output) = ssh_exec(cmd,ssh=ssh) (error_code, output) = ssh_exec(cmd, ssh=ssh)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s", self.backup_name, cmd, output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code, cmd))
stats["status"] = "OK"
stats['status']='OK'
def register_existingbackups(self): def register_existingbackups(self):
"""scan backup dir and insert stats in database""" """scan backup dir and insert stats in database"""
registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))] registered = [
b["backup_location"]
for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
]
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).bak.gz$' % self.db_name) p = re.compile("^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).bak.gz$" % self.db_name)
for item in filelist: for item in filelist:
sr = p.match(item) sr = p.match(item)
if sr: if sr:
file_name = os.path.join(self.backup_dir,item) file_name = os.path.join(self.backup_dir, item)
start = datetime.datetime.strptime(sr.groups()[0],'%Y%m%d-%Hh%Mm%S').isoformat() start = datetime.datetime.strptime(sr.groups()[0], "%Y%m%d-%Hh%Mm%S").isoformat()
if not file_name in registered: if file_name not in registered:
self.logger.info('Registering %s from %s',file_name,fileisodate(file_name)) self.logger.info("Registering %s from %s", file_name, fileisodate(file_name))
size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0]) size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split("\t")[0])
self.logger.debug(' Size in bytes : %i',size_bytes) self.logger.debug(" Size in bytes : %i", size_bytes)
if not self.dry_run: if not self.dry_run:
self.dbstat.add(self.backup_name,self.server_name,'',\ self.dbstat.add(
backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name) self.backup_name,
self.server_name,
"",
backup_start=start,
backup_end=fileisodate(file_name),
status="OK",
total_bytes=size_bytes,
backup_location=file_name,
)
else: else:
self.logger.info('Skipping %s from %s, already registered',file_name,fileisodate(file_name)) self.logger.info("Skipping %s from %s, already registered", file_name, fileisodate(file_name))
register_driver(backup_sqlserver) register_driver(backup_sqlserver)
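
ssh_exec() is imported from .common and its body is not part of this diff. A sketch of the pattern it presumably wraps, using only documented paramiko calls (run_remote is an invented name):

import paramiko

def run_remote(cmd, host, username, key_path, port=22):
    # Run cmd over SSH, return (exit_code, combined stdout+stderr), as the drivers above expect.
    key = paramiko.RSAKey.from_private_key_file(key_path)
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(host, username=username, pkey=key, port=port)
    try:
        _stdin, stdout, stderr = ssh.exec_command(cmd)
        output = stdout.read().decode() + stderr.read().decode()
        return stdout.channel.recv_exit_status(), output  # blocks until the remote command exits
    finally:
        ssh.close()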

View File

@ -41,16 +41,16 @@ from .common import *
class backup_switch(backup_generic): class backup_switch(backup_generic):
"""Backup a startup-config on a switch""" """Backup a startup-config on a switch"""
type = 'switch'
required_params = backup_generic.required_params + ['switch_ip','switch_type'] type = "switch"
optional_params = backup_generic.optional_params + [ 'switch_user', 'switch_password']
switch_user = '' required_params = backup_generic.required_params + ["switch_ip", "switch_type"]
switch_password = '' optional_params = backup_generic.optional_params + ["switch_user", "switch_password"]
switch_user = ""
switch_password = ""
def switch_hp(self, filename): def switch_hp(self, filename):
s = socket.socket() s = socket.socket()
try: try:
s.connect((self.switch_ip, 23)) s.connect((self.switch_ip, 23))
@ -58,31 +58,31 @@ class backup_switch(backup_generic):
except: except:
raise raise
child=pexpect.spawn('telnet '+self.switch_ip) child = pexpect.spawn("telnet " + self.switch_ip)
time.sleep(1) time.sleep(1)
if self.switch_user != "": if self.switch_user != "":
child.sendline(self.switch_user) child.sendline(self.switch_user)
child.sendline(self.switch_password+'\r') child.sendline(self.switch_password + "\r")
else: else:
child.sendline(self.switch_password+'\r') child.sendline(self.switch_password + "\r")
try: try:
child.expect("#") child.expect("#")
except: except:
raise Exception("Bad Credentials") raise Exception("Bad Credentials")
child.sendline( "terminal length 1000\r") child.sendline("terminal length 1000\r")
child.expect("#") child.expect("#")
child.sendline( "show config\r") child.sendline("show config\r")
child.maxread = 100000000 child.maxread = 100000000
child.expect("Startup.+$") child.expect("Startup.+$")
lines = child.after lines = child.after
if "-- MORE --" in lines: if "-- MORE --" in lines:
raise Exception("Terminal lenght is not sufficient") raise Exception("Terminal lenght is not sufficient")
child.expect("#") child.expect("#")
lines += child.before lines += child.before
child.sendline("logout\r") child.sendline("logout\r")
child.send('y\r') child.send("y\r")
for line in lines.split("\n")[1:-1]: for line in lines.split("\n")[1:-1]:
open(filename,"a").write(line.strip()+"\n") open(filename, "a").write(line.strip() + "\n")
def switch_cisco(self, filename): def switch_cisco(self, filename):
s = socket.socket() s = socket.socket()
@ -92,38 +92,37 @@ class backup_switch(backup_generic):
except: except:
raise raise
child=pexpect.spawn('telnet '+self.switch_ip) child = pexpect.spawn("telnet " + self.switch_ip)
time.sleep(1) time.sleep(1)
if self.switch_user: if self.switch_user:
child.sendline(self.switch_user) child.sendline(self.switch_user)
child.expect('Password: ') child.expect("Password: ")
child.sendline(self.switch_password+'\r') child.sendline(self.switch_password + "\r")
try: try:
child.expect(">") child.expect(">")
except: except:
raise Exception("Bad Credentials") raise Exception("Bad Credentials")
child.sendline('enable\r') child.sendline("enable\r")
child.expect('Password: ') child.expect("Password: ")
child.sendline(self.switch_password+'\r') child.sendline(self.switch_password + "\r")
try: try:
child.expect("#") child.expect("#")
except: except:
raise Exception("Bad Credentials") raise Exception("Bad Credentials")
child.sendline( "terminal length 0\r") child.sendline("terminal length 0\r")
child.expect("#") child.expect("#")
child.sendline("show run\r") child.sendline("show run\r")
child.expect('Building configuration...') child.expect("Building configuration...")
child.expect("#") child.expect("#")
running_config = child.before running_config = child.before
child.sendline("show vlan\r") child.sendline("show vlan\r")
child.expect('VLAN') child.expect("VLAN")
child.expect("#") child.expect("#")
vlan = 'VLAN'+child.before vlan = "VLAN" + child.before
open(filename,"a").write(running_config+'\n'+vlan) open(filename, "a").write(running_config + "\n" + vlan)
child.send('exit\r') child.send("exit\r")
child.close() child.close()
def switch_linksys_SRW2024(self, filename): def switch_linksys_SRW2024(self, filename):
s = socket.socket() s = socket.socket()
try: try:
@ -132,48 +131,53 @@ class backup_switch(backup_generic):
except: except:
raise raise
child=pexpect.spawn('telnet '+self.switch_ip) child = pexpect.spawn("telnet " + self.switch_ip)
time.sleep(1) time.sleep(1)
if hasattr(self,'switch_password'): if hasattr(self, "switch_password"):
child.sendline(self.switch_user+'\t') child.sendline(self.switch_user + "\t")
child.sendline(self.switch_password+'\r') child.sendline(self.switch_password + "\r")
else: else:
child.sendline(self.switch_user+'\r') child.sendline(self.switch_user + "\r")
try: try:
child.expect('Menu') child.expect("Menu")
except: except:
raise Exception("Bad Credentials") raise Exception("Bad Credentials")
child.sendline('\032') child.sendline("\032")
child.expect('>') child.expect(">")
child.sendline('lcli') child.sendline("lcli")
child.expect("Name:") child.expect("Name:")
if hasattr(self,'switch_password'): if hasattr(self, "switch_password"):
child.send(self.switch_user+'\r'+self.switch_password+'\r') child.send(self.switch_user + "\r" + self.switch_password + "\r")
else: else:
child.sendline(self.switch_user) child.sendline(self.switch_user)
child.expect(".*#") child.expect(".*#")
child.sendline( "terminal datadump") child.sendline("terminal datadump")
child.expect("#") child.expect("#")
child.sendline( "show startup-config") child.sendline("show startup-config")
child.expect("#") child.expect("#")
lines = child.before lines = child.before
if "Unrecognized command" in lines: if "Unrecognized command" in lines:
raise Exception("Bad Credentials") raise Exception("Bad Credentials")
child.sendline("exit") child.sendline("exit")
#child.expect( ">") # child.expect( ">")
#child.sendline("logout") # child.sendline("logout")
for line in lines.split("\n")[1:-1]: for line in lines.split("\n")[1:-1]:
open(filename,"a").write(line.strip()+"\n") open(filename, "a").write(line.strip() + "\n")
def switch_dlink_DGS1210(self, filename): def switch_dlink_DGS1210(self, filename):
login_data = {'Login' : self.switch_user, 'Password' : self.switch_password, 'sellanId' : 0, 'sellan' : 0, 'lang_seqid' : 1} login_data = {"Login": self.switch_user, "Password": self.switch_password, "sellanId": 0, "sellan": 0, "lang_seqid": 1}
resp = requests.post('http://%s/form/formLoginApply' % self.switch_ip, data=login_data, headers={"Referer":'http://%s/www/login.html' % self.switch_ip}) resp = requests.post(
"http://%s/form/formLoginApply" % self.switch_ip,
data=login_data,
headers={"Referer": "http://%s/www/login.html" % self.switch_ip},
)
if "Wrong password" in resp.text: if "Wrong password" in resp.text:
raise Exception("Wrong password") raise Exception("Wrong password")
resp = requests.post("http://%s/BinFile/config.bin" % self.switch_ip, headers={"Referer":'http://%s/www/iss/013_download_cfg.html' % self.switch_ip}) resp = requests.post(
with open(filename, 'w') as f: "http://%s/BinFile/config.bin" % self.switch_ip, headers={"Referer": "http://%s/www/iss/013_download_cfg.html" % self.switch_ip}
)
with open(filename, "w") as f:
f.write(resp.content) f.write(resp.content)
def switch_dlink_DGS1510(self, filename): def switch_dlink_DGS1510(self, filename):
@ -184,12 +188,12 @@ class backup_switch(backup_generic):
except: except:
raise raise
child = pexpect.spawn('telnet ' + self.switch_ip) child = pexpect.spawn("telnet " + self.switch_ip)
time.sleep(1) time.sleep(1)
if self.switch_user: if self.switch_user:
child.sendline(self.switch_user) child.sendline(self.switch_user)
child.expect('Password:') child.expect("Password:")
child.sendline(self.switch_password + '\r') child.sendline(self.switch_password + "\r")
try: try:
child.expect("#") child.expect("#")
except: except:
@ -198,67 +202,66 @@ class backup_switch(backup_generic):
child.expect("#") child.expect("#")
child.sendline("show run\r") child.sendline("show run\r")
child.logfile_read = open(filename, "a") child.logfile_read = open(filename, "a")
child.expect('End of configuration file') child.expect("End of configuration file")
child.expect('#--') child.expect("#--")
child.expect("#") child.expect("#")
child.close() child.close()
myre = re.compile("#--+") myre = re.compile("#--+")
config = myre.split(open(filename).read())[2] config = myre.split(open(filename).read())[2]
with open(filename,'w') as f: with open(filename, "w") as f:
f.write(config) f.write(config)
def do_backup(self, stats):
def do_backup(self,stats):
try: try:
dest_filename = os.path.join(self.backup_dir,"%s-%s" % (self.backup_name,self.backup_start_date)) dest_filename = os.path.join(self.backup_dir, "%s-%s" % (self.backup_name, self.backup_start_date))
options = [] # options = []
options_params = " ".join(options) # options_params = " ".join(options)
if "LINKSYS-SRW2024" == self.switch_type: if "LINKSYS-SRW2024" == self.switch_type:
dest_filename += '.txt' dest_filename += ".txt"
self.switch_linksys_SRW2024(dest_filename) self.switch_linksys_SRW2024(dest_filename)
elif self.switch_type in [ "CISCO", ]: elif self.switch_type in [
dest_filename += '.txt' "CISCO",
]:
dest_filename += ".txt"
self.switch_cisco(dest_filename) self.switch_cisco(dest_filename)
elif self.switch_type in [ "HP-PROCURVE-4104GL", "HP-PROCURVE-2524" ]: elif self.switch_type in ["HP-PROCURVE-4104GL", "HP-PROCURVE-2524"]:
dest_filename += '.txt' dest_filename += ".txt"
self.switch_hp(dest_filename) self.switch_hp(dest_filename)
elif "DLINK-DGS1210" == self.switch_type: elif "DLINK-DGS1210" == self.switch_type:
dest_filename += '.bin' dest_filename += ".bin"
self.switch_dlink_DGS1210(dest_filename) self.switch_dlink_DGS1210(dest_filename)
elif "DLINK-DGS1510" == self.switch_type: elif "DLINK-DGS1510" == self.switch_type:
dest_filename += '.cfg' dest_filename += ".cfg"
self.switch_dlink_DGS1510(dest_filename) self.switch_dlink_DGS1510(dest_filename)
else: else:
raise Exception("Unknown Switch type") raise Exception("Unknown Switch type")
stats['total_files_count']=1 stats["total_files_count"] = 1
stats['written_files_count']=1 stats["written_files_count"] = 1
stats['total_bytes']= os.stat(dest_filename).st_size stats["total_bytes"] = os.stat(dest_filename).st_size
stats['written_bytes'] = stats['total_bytes'] stats["written_bytes"] = stats["total_bytes"]
stats['backup_location'] = dest_filename stats["backup_location"] = dest_filename
stats['status']='OK' stats["status"] = "OK"
stats['log']='Switch backup from %s OK, %d bytes written' % (self.server_name,stats['written_bytes']) stats["log"] = "Switch backup from %s OK, %d bytes written" % (self.server_name, stats["written_bytes"])
except BaseException as e: except BaseException as e:
stats['status']='ERROR' stats["status"] = "ERROR"
stats['log']=str(e) stats["log"] = str(e)
raise raise
register_driver(backup_switch) register_driver(backup_switch)
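
Every telnet driver in this file repeats the same pexpect loop: spawn telnet, answer the password prompt, wait for the shell prompt, send a show command, and read child.before. A stripped-down sketch of that shape (the prompt string and the command are placeholders, not taken from any particular switch):

import pexpect

def grab_config(ip, password, prompt="#"):
    child = pexpect.spawn("telnet " + ip, encoding="utf-8", timeout=30)
    child.expect("Password:")
    child.sendline(password)
    child.expect(prompt)  # pexpect.TIMEOUT here usually means bad credentials
    child.sendline("show running-config")
    child.expect(prompt)  # output printed before the prompt ends up in child.before
    config = child.before
    child.sendline("exit")
    child.close()
    return config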
if __name__=='__main__': if __name__ == "__main__":
logger = logging.getLogger('tisbackup') logger = logging.getLogger("tisbackup")
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
handler = logging.StreamHandler() handler = logging.StreamHandler()
handler.setFormatter(formatter) handler.setFormatter(formatter)
logger.addHandler(handler) logger.addHandler(handler)
cp = ConfigParser() cp = ConfigParser()
cp.read('/opt/tisbackup/configtest.ini') cp.read("/opt/tisbackup/configtest.ini")
b = backup_xva() b = backup_xva()
b.read_config(cp) b.read_config(cp)

View File

@ -26,6 +26,7 @@ import pyVmomi
import requests import requests
from pyVim.connect import Disconnect, SmartConnect from pyVim.connect import Disconnect, SmartConnect
from pyVmomi import vim, vmodl from pyVmomi import vim, vmodl
# Disable HTTPS verification warnings. # Disable HTTPS verification warnings.
from requests.packages import urllib3 from requests.packages import urllib3
@ -41,39 +42,36 @@ from stat import *
class backup_vmdk(backup_generic): class backup_vmdk(backup_generic):
type = 'esx-vmdk' type = "esx-vmdk"
required_params = backup_generic.required_params + ['esxhost','password_file','server_name'] required_params = backup_generic.required_params + ["esxhost", "password_file", "server_name"]
optional_params = backup_generic.optional_params + ['esx_port', 'prefix_clone', 'create_ovafile', 'halt_vm'] optional_params = backup_generic.optional_params + ["esx_port", "prefix_clone", "create_ovafile", "halt_vm"]
esx_port = 443 esx_port = 443
prefix_clone = "clone-" prefix_clone = "clone-"
create_ovafile = "no" create_ovafile = "no"
halt_vm = "no" halt_vm = "no"
def make_compatible_cookie(self,client_cookie): def make_compatible_cookie(self, client_cookie):
cookie_name = client_cookie.split("=", 1)[0] cookie_name = client_cookie.split("=", 1)[0]
cookie_value = client_cookie.split("=", 1)[1].split(";", 1)[0] cookie_value = client_cookie.split("=", 1)[1].split(";", 1)[0]
cookie_path = client_cookie.split("=", 1)[1].split(";", 1)[1].split( cookie_path = client_cookie.split("=", 1)[1].split(";", 1)[1].split(";", 1)[0].lstrip()
";", 1)[0].lstrip()
cookie_text = " " + cookie_value + "; $" + cookie_path cookie_text = " " + cookie_value + "; $" + cookie_path
# Make a cookie # Make a cookie
cookie = dict() cookie = dict()
cookie[cookie_name] = cookie_text cookie[cookie_name] = cookie_text
return cookie return cookie
def download_file(self, url, local_filename, cookie, headers):
def download_file(self,url, local_filename, cookie, headers): r = requests.get(url, stream=True, headers=headers, cookies=cookie, verify=False)
r = requests.get(url, stream=True, headers=headers,cookies=cookie,verify=False) with open(local_filename, "wb") as f:
with open(local_filename, 'wb') as f: for chunk in r.iter_content(chunk_size=1024 * 1024 * 64):
for chunk in r.iter_content(chunk_size=1024*1024*64):
if chunk: if chunk:
f.write(chunk) f.write(chunk)
f.flush() f.flush()
return local_filename return local_filename
def export_vmdks(self, vm):
def export_vmdks(self,vm):
HttpNfcLease = vm.ExportVm() HttpNfcLease = vm.ExportVm()
try: try:
infos = HttpNfcLease.info infos = HttpNfcLease.info
@ -82,153 +80,147 @@ class backup_vmdk(backup_generic):
for device_url in device_urls: for device_url in device_urls:
deviceId = device_url.key deviceId = device_url.key
deviceUrlStr = device_url.url deviceUrlStr = device_url.url
diskFileName = vm.name.replace(self.prefix_clone,'') + "-" + device_url.targetId diskFileName = vm.name.replace(self.prefix_clone, "") + "-" + device_url.targetId
diskUrlStr = deviceUrlStr.replace("*", self.esxhost) diskUrlStr = deviceUrlStr.replace("*", self.esxhost)
diskLocalPath = './' + diskFileName # diskLocalPath = './' + diskFileName
cookie = self.make_compatible_cookie(si._stub.cookie) cookie = self.make_compatible_cookie(si._stub.cookie)
headers = {'Content-Type': 'application/octet-stream'} headers = {"Content-Type": "application/octet-stream"}
self.logger.debug("[%s] exporting disk: %s" %(self.server_name,diskFileName)) self.logger.debug("[%s] exporting disk: %s" % (self.server_name, diskFileName))
self.download_file(diskUrlStr, diskFileName, cookie, headers) self.download_file(diskUrlStr, diskFileName, cookie, headers)
vmdks.append({"filename":diskFileName,"id":deviceId}) vmdks.append({"filename": diskFileName, "id": deviceId})
finally: finally:
HttpNfcLease.Complete() HttpNfcLease.Complete()
return vmdks return vmdks
def create_ovf(self, vm, vmdks):
def create_ovf(self,vm,vmdks):
ovfDescParams = vim.OvfManager.CreateDescriptorParams() ovfDescParams = vim.OvfManager.CreateDescriptorParams()
ovf = si.content.ovfManager.CreateDescriptor(vm, ovfDescParams) ovf = si.content.ovfManager.CreateDescriptor(vm, ovfDescParams)
root = ET.fromstring(ovf.ovfDescriptor) root = ET.fromstring(ovf.ovfDescriptor)
new_id = list(root[0][1].attrib.values())[0][1:3] new_id = list(root[0][1].attrib.values())[0][1:3]
ovfFiles = [] ovfFiles = []
for vmdk in vmdks: for vmdk in vmdks:
old_id = vmdk['id'][1:3] old_id = vmdk["id"][1:3]
id = vmdk['id'].replace(old_id,new_id) id = vmdk["id"].replace(old_id, new_id)
ovfFiles.append(vim.OvfManager.OvfFile(size=os.path.getsize(vmdk['filename']), path=vmdk['filename'], deviceId=id)) ovfFiles.append(vim.OvfManager.OvfFile(size=os.path.getsize(vmdk["filename"]), path=vmdk["filename"], deviceId=id))
ovfDescParams = vim.OvfManager.CreateDescriptorParams() ovfDescParams = vim.OvfManager.CreateDescriptorParams()
ovfDescParams.ovfFiles = ovfFiles; ovfDescParams.ovfFiles = ovfFiles
ovf = si.content.ovfManager.CreateDescriptor(vm, ovfDescParams) ovf = si.content.ovfManager.CreateDescriptor(vm, ovfDescParams)
ovf_filename = vm.name+".ovf" ovf_filename = vm.name + ".ovf"
self.logger.debug("[%s] creating ovf file: %s" %(self.server_name,ovf_filename)) self.logger.debug("[%s] creating ovf file: %s" % (self.server_name, ovf_filename))
with open(ovf_filename, "w") as f: with open(ovf_filename, "w") as f:
f.write(ovf.ovfDescriptor) f.write(ovf.ovfDescriptor)
return ovf_filename return ovf_filename
def create_ova(self,vm, vmdks, ovf_filename): def create_ova(self, vm, vmdks, ovf_filename):
ova_filename = vm.name+".ova" ova_filename = vm.name + ".ova"
vmdks.insert(0,{"filename":ovf_filename,"id":"false"}) vmdks.insert(0, {"filename": ovf_filename, "id": "false"})
self.logger.debug("[%s] creating ova file: %s" %(self.server_name,ova_filename)) self.logger.debug("[%s] creating ova file: %s" % (self.server_name, ova_filename))
with tarfile.open(ova_filename, "w") as tar: with tarfile.open(ova_filename, "w") as tar:
for vmdk in vmdks: for vmdk in vmdks:
tar.add(vmdk['filename']) tar.add(vmdk["filename"])
os.unlink(vmdk['filename']) os.unlink(vmdk["filename"])
return ova_filename return ova_filename
def clone_vm(self,vm): def clone_vm(self, vm):
task = self.wait_task(vm.CreateSnapshot_Task(name="backup",description="Automatic backup "+datetime.now().strftime("%Y-%m-%d %H:%M:%s"),memory=False,quiesce=True)) task = self.wait_task(
snapshot=task.info.result vm.CreateSnapshot_Task(
name="backup", description="Automatic backup " + datetime.now().strftime("%Y-%m-%d %H:%M:%s"), memory=False, quiesce=True
)
)
snapshot = task.info.result
prefix_vmclone = self.prefix_clone prefix_vmclone = self.prefix_clone
clone_name = prefix_vmclone + vm.name clone_name = prefix_vmclone + vm.name
datastore = '[%s]' % vm.datastore[0].name datastore = "[%s]" % vm.datastore[0].name
vmx_file = vim.vm.FileInfo(logDirectory=None, snapshotDirectory=None, suspendDirectory=None, vmPathName=datastore)
config = vim.vm.ConfigSpec(
vmx_file = vim.vm.FileInfo(logDirectory=None, name=clone_name, memoryMB=vm.summary.config.memorySizeMB, numCPUs=vm.summary.config.numCpu, files=vmx_file
snapshotDirectory=None, )
suspendDirectory=None,
vmPathName=datastore)
config = vim.vm.ConfigSpec(name=clone_name, memoryMB=vm.summary.config.memorySizeMB, numCPUs=vm.summary.config.numCpu, files=vmx_file)
hosts = datacenter.hostFolder.childEntity hosts = datacenter.hostFolder.childEntity
resource_pool = hosts[0].resourcePool resource_pool = hosts[0].resourcePool
self.wait_task(vmFolder.CreateVM_Task(config=config,pool=resource_pool)) self.wait_task(vmFolder.CreateVM_Task(config=config, pool=resource_pool))
new_vm = [x for x in vmFolder.childEntity if x.name == clone_name][0] new_vm = [x for x in vmFolder.childEntity if x.name == clone_name][0]
controller = vim.vm.device.VirtualDeviceSpec() controller = vim.vm.device.VirtualDeviceSpec()
controller.operation = vim.vm.device.VirtualDeviceSpec.Operation.add controller.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
controller.device = vim.vm.device.VirtualLsiLogicController(busNumber=0,sharedBus='noSharing') controller.device = vim.vm.device.VirtualLsiLogicController(busNumber=0, sharedBus="noSharing")
controller.device.key = 0 controller.device.key = 0
i=0 i = 0
vm_devices = [] vm_devices = []
clone_folder = "%s/" % "/".join(new_vm.summary.config.vmPathName.split('/')[:-1]) clone_folder = "%s/" % "/".join(new_vm.summary.config.vmPathName.split("/")[:-1])
for device in vm.config.hardware.device: for device in vm.config.hardware.device:
if device.__class__.__name__ == 'vim.vm.device.VirtualDisk': if device.__class__.__name__ == "vim.vm.device.VirtualDisk":
cur_vers = int(re.findall(r'\d{3,6}', device.backing.fileName)[0]) cur_vers = int(re.findall(r"\d{3,6}", device.backing.fileName)[0])
if cur_vers == 1: if cur_vers == 1:
source = device.backing.fileName.replace('-000001','') source = device.backing.fileName.replace("-000001", "")
else: else:
source = device.backing.fileName.replace('%d.' % cur_vers,'%d.' % ( cur_vers - 1 )) source = device.backing.fileName.replace("%d." % cur_vers, "%d." % (cur_vers - 1))
dest = clone_folder + source.split("/")[-1]
dest = clone_folder+source.split('/')[-1] disk_spec = vim.VirtualDiskManager.VirtualDiskSpec(diskType="sparseMonolithic", adapterType="ide")
disk_spec = vim.VirtualDiskManager.VirtualDiskSpec(diskType="sparseMonolithic",adapterType="ide") self.wait_task(si.content.virtualDiskManager.CopyVirtualDisk_Task(sourceName=source, destName=dest, destSpec=disk_spec))
self.wait_task(si.content.virtualDiskManager.CopyVirtualDisk_Task(sourceName=source,destName=dest,destSpec=disk_spec)) # self.wait_task(si.content.virtualDiskManager.ShrinkVirtualDisk_Task(dest))
# self.wait_task(si.content.virtualDiskManager.ShrinkVirtualDisk_Task(dest))
diskfileBacking = vim.vm.device.VirtualDisk.FlatVer2BackingInfo() diskfileBacking = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
diskfileBacking.fileName = dest diskfileBacking.fileName = dest
diskfileBacking.diskMode = "persistent" diskfileBacking.diskMode = "persistent"
vdisk_spec = vim.vm.device.VirtualDeviceSpec() vdisk_spec = vim.vm.device.VirtualDeviceSpec()
vdisk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add vdisk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
vdisk_spec.device = vim.vm.device.VirtualDisk(capacityInKB=10000 ,controllerKey=controller.device.key) vdisk_spec.device = vim.vm.device.VirtualDisk(capacityInKB=10000, controllerKey=controller.device.key)
vdisk_spec.device.key = 0 vdisk_spec.device.key = 0
vdisk_spec.device.backing = diskfileBacking vdisk_spec.device.backing = diskfileBacking
vdisk_spec.device.unitNumber = i vdisk_spec.device.unitNumber = i
vm_devices.append(vdisk_spec) vm_devices.append(vdisk_spec)
i+=1 i += 1
vm_devices.append(controller) vm_devices.append(controller)
config.deviceChange=vm_devices config.deviceChange = vm_devices
self.wait_task(new_vm.ReconfigVM_Task(config)) self.wait_task(new_vm.ReconfigVM_Task(config))
self.wait_task(snapshot.RemoveSnapshot_Task(removeChildren=True)) self.wait_task(snapshot.RemoveSnapshot_Task(removeChildren=True))
return new_vm return new_vm
def wait_task(self,task):
def wait_task(self, task):
while task.info.state in ["queued", "running"]: while task.info.state in ["queued", "running"]:
time.sleep(2) time.sleep(2)
self.logger.debug("[%s] %s",self.server_name,task.info.descriptionId) self.logger.debug("[%s] %s", self.server_name, task.info.descriptionId)
return task return task
def do_backup(self, stats):
def do_backup(self,stats):
try: try:
dest_dir = os.path.join(self.backup_dir,"%s" % self.backup_start_date) dest_dir = os.path.join(self.backup_dir, "%s" % self.backup_start_date)
if not os.path.isdir(dest_dir): if not os.path.isdir(dest_dir):
if not self.dry_run: if not self.dry_run:
os.makedirs(dest_dir) os.makedirs(dest_dir)
else: else:
print('mkdir "%s"' % dest_dir) print('mkdir "%s"' % dest_dir)
else: else:
raise Exception('backup destination directory already exists : %s' % dest_dir) raise Exception("backup destination directory already exists : %s" % dest_dir)
os.chdir(dest_dir) os.chdir(dest_dir)
user_esx, password_esx, null = open(self.password_file).read().split('\n') user_esx, password_esx, null = open(self.password_file).read().split("\n")
global si global si
si = SmartConnect(host=self.esxhost,user=user_esx,pwd=password_esx,port=self.esx_port) si = SmartConnect(host=self.esxhost, user=user_esx, pwd=password_esx, port=self.esx_port)
if not si: if not si:
raise Exception("Could not connect to the specified host using specified " raise Exception("Could not connect to the specified host using specified " "username and password")
"username and password")
atexit.register(Disconnect, si) atexit.register(Disconnect, si)
content = si.RetrieveContent() content = si.RetrieveContent()
for child in content.rootFolder.childEntity: for child in content.rootFolder.childEntity:
if hasattr(child, 'vmFolder'): if hasattr(child, "vmFolder"):
global vmFolder, datacenter global vmFolder, datacenter
datacenter = child datacenter = child
vmFolder = datacenter.vmFolder vmFolder = datacenter.vmFolder
@ -240,7 +232,7 @@ class backup_vmdk(backup_generic):
vm.ShutdownGuest() vm.ShutdownGuest()
vm_is_off = True vm_is_off = True
if vm_is_off: if vm_is_off:
vmdks = self.export_vmdks(vm) vmdks = self.export_vmdks(vm)
ovf_filename = self.create_ovf(vm, vmdks) ovf_filename = self.create_ovf(vm, vmdks)
else: else:
@ -250,32 +242,29 @@ class backup_vmdk(backup_generic):
self.wait_task(new_vm.Destroy_Task()) self.wait_task(new_vm.Destroy_Task())
if str2bool(self.create_ovafile): if str2bool(self.create_ovafile):
ova_filename = self.create_ova(vm, vmdks, ovf_filename) ova_filename = self.create_ova(vm, vmdks, ovf_filename) # noqa : F841
if str2bool(self.halt_vm): if str2bool(self.halt_vm):
vm.PowerOnVM() vm.PowerOnVM()
if os.path.exists(dest_dir): if os.path.exists(dest_dir):
for file in os.listdir(dest_dir): for file in os.listdir(dest_dir):
stats['written_bytes'] += os.stat(file)[ST_SIZE] stats["written_bytes"] += os.stat(file)[ST_SIZE]
stats['total_files_count'] += 1 stats["total_files_count"] += 1
stats['written_files_count'] += 1 stats["written_files_count"] += 1
stats['total_bytes'] = stats['written_bytes'] stats["total_bytes"] = stats["written_bytes"]
else: else:
stats['written_bytes'] = 0 stats["written_bytes"] = 0
stats['backup_location'] = dest_dir stats["backup_location"] = dest_dir
stats['log']='XVA backup from %s OK, %d bytes written' % (self.server_name,stats['written_bytes'])
stats['status']='OK'
stats["log"] = "XVA backup from %s OK, %d bytes written" % (self.server_name, stats["written_bytes"])
stats["status"] = "OK"
except BaseException as e: except BaseException as e:
stats['status']='ERROR' stats["status"] = "ERROR"
stats['log']=str(e) stats["log"] = str(e)
raise raise
register_driver(backup_vmdk) register_driver(backup_vmdk)
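
download_file() above streams each exported disk in 64 MiB chunks so a multi-GB VMDK never sits in memory. The same idiom in isolation, with the response wrapped in a context manager so the connection is released (stream_to_file is an invented name):

import requests

def stream_to_file(url, local_path, chunk_bytes=64 * 1024 * 1024, **request_kwargs):
    with requests.get(url, stream=True, **request_kwargs) as r:
        r.raise_for_status()
        with open(local_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=chunk_bytes):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
    return local_path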

View File

@ -19,7 +19,6 @@
# ----------------------------------------------------------------------- # -----------------------------------------------------------------------
import paramiko import paramiko
from .common import * from .common import *
@ -27,67 +26,76 @@ from .common import *
class backup_xcp_metadata(backup_generic): class backup_xcp_metadata(backup_generic):
"""Backup metatdata of a xcp pool using xe pool-dump-database""" """Backup metatdata of a xcp pool using xe pool-dump-database"""
type = 'xcp-dump-metadata'
required_params = ['type','server_name','private_key','backup_name']
def do_backup(self,stats): type = "xcp-dump-metadata"
required_params = ["type", "server_name", "private_key", "backup_name"]
self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key)
def do_backup(self, stats):
self.logger.debug("[%s] Connecting to %s with user root and key %s", self.backup_name, self.server_name, self.private_key)
t = datetime.datetime.now() t = datetime.datetime.now()
backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S') backup_start_date = t.strftime("%Y%m%d-%Hh%Mm%S")
# dump pool metadata # dump pool metadata
localpath = os.path.join(self.backup_dir , 'xcp_metadata-' + backup_start_date + '.dump') localpath = os.path.join(self.backup_dir, "xcp_metadata-" + backup_start_date + ".dump")
stats['status']='Dumping' stats["status"] = "Dumping"
if not self.dry_run: if not self.dry_run:
cmd = "/opt/xensource/bin/xe pool-dump-database file-name=" cmd = "/opt/xensource/bin/xe pool-dump-database file-name="
self.logger.debug('[%s] Dump XCP Metadata : %s', self.backup_name, cmd) self.logger.debug("[%s] Dump XCP Metadata : %s", self.backup_name, cmd)
(error_code, output) = ssh_exec(cmd, server_name=self.server_name,private_key=self.private_key, remote_user='root') (error_code, output) = ssh_exec(cmd, server_name=self.server_name, private_key=self.private_key, remote_user="root")
with open(localpath,"w") as f: with open(localpath, "w") as f:
f.write(output) f.write(output)
# zip the file # zip the file
stats['status']='Zipping' stats["status"] = "Zipping"
cmd = 'gzip %s ' % localpath cmd = "gzip %s " % localpath
self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd) self.logger.debug("[%s] Compress backup : %s", self.backup_name, cmd)
if not self.dry_run: if not self.dry_run:
call_external_process(cmd) call_external_process(cmd)
localpath += ".gz" localpath += ".gz"
if not self.dry_run: if not self.dry_run:
stats['total_files_count']=1 stats["total_files_count"] = 1
stats['written_files_count']=1 stats["written_files_count"] = 1
stats['total_bytes']=os.stat(localpath).st_size stats["total_bytes"] = os.stat(localpath).st_size
stats['written_bytes']=os.stat(localpath).st_size stats["written_bytes"] = os.stat(localpath).st_size
stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,'xcp metadata dump', stats['written_bytes'], localpath) stats["log"] = "gzip dump of DB %s:%s (%d bytes) to %s" % (self.server_name, "xcp metadata dump", stats["written_bytes"], localpath)
stats['backup_location'] = localpath stats["backup_location"] = localpath
stats['status']='OK' stats["status"] = "OK"
def register_existingbackups(self): def register_existingbackups(self):
"""scan metatdata backup files and insert stats in database""" """scan metatdata backup files and insert stats in database"""
registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))] registered = [
b["backup_location"]
for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
]
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).dump.gz$' % self.server_name) p = re.compile("^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).dump.gz$" % self.server_name)
for item in filelist: for item in filelist:
sr = p.match(item) sr = p.match(item)
if sr: if sr:
file_name = os.path.join(self.backup_dir,item) file_name = os.path.join(self.backup_dir, item)
start = datetime.datetime.strptime(sr.groups()[0],'%Y%m%d-%Hh%Mm%S').isoformat() start = datetime.datetime.strptime(sr.groups()[0], "%Y%m%d-%Hh%Mm%S").isoformat()
if not file_name in registered: if file_name not in registered:
self.logger.info('Registering %s from %s',file_name,fileisodate(file_name)) self.logger.info("Registering %s from %s", file_name, fileisodate(file_name))
size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0]) size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split("\t")[0])
self.logger.debug(' Size in bytes : %i',size_bytes) self.logger.debug(" Size in bytes : %i", size_bytes)
if not self.dry_run: if not self.dry_run:
self.dbstat.add(self.backup_name,self.server_name,'',\ self.dbstat.add(
backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name) self.backup_name,
self.server_name,
"",
backup_start=start,
backup_end=fileisodate(file_name),
status="OK",
total_bytes=size_bytes,
backup_location=file_name,
)
else: else:
self.logger.info('Skipping %s from %s, already registered',file_name,fileisodate(file_name)) self.logger.info("Skipping %s from %s, already registered", file_name, fileisodate(file_name))
register_driver(backup_xcp_metadata) register_driver(backup_xcp_metadata)
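
The register_existingbackups() methods all recover the backup start time from the file name with a dated regex plus strptime. The core of that pattern as a runnable snippet (the sample file names are invented; note the escaped dots, which the originals leave unescaped):

import datetime
import re

p = re.compile(r"^xcp_metadata-(?P<date>\d{8}-\d{2}h\d{2}m\d{2})\.dump\.gz$")
for item in ["xcp_metadata-20241129-22h54m39.dump.gz", "notes.txt"]:
    sr = p.match(item)
    if sr:
        start = datetime.datetime.strptime(sr.group("date"), "%Y%m%d-%Hh%Mm%S").isoformat()
        print(item, "->", start)  # -> 2024-11-29T22:54:39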

View File

@ -36,16 +36,24 @@ import requests
from . import XenAPI from . import XenAPI
from .common import * from .common import *
if hasattr(ssl, '_create_unverified_context'): if hasattr(ssl, "_create_unverified_context"):
ssl._create_default_https_context = ssl._create_unverified_context ssl._create_default_https_context = ssl._create_unverified_context
class backup_xva(backup_generic): class backup_xva(backup_generic):
"""Backup a VM running on a XCP server as a XVA file (requires xe tools and XenAPI)""" """Backup a VM running on a XCP server as a XVA file (requires xe tools and XenAPI)"""
type = 'xen-xva'
required_params = backup_generic.required_params + ['xcphost','password_file','server_name'] type = "xen-xva"
optional_params = backup_generic.optional_params + ['enable_https', 'halt_vm', 'verify_export', 'reuse_snapshot', 'ignore_proxies', 'use_compression' ]
required_params = backup_generic.required_params + ["xcphost", "password_file", "server_name"]
optional_params = backup_generic.optional_params + [
"enable_https",
"halt_vm",
"verify_export",
"reuse_snapshot",
"ignore_proxies",
"use_compression",
]
enable_https = "no" enable_https = "no"
halt_vm = "no" halt_vm = "no"
@ -55,34 +63,33 @@ class backup_xva(backup_generic):
use_compression = "true" use_compression = "true"
if str2bool(ignore_proxies): if str2bool(ignore_proxies):
os.environ['http_proxy']="" os.environ["http_proxy"] = ""
os.environ['https_proxy']="" os.environ["https_proxy"] = ""
def verify_export_xva(self,filename): def verify_export_xva(self, filename):
self.logger.debug("[%s] Verify xva export integrity",self.server_name) self.logger.debug("[%s] Verify xva export integrity", self.server_name)
tar = tarfile.open(filename) tar = tarfile.open(filename)
members = tar.getmembers() members = tar.getmembers()
for tarinfo in members: for tarinfo in members:
if re.search('^[0-9]*$',os.path.basename(tarinfo.name)): if re.search("^[0-9]*$", os.path.basename(tarinfo.name)):
sha1sum = hashlib.sha1(tar.extractfile(tarinfo).read()).hexdigest() sha1sum = hashlib.sha1(tar.extractfile(tarinfo).read()).hexdigest()
sha1sum2 = tar.extractfile(tarinfo.name+'.checksum').read() sha1sum2 = tar.extractfile(tarinfo.name + ".checksum").read()
if not sha1sum == sha1sum2: if not sha1sum == sha1sum2:
raise Exception("File corrupt") raise Exception("File corrupt")
tar.close() tar.close()
def export_xva(self, vdi_name, filename, halt_vm,dry_run,enable_https=True, reuse_snapshot="no"): def export_xva(self, vdi_name, filename, halt_vm, dry_run, enable_https=True, reuse_snapshot="no"):
user_xen, password_xen, null = open(self.password_file).read().split("\n")
user_xen, password_xen, null = open(self.password_file).read().split('\n') session = XenAPI.Session("https://" + self.xcphost)
session = XenAPI.Session('https://'+self.xcphost)
try: try:
session.login_with_password(user_xen,password_xen) session.login_with_password(user_xen, password_xen)
except XenAPI.Failure as error: except XenAPI.Failure as error:
msg,ip = error.details msg, ip = error.details
if msg == 'HOST_IS_SLAVE': if msg == "HOST_IS_SLAVE":
xcphost = ip xcphost = ip
session = XenAPI.Session('https://'+xcphost) session = XenAPI.Session("https://" + xcphost)
session.login_with_password(user_xen,password_xen) session.login_with_password(user_xen, password_xen)
if not session.xenapi.VM.get_by_name_label(vdi_name): if not session.xenapi.VM.get_by_name_label(vdi_name):
return "bad VM name: %s" % vdi_name return "bad VM name: %s" % vdi_name
@ -90,105 +97,101 @@ class backup_xva(backup_generic):
vm = session.xenapi.VM.get_by_name_label(vdi_name)[0] vm = session.xenapi.VM.get_by_name_label(vdi_name)[0]
status_vm = session.xenapi.VM.get_power_state(vm) status_vm = session.xenapi.VM.get_power_state(vm)
self.logger.debug("[%s] Check if previous fail backups exist",vdi_name) self.logger.debug("[%s] Check if previous fail backups exist", vdi_name)
backups_fail = files = [f for f in os.listdir(self.backup_dir) if f.startswith(vdi_name) and f.endswith(".tmp")] backups_fail = [f for f in os.listdir(self.backup_dir) if f.startswith(vdi_name) and f.endswith(".tmp")]
for backup_fail in backups_fail: for backup_fail in backups_fail:
self.logger.debug('[%s] Delete backup "%s"', vdi_name, backup_fail) self.logger.debug('[%s] Delete backup "%s"', vdi_name, backup_fail)
os.unlink(os.path.join(self.backup_dir, backup_fail)) os.unlink(os.path.join(self.backup_dir, backup_fail))
# add snapshot option
#add snapshot option
if not str2bool(halt_vm): if not str2bool(halt_vm):
self.logger.debug("[%s] Check if previous tisbackups snapshots exist",vdi_name) self.logger.debug("[%s] Check if previous tisbackups snapshots exist", vdi_name)
old_snapshots = session.xenapi.VM.get_by_name_label("tisbackup-%s"%(vdi_name)) old_snapshots = session.xenapi.VM.get_by_name_label("tisbackup-%s" % (vdi_name))
self.logger.debug("[%s] Old snaps count %s", vdi_name, len(old_snapshots)) self.logger.debug("[%s] Old snaps count %s", vdi_name, len(old_snapshots))
if len(old_snapshots) == 1 and str2bool(reuse_snapshot) == True: if len(old_snapshots) == 1 and str2bool(reuse_snapshot):
snapshot = old_snapshots[0] snapshot = old_snapshots[0]
self.logger.debug("[%s] Reusing snap \"%s\"", vdi_name, session.xenapi.VM.get_name_description(snapshot)) self.logger.debug('[%s] Reusing snap "%s"', vdi_name, session.xenapi.VM.get_name_description(snapshot))
vm = snapshot # vm = session.xenapi.VM.get_by_name_label("tisbackup-%s"%(vdi_name))[0] vm = snapshot # vm = session.xenapi.VM.get_by_name_label("tisbackup-%s"%(vdi_name))[0]
else: else:
self.logger.debug("[%s] Deleting %s old snaps", vdi_name, len(old_snapshots)) self.logger.debug("[%s] Deleting %s old snaps", vdi_name, len(old_snapshots))
for old_snapshot in old_snapshots: for old_snapshot in old_snapshots:
self.logger.debug("[%s] Destroy snapshot %s",vdi_name,session.xenapi.VM.get_name_description(old_snapshot)) self.logger.debug("[%s] Destroy snapshot %s", vdi_name, session.xenapi.VM.get_name_description(old_snapshot))
try: try:
for vbd in session.xenapi.VM.get_VBDs(old_snapshot): for vbd in session.xenapi.VM.get_VBDs(old_snapshot):
if session.xenapi.VBD.get_type(vbd) == 'CD' and session.xenapi.VBD.get_record(vbd)['empty'] == False: if session.xenapi.VBD.get_type(vbd) == "CD" and not session.xenapi.VBD.get_record(vbd)["empty"]:
session.xenapi.VBD.eject(vbd) session.xenapi.VBD.eject(vbd)
else: else:
vdi = session.xenapi.VBD.get_VDI(vbd) vdi = session.xenapi.VBD.get_VDI(vbd)
if not 'NULL' in vdi: if "NULL" not in vdi:
session.xenapi.VDI.destroy(vdi) session.xenapi.VDI.destroy(vdi)
session.xenapi.VM.destroy(old_snapshot) session.xenapi.VM.destroy(old_snapshot)
except XenAPI.Failure as error: except XenAPI.Failure as error:
return("error when destroy snapshot %s"%(error)) return "error when destroy snapshot %s" % (error)
now = datetime.datetime.now() now = datetime.datetime.now()
self.logger.debug("[%s] Snapshot in progress",vdi_name) self.logger.debug("[%s] Snapshot in progress", vdi_name)
try: try:
snapshot = session.xenapi.VM.snapshot(vm,"tisbackup-%s"%(vdi_name)) snapshot = session.xenapi.VM.snapshot(vm, "tisbackup-%s" % (vdi_name))
self.logger.debug("[%s] got snapshot %s", vdi_name, snapshot) self.logger.debug("[%s] got snapshot %s", vdi_name, snapshot)
except XenAPI.Failure as error: except XenAPI.Failure as error:
return("error when snapshot %s"%(error)) return "error when snapshot %s" % (error)
#get snapshot opaqueRef # get snapshot opaqueRef
vm = session.xenapi.VM.get_by_name_label("tisbackup-%s"%(vdi_name))[0] vm = session.xenapi.VM.get_by_name_label("tisbackup-%s" % (vdi_name))[0]
session.xenapi.VM.set_name_description(snapshot,"snapshot created by tisbackup on: %s"%(now.strftime("%Y-%m-%d %H:%M"))) session.xenapi.VM.set_name_description(snapshot, "snapshot created by tisbackup on: %s" % (now.strftime("%Y-%m-%d %H:%M")))
else: else:
self.logger.debug("[%s] Status of VM: %s",self.backup_name,status_vm) self.logger.debug("[%s] Status of VM: %s", self.backup_name, status_vm)
if status_vm == "Running": if status_vm == "Running":
self.logger.debug("[%s] Shudown in progress",self.backup_name) self.logger.debug("[%s] Shudown in progress", self.backup_name)
if dry_run: if dry_run:
print("session.xenapi.VM.clean_shutdown(vm)") print("session.xenapi.VM.clean_shutdown(vm)")
else: else:
session.xenapi.VM.clean_shutdown(vm) session.xenapi.VM.clean_shutdown(vm)
try: try:
try: try:
filename_temp = filename+".tmp" filename_temp = filename + ".tmp"
self.logger.debug("[%s] Copy in progress",self.backup_name) self.logger.debug("[%s] Copy in progress", self.backup_name)
if not str2bool(self.use_compression): if not str2bool(self.use_compression):
socket.setdefaulttimeout(120) socket.setdefaulttimeout(120)
scheme = "http://" scheme = "http://"
if str2bool(enable_https): if str2bool(enable_https):
scheme = "https://" scheme = "https://"
url = scheme+user_xen+":"+password_xen+"@"+self.xcphost+"/export?use_compression="+self.use_compression+"&uuid="+session.xenapi.VM.get_uuid(vm)
# url = scheme+user_xen+":"+password_xen+"@"+self.xcphost+"/export?use_compression="+self.use_compression+"&uuid="+session.xenapi.VM.get_uuid(vm)
top_level_url = (
scheme + self.xcphost + "/export?use_compression=" + self.use_compression + "&uuid=" + session.xenapi.VM.get_uuid(vm)
top_level_url = scheme+self.xcphost+"/export?use_compression="+self.use_compression+"&uuid="+session.xenapi.VM.get_uuid(vm) )
r = requests.get(top_level_url, auth=(user_xen, password_xen)) r = requests.get(top_level_url, auth=(user_xen, password_xen))
open(filename_temp, 'wb').write(r.content) open(filename_temp, "wb").write(r.content)
except Exception as e: except Exception as e:
self.logger.error("[%s] error when fetching snap: %s", "tisbackup-%s"%(vdi_name), e) self.logger.error("[%s] error when fetching snap: %s", "tisbackup-%s" % (vdi_name), e)
if os.path.exists(filename_temp): if os.path.exists(filename_temp):
os.unlink(filename_temp) os.unlink(filename_temp)
raise raise
finally: finally:
if not str2bool(halt_vm): if not str2bool(halt_vm):
self.logger.debug("[%s] Destroy snapshot",'tisbackup-%s'%(vdi_name)) self.logger.debug("[%s] Destroy snapshot", "tisbackup-%s" % (vdi_name))
try: try:
for vbd in session.xenapi.VM.get_VBDs(snapshot): for vbd in session.xenapi.VM.get_VBDs(snapshot):
if session.xenapi.VBD.get_type(vbd) == 'CD' and session.xenapi.VBD.get_record(vbd)['empty'] == False: if session.xenapi.VBD.get_type(vbd) == "CD" and not session.xenapi.VBD.get_record(vbd)["empty"]:
session.xenapi.VBD.eject(vbd) session.xenapi.VBD.eject(vbd)
else: else:
vdi = session.xenapi.VBD.get_VDI(vbd) vdi = session.xenapi.VBD.get_VDI(vbd)
if not 'NULL' in vdi: if "NULL" not in vdi:
session.xenapi.VDI.destroy(vdi) session.xenapi.VDI.destroy(vdi)
session.xenapi.VM.destroy(snapshot) session.xenapi.VM.destroy(snapshot)
except XenAPI.Failure as error: except XenAPI.Failure as error:
return("error when destroy snapshot %s"%(error)) return "error when destroy snapshot %s" % (error)
elif status_vm == "Running": elif status_vm == "Running":
self.logger.debug("[%s] Starting in progress",self.backup_name) self.logger.debug("[%s] Starting in progress", self.backup_name)
if dry_run: if dry_run:
print("session.xenapi.Async.VM.start(vm,False,True)") print("session.xenapi.Async.VM.start(vm,False,True)")
else: else:
session.xenapi.Async.VM.start(vm,False,True) session.xenapi.Async.VM.start(vm, False, True)
session.logout() session.logout()
@ -196,85 +199,102 @@ class backup_xva(backup_generic):
tar = os.system('tar tf "%s" > /dev/null' % filename_temp) tar = os.system('tar tf "%s" > /dev/null' % filename_temp)
if not tar == 0: if not tar == 0:
os.unlink(filename_temp) os.unlink(filename_temp)
return("Tar error") return "Tar error"
if str2bool(self.verify_export): if str2bool(self.verify_export):
self.verify_export_xva(filename_temp) self.verify_export_xva(filename_temp)
os.rename(filename_temp,filename) os.rename(filename_temp, filename)
return(0) return 0
def do_backup(self, stats):
def do_backup(self,stats):
try: try:
dest_filename = os.path.join(self.backup_dir,"%s-%s.%s" % (self.backup_name,self.backup_start_date,'xva')) dest_filename = os.path.join(self.backup_dir, "%s-%s.%s" % (self.backup_name, self.backup_start_date, "xva"))
options = [] # options = []
options_params = " ".join(options) # options_params = " ".join(options)
cmd = self.export_xva( vdi_name= self.server_name,filename= dest_filename, halt_vm= self.halt_vm, enable_https=self.enable_https, dry_run= self.dry_run, reuse_snapshot=self.reuse_snapshot) cmd = self.export_xva(
vdi_name=self.server_name,
filename=dest_filename,
halt_vm=self.halt_vm,
enable_https=self.enable_https,
dry_run=self.dry_run,
reuse_snapshot=self.reuse_snapshot,
)
if os.path.exists(dest_filename): if os.path.exists(dest_filename):
stats['written_bytes'] = os.stat(dest_filename)[ST_SIZE] stats["written_bytes"] = os.stat(dest_filename)[ST_SIZE]
stats['total_files_count'] = 1 stats["total_files_count"] = 1
stats['written_files_count'] = 1 stats["written_files_count"] = 1
stats['total_bytes'] = stats['written_bytes'] stats["total_bytes"] = stats["written_bytes"]
else: else:
stats['written_bytes'] = 0 stats["written_bytes"] = 0
stats['backup_location'] = dest_filename stats["backup_location"] = dest_filename
if cmd == 0: if cmd == 0:
stats['log']='XVA backup from %s OK, %d bytes written' % (self.server_name,stats['written_bytes']) stats["log"] = "XVA backup from %s OK, %d bytes written" % (self.server_name, stats["written_bytes"])
stats['status']='OK' stats["status"] = "OK"
else: else:
raise Exception(cmd) raise Exception(cmd)
except BaseException as e: except BaseException as e:
stats['status']='ERROR' stats["status"] = "ERROR"
stats['log']=str(e) stats["log"] = str(e)
raise raise
def register_existingbackups(self): def register_existingbackups(self):
"""scan backup dir and insert stats in database""" """scan backup dir and insert stats in database"""
registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))] registered = [
b["backup_location"]
for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
]
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
for item in filelist: for item in filelist:
if item.endswith('.xva'): if item.endswith(".xva"):
dir_name = os.path.join(self.backup_dir,item) dir_name = os.path.join(self.backup_dir, item)
if not dir_name in registered: if dir_name not in registered:
start = (datetime.datetime.strptime(item,self.backup_name+'-%Y%m%d-%Hh%Mm%S.xva') + datetime.timedelta(0,30*60)).isoformat() start = (
if fileisodate(dir_name)>start: datetime.datetime.strptime(item, self.backup_name + "-%Y%m%d-%Hh%Mm%S.xva") + datetime.timedelta(0, 30 * 60)
).isoformat()
if fileisodate(dir_name) > start:
stop = fileisodate(dir_name) stop = fileisodate(dir_name)
else: else:
stop = start stop = start
self.logger.info('Registering %s started on %s',dir_name,start) self.logger.info("Registering %s started on %s", dir_name, start)
self.logger.debug(' Disk usage %s','du -sb "%s"' % dir_name) self.logger.debug(" Disk usage %s", 'du -sb "%s"' % dir_name)
if not self.dry_run: if not self.dry_run:
size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0]) size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
else: else:
size_bytes = 0 size_bytes = 0
self.logger.debug(' Size in bytes : %i',size_bytes) self.logger.debug(" Size in bytes : %i", size_bytes)
if not self.dry_run: if not self.dry_run:
self.dbstat.add(self.backup_name,self.server_name,'',\ self.dbstat.add(
backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name,TYPE='BACKUP') self.backup_name,
self.server_name,
"",
backup_start=start,
backup_end=stop,
status="OK",
total_bytes=size_bytes,
backup_location=dir_name,
TYPE="BACKUP",
)
else: else:
self.logger.info('Skipping %s, already registered',dir_name) self.logger.info("Skipping %s, already registered", dir_name)
register_driver(backup_xva) register_driver(backup_xva)
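
verify_export_xva() recomputes the SHA-1 of every numbered block in the archive and compares it with the stored .checksum member. A standalone sketch of that check; unlike the original it decodes the stored digest, since extractfile().read() returns bytes on Python 3 and would never compare equal to hexdigest()'s str:

import hashlib
import os
import re
import tarfile

def verify_xva(filename):
    with tarfile.open(filename) as tar:
        for tarinfo in tar.getmembers():
            if re.search(r"^[0-9]+$", os.path.basename(tarinfo.name)):
                digest = hashlib.sha1(tar.extractfile(tarinfo).read()).hexdigest()
                stored = tar.extractfile(tarinfo.name + ".checksum").read().decode().strip()
                if digest != stored:
                    raise Exception("File corrupt: %s" % tarinfo.name)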

if __name__ == "__main__":
    logger = logging.getLogger("tisbackup")
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    cp = ConfigParser()
    cp.read("/opt/tisbackup/configtest.ini")

    b = backup_xva()
    b.read_config(cp)
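    # Hedged continuation of this manual test: the actual entry point that
    # drives do_backup() lives in backup_generic and is not shown in this
    # diff, so the call below is an assumption.
    # b.process_backup()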

File diff suppressed because it is too large

View File

@@ -36,262 +36,252 @@ from stat import *
from . import XenAPI
from .common import *

if hasattr(ssl, "_create_unverified_context"):
    ssl._create_default_https_context = ssl._create_unverified_context


class copy_vm_xcp(backup_generic):
    """Backup a VM running on a XCP server on a second SR (requires xe tools and XenAPI)"""

    type = "copy-vm-xcp"

    required_params = backup_generic.required_params + ["server_name", "storage_name", "password_file", "vm_name", "network_name"]
    optional_params = backup_generic.optional_params + ["start_vm", "max_copies", "delete_snapshot", "halt_vm"]

    start_vm = "no"
    max_copies = 1
    halt_vm = "no"
    delete_snapshot = "yes"

    def read_config(self, iniconf):
        assert isinstance(iniconf, ConfigParser)
        backup_generic.read_config(self, iniconf)
        if self.start_vm == "no" and iniconf.has_option("global", "start_vm"):
            self.start_vm = iniconf.get("global", "start_vm")
        if self.max_copies == 1 and iniconf.has_option("global", "max_copies"):
            self.max_copies = iniconf.getint("global", "max_copies")
        if self.delete_snapshot == "yes" and iniconf.has_option("global", "delete_snapshot"):
            self.delete_snapshot = iniconf.get("global", "delete_snapshot")
    def copy_vm_to_sr(self, vm_name, storage_name, dry_run, delete_snapshot="yes"):
        user_xen, password_xen, null = open(self.password_file).read().split("\n")
        session = XenAPI.Session("https://" + self.server_name)
        try:
            session.login_with_password(user_xen, password_xen)
        except XenAPI.Failure as error:
            msg, ip = error.details
            if msg == "HOST_IS_SLAVE":
                server_name = ip
                session = XenAPI.Session("https://" + server_name)
                session.login_with_password(user_xen, password_xen)

        self.logger.debug("[%s] VM (%s) to backup in storage: %s", self.backup_name, vm_name, storage_name)
        now = datetime.datetime.now()

        # get storage opaqueRef
        try:
            storage = session.xenapi.SR.get_by_name_label(storage_name)[0]
        except IndexError as error:
            result = (1, "error get SR opaqueref %s" % (error))
            return result

        # get vm to copy opaqueRef
        try:
            vm = session.xenapi.VM.get_by_name_label(vm_name)[0]
        except IndexError as error:
            result = (1, "error get VM opaqueref %s" % (error))
            return result

        # get vm backup network opaqueRef
        try:
            networkRef = session.xenapi.network.get_by_name_label(self.network_name)[0]
        except IndexError as error:
            result = (1, "error get VM network opaqueref %s" % (error))
            return result

        if str2bool(self.halt_vm):
            status_vm = session.xenapi.VM.get_power_state(vm)
            self.logger.debug("[%s] Status of VM: %s", self.backup_name, status_vm)
            if status_vm == "Running":
                self.logger.debug("[%s] Shutdown in progress", self.backup_name)
                if dry_run:
                    print("session.xenapi.VM.clean_shutdown(vm)")
                else:
                    session.xenapi.VM.clean_shutdown(vm)
            snapshot = vm
        else:
            # do the snapshot
            self.logger.debug("[%s] Snapshot in progress", self.backup_name)
            try:
                snapshot = session.xenapi.VM.snapshot(vm, "tisbackup-%s" % (vm_name))
            except XenAPI.Failure as error:
                result = (1, "error when snapshot %s" % (error))
                return result

            # get snapshot opaqueRef
            snapshot = session.xenapi.VM.get_by_name_label("tisbackup-%s" % (vm_name))[0]
            session.xenapi.VM.set_name_description(snapshot, "snapshot created by tisbackup on : %s" % (now.strftime("%Y-%m-%d %H:%M")))

        vm_backup_name = "zzz-%s-" % (vm_name)

        # Check if old backups exist
        list_backups = []
        for vm_ref in session.xenapi.VM.get_all():
            name_label = session.xenapi.VM.get_name_label(vm_ref)
            if vm_backup_name in name_label:
                list_backups.append(name_label)

        list_backups.sort()

        if len(list_backups) >= 1:
            # Shutting last backup if started
            last_backup_vm = session.xenapi.VM.get_by_name_label(list_backups[-1])[0]
            if "Halted" not in session.xenapi.VM.get_power_state(last_backup_vm):
                self.logger.debug("[%s] Shutting down last backup vm : %s", self.backup_name, list_backups[-1])
                session.xenapi.VM.hard_shutdown(last_backup_vm)

            # Delete oldest backups if they exist
            if len(list_backups) >= int(self.max_copies):
                for i in range(len(list_backups) - int(self.max_copies) + 1):
                    oldest_backup_vm = session.xenapi.VM.get_by_name_label(list_backups[i])[0]
                    if "Halted" not in session.xenapi.VM.get_power_state(oldest_backup_vm):
                        self.logger.debug("[%s] Shutting down old vm : %s", self.backup_name, list_backups[i])
                        session.xenapi.VM.hard_shutdown(oldest_backup_vm)
                    try:
                        self.logger.debug("[%s] Deleting old vm : %s", self.backup_name, list_backups[i])
                        for vbd in session.xenapi.VM.get_VBDs(oldest_backup_vm):
                            if session.xenapi.VBD.get_type(vbd) == "CD" and not session.xenapi.VBD.get_record(vbd)["empty"]:
                                session.xenapi.VBD.eject(vbd)
                            else:
                                vdi = session.xenapi.VBD.get_VDI(vbd)
                                if "NULL" not in vdi:
                                    session.xenapi.VDI.destroy(vdi)
                        session.xenapi.VM.destroy(oldest_backup_vm)
                    except XenAPI.Failure as error:
                        result = (1, "error when destroy old backup vm %s" % (error))
                        return result

        self.logger.debug("[%s] Copy %s in progress on %s", self.backup_name, vm_name, storage_name)
        try:
            backup_vm = session.xenapi.VM.copy(snapshot, vm_backup_name + now.strftime("%Y-%m-%d %H:%M"), storage)
        except XenAPI.Failure as error:
            result = (1, "error when copy %s" % (error))
            return result

        # ensure the copy is a VM, not a template
        session.xenapi.VM.set_is_a_template(backup_vm, False)

        # change the network of the new VM
        try:
            vifDestroy = session.xenapi.VM.get_VIFs(backup_vm)
        except IndexError as error:
            result = (1, "error get VIF opaqueref %s" % (error))
            return result

        for i in vifDestroy:
            vifRecord = session.xenapi.VIF.get_record(i)
            session.xenapi.VIF.destroy(i)
            data = {
                "MAC": vifRecord["MAC"],
                "MAC_autogenerated": False,
                "MTU": vifRecord["MTU"],
                "VM": backup_vm,
                "current_operations": vifRecord["current_operations"],
                "currently_attached": vifRecord["currently_attached"],
                "device": vifRecord["device"],
                "ipv4_allowed": vifRecord["ipv4_allowed"],
                "ipv6_allowed": vifRecord["ipv6_allowed"],
                "locking_mode": vifRecord["locking_mode"],
                "network": networkRef,
                "other_config": vifRecord["other_config"],
                "qos_algorithm_params": vifRecord["qos_algorithm_params"],
                "qos_algorithm_type": vifRecord["qos_algorithm_type"],
                "qos_supported_algorithms": vifRecord["qos_supported_algorithms"],
                "runtime_properties": vifRecord["runtime_properties"],
                "status_code": vifRecord["status_code"],
                "status_detail": vifRecord["status_detail"],
            }
            try:
                session.xenapi.VIF.create(data)
            except Exception as error:
                result = (1, error)
                return result

        if self.start_vm in ["true", "1", "t", "y", "yes", "oui"]:
            session.xenapi.VM.start(backup_vm, False, True)

        session.xenapi.VM.set_name_description(backup_vm, "snapshot created by tisbackup on : %s" % (now.strftime("%Y-%m-%d %H:%M")))

        size_backup = 0
        for vbd in session.xenapi.VM.get_VBDs(backup_vm):
            if session.xenapi.VBD.get_type(vbd) == "CD" and not session.xenapi.VBD.get_record(vbd)["empty"]:
                session.xenapi.VBD.eject(vbd)
            else:
                vdi = session.xenapi.VBD.get_VDI(vbd)
                if "NULL" not in vdi:
                    size_backup = size_backup + int(session.xenapi.VDI.get_record(vdi)["physical_utilisation"])

        result = (0, size_backup)
        if self.delete_snapshot == "no":
            return result

        # Disable automatic boot
        if "auto_poweron" in session.xenapi.VM.get_other_config(backup_vm):
            session.xenapi.VM.remove_from_other_config(backup_vm, "auto_poweron")

        if not str2bool(self.halt_vm):
            # delete the snapshot
            try:
                for vbd in session.xenapi.VM.get_VBDs(snapshot):
                    if session.xenapi.VBD.get_type(vbd) == "CD" and not session.xenapi.VBD.get_record(vbd)["empty"]:
                        session.xenapi.VBD.eject(vbd)
                    else:
                        vdi = session.xenapi.VBD.get_VDI(vbd)
                        if "NULL" not in vdi:
                            session.xenapi.VDI.destroy(vdi)
                session.xenapi.VM.destroy(snapshot)
            except XenAPI.Failure as error:
                result = (1, "error when destroy snapshot %s" % (error))
                return result
        else:
            if status_vm == "Running":
                self.logger.debug("[%s] Starting in progress", self.backup_name)
                if dry_run:
                    print("session.xenapi.VM.start(vm,False,True)")
                else:
                    session.xenapi.VM.start(vm, False, True)

        return result
    def do_backup(self, stats):
        try:
            # timestamp = int(time.time())
            cmd = self.copy_vm_to_sr(self.vm_name, self.storage_name, self.dry_run, delete_snapshot=self.delete_snapshot)

            if cmd[0] == 0:
                # timeExec = int(time.time()) - timestamp
                stats["log"] = "copy of %s to another storage OK" % (self.backup_name)
                stats["status"] = "OK"
                stats["total_files_count"] = 1
                stats["total_bytes"] = cmd[1]
                stats["backup_location"] = self.storage_name
            else:
                stats["status"] = "ERROR"
                stats["log"] = cmd[1]

        except BaseException as e:
            stats["status"] = "ERROR"
            stats["log"] = str(e)
            raise

    def register_existingbackups(self):
        """scan backup dir and insert stats in database"""
        # This backup is on the target server, no data available on this server
        pass


register_driver(copy_vm_xcp)
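

# Minimal standalone sketch of the XenAPI calls the driver chains together
# (login, snapshot, copy to the target SR, snapshot cleanup). Host,
# credentials and labels are placeholders; error handling is omitted.
if __name__ == "__main__":
    import XenAPI

    session = XenAPI.Session("https://xcp-host1")
    session.login_with_password("root", "secret")
    vm = session.xenapi.VM.get_by_name_label("vm1")[0]
    sr = session.xenapi.SR.get_by_name_label("backup-sr")[0]
    snap = session.xenapi.VM.snapshot(vm, "tisbackup-vm1")
    copy = session.xenapi.VM.copy(snap, "zzz-vm1-manual", sr)
    session.xenapi.VM.destroy(snap)
    print("copied:", session.xenapi.VM.get_name_label(copy))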

View File

@@ -3,21 +3,37 @@
# Copyright (c) 2007 Tim Lauridsen <tla@rasmil.dk>
# All Rights Reserved.  See LICENSE-PSF & LICENSE for details.

from .ini import INIConfig, change_comment_syntax
from .config import BasicConfig, ConfigNamespace
from .compat import RawConfigParser, ConfigParser, SafeConfigParser
from .utils import tidy
from .configparser import (
    DuplicateSectionError,
    NoSectionError,
    NoOptionError,
    InterpolationMissingOptionError,
    InterpolationDepthError,
    InterpolationSyntaxError,
    DEFAULTSECT,
    MAX_INTERPOLATION_DEPTH,
)

__all__ = [
    "BasicConfig",
    "ConfigNamespace",
    "INIConfig",
    "tidy",
    "change_comment_syntax",
    "RawConfigParser",
    "ConfigParser",
    "SafeConfigParser",
    "DuplicateSectionError",
    "NoSectionError",
    "NoOptionError",
    "InterpolationMissingOptionError",
    "InterpolationDepthError",
    "InterpolationSyntaxError",
    "DEFAULTSECT",
    "MAX_INTERPOLATION_DEPTH",
]
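
# Usage sketch for the re-exported names (doctest-style; "example.ini" and
# its contents are hypothetical):
#
# >>> from iniparse import INIConfig
# >>> cfg = INIConfig(open("example.ini"))
# >>> print(cfg.foo.bar1)
# qualia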

View File

@@ -12,41 +12,48 @@ The underlying INIConfig object can be accessed as cfg.data
"""

import re
from typing import Dict, List, TextIO, Optional, Type, Union, Tuple

from .configparser import (
    DuplicateSectionError,
    NoSectionError,
    NoOptionError,
    InterpolationMissingOptionError,
    InterpolationDepthError,
    InterpolationSyntaxError,
    DEFAULTSECT,
    MAX_INTERPOLATION_DEPTH,
)

# These are imported only for compatibility.
# The code below does not reference them directly.
from .configparser import Error, InterpolationError, MissingSectionHeaderError, ParsingError

from . import ini


class RawConfigParser:
    def __init__(self, defaults: Optional[Dict[str, str]] = None, dict_type: Union[Type[Dict], str] = dict):
        if dict_type != dict:
            raise ValueError("Custom dict types not supported")
        self.data = ini.INIConfig(defaults=defaults, optionxformsource=self)

    def optionxform(self, optionstr: str) -> str:
        return optionstr.lower()

    def defaults(self) -> Dict[str, str]:
        d: Dict[str, str] = {}
        secobj: ini.INISection = self.data._defaults
        name: str
        for name in secobj._options:
            d[name] = secobj._compat_get(name)
        return d

    def sections(self) -> List[str]:
        """Return a list of section names, excluding [DEFAULT]"""
        return list(self.data)

    def add_section(self, section: str) -> None:
        """Create a new section in the configuration.

        Raise DuplicateSectionError if a section by the specified name
@@ -56,28 +63,28 @@ class RawConfigParser(object):
        # The default section is the only one that gets the case-insensitive
        # treatment - so it is special-cased here.
        if section.lower() == "default":
            raise ValueError("Invalid section name: %s" % section)

        if self.has_section(section):
            raise DuplicateSectionError(section)
        else:
            self.data._new_namespace(section)

    def has_section(self, section: str) -> bool:
        """Indicate whether the named section is present in the configuration.

        The DEFAULT section is not acknowledged.
        """
        return section in self.data

    def options(self, section: str) -> List[str]:
        """Return a list of option names for the given section name."""
        if section in self.data:
            return list(self.data[section])
        else:
            raise NoSectionError(section)

    def read(self, filenames: Union[List[str], str]) -> List[str]:
        """Read and parse a filename or a list of filenames.

        Files that cannot be opened are silently ignored; this is
@@ -86,9 +93,11 @@ class RawConfigParser(object):
        home directory, systemwide directory), and all existing
        configuration files in the list will be read.  A single
        filename may also be given.

        Returns the list of files that were read.
        """
        files_read = []
        if isinstance(filenames, str):
            filenames = [filenames]
        for filename in filenames:
            try:

@@ -100,7 +109,7 @@ class RawConfigParser(object):
                fp.close()
        return files_read

    def readfp(self, fp: TextIO, filename: Optional[str] = None) -> None:
        """Like read() but the argument must be a file-like object.

        The `fp' argument must have a `readline' method.  Optional
@@ -110,60 +119,70 @@
        """
        self.data._readfp(fp)

    def get(self, section: str, option: str, vars: Optional[dict] = None) -> str:
        if not self.has_section(section):
            raise NoSectionError(section)
        sec: ini.INISection = self.data[section]
        if option in sec:
            return sec._compat_get(option)
        else:
            raise NoOptionError(option, section)

    def items(self, section: str) -> List[Tuple[str, str]]:
        if section in self.data:
            ans = []
            opt: str
            for opt in self.data[section]:
                ans.append((opt, self.get(section, opt)))
            return ans
        else:
            raise NoSectionError(section)

    def getint(self, section: str, option: str) -> int:
        return int(self.get(section, option))

    def getfloat(self, section: str, option: str) -> float:
        return float(self.get(section, option))

    _boolean_states = {
        "1": True,
        "yes": True,
        "true": True,
        "on": True,
        "0": False,
        "no": False,
        "false": False,
        "off": False,
    }

    def getboolean(self, section: str, option: str) -> bool:
        v = self.get(section, option)
        if v.lower() not in self._boolean_states:
            raise ValueError("Not a boolean: %s" % v)
        return self._boolean_states[v.lower()]

    def has_option(self, section: str, option: str) -> bool:
        """Check for the existence of a given option in a given section."""
        if section in self.data:
            sec = self.data[section]
        else:
            raise NoSectionError(section)
        return option in sec

    def set(self, section: str, option: str, value: str) -> None:
        """Set an option."""
        if section in self.data:
            self.data[section][option] = value
        else:
            raise NoSectionError(section)

    def write(self, fp: TextIO) -> None:
        """Write an .ini-format representation of the configuration state."""
        fp.write(str(self.data))

    # FIXME Return a boolean instead of integer
    def remove_option(self, section: str, option: str) -> int:
        """Remove an option."""
        if section in self.data:
            sec = self.data[section]
@@ -175,7 +194,7 @@ class RawConfigParser(object):
        else:
            return 0

    def remove_section(self, section: str) -> bool:
        """Remove a file section."""
        if not self.has_section(section):
            return False
@@ -183,15 +202,15 @@
        return True


class ConfigDict:
    """Present a dict interface to an ini section."""

    def __init__(self, cfg: RawConfigParser, section: str, vars: dict):
        self.cfg: RawConfigParser = cfg
        self.section: str = section
        self.vars: dict = vars

    def __getitem__(self, key: str) -> Union[str, List[Union[int, str]]]:
        try:
            return RawConfigParser.get(self.cfg, self.section, key, self.vars)
        except (NoOptionError, NoSectionError):
@@ -199,8 +218,13 @@ class ConfigDict(object):


class ConfigParser(RawConfigParser):
    def get(
        self,
        section: str,
        option: str,
        raw: bool = False,
        vars: Optional[dict] = None,
    ) -> object:
        """Get an option value for a given section.

        All % interpolations are expanded in the return values, based on the
@@ -223,25 +247,24 @@ class ConfigParser(RawConfigParser):
        d = ConfigDict(self, section, vars)
        return self._interpolate(section, option, value, d)

    def _interpolate(self, section: str, option: str, rawval: object, vars: "ConfigDict"):
        # do the string interpolation
        value = rawval
        depth = MAX_INTERPOLATION_DEPTH
        while depth:  # Loop through this until it's done
            depth -= 1
            if "%(" in value:
                try:
                    value = value % vars
                except KeyError as e:
                    raise InterpolationMissingOptionError(option, section, rawval, e.args[0])
            else:
                break
        if value.find("%(") != -1:
            raise InterpolationDepthError(option, section, rawval)
        return value
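
    # Interpolation sketch (doctest-style): "%(name)s" references are
    # resolved against the section, up to MAX_INTERPOLATION_DEPTH rounds.
    #
    # >>> cp = ConfigParser()
    # >>> cp.add_section("paths")
    # >>> cp.set("paths", "home", "/opt/tisbackup")
    # >>> cp.set("paths", "logs", "%(home)s/log")
    # >>> cp.get("paths", "logs")
    # '/opt/tisbackup/log'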

    def items(self, section: str, raw: bool = False, vars: Optional[dict] = None):
        """Return a list of tuples with (name, value) for each option
        in the section.
@@ -269,40 +292,37 @@ class ConfigParser(RawConfigParser):
        d = ConfigDict(self, section, vars)
        if raw:
            return [(option, d[option]) for option in options]
        else:
            return [(option, self._interpolate(section, option, d[option], d)) for option in options]


class SafeConfigParser(ConfigParser):
    _interpvar_re = re.compile(r"%\(([^)]+)\)s")
    _badpercent_re = re.compile(r"%[^%]|%$")

    def set(self, section: str, option: str, value: object) -> None:
        if not isinstance(value, str):
            raise TypeError("option values must be strings")
        # check for bad percent signs:
        # first, replace all "good" interpolations
        tmp_value = self._interpvar_re.sub("", value)
        # then, check if there's a lone percent sign left
        m = self._badpercent_re.search(tmp_value)
        if m:
            raise ValueError("invalid interpolation syntax in %r at position %d" % (value, m.start()))
        ConfigParser.set(self, section, option, value)

    def _interpolate(self, section: str, option: str, rawval: str, vars: ConfigDict):
        # do the string interpolation
        L = []
        self._interpolate_some(option, L, rawval, section, vars, 1)
        return "".join(L)

    _interpvar_match = re.compile(r"%\(([^)]+)\)s").match

    def _interpolate_some(self, option: str, accum: List[str], rest: str, section: str, map: ConfigDict, depth: int) -> None:
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
@@ -323,18 +343,14 @@ class SafeConfigParser(ConfigParser):
            if m is None:
                raise InterpolationSyntaxError(option, section, "bad interpolation variable reference %r" % rest)
            var = m.group(1)
            rest = rest[m.end() :]
            try:
                v = map[var]
            except KeyError:
                raise InterpolationMissingOptionError(option, section, rest, var)
            if "%" in v:
                self._interpolate_some(option, accum, v, section, map, depth + 1)
            else:
                accum.append(v)
        else:
            raise InterpolationSyntaxError(option, section, "'%' must be followed by '%' or '(', found: " + repr(rest))

View File

@@ -1,4 +1,10 @@
from typing import Dict, Iterable, List, TextIO, Union, TYPE_CHECKING

if TYPE_CHECKING:
    from .ini import INIConfig, INISection


class ConfigNamespace:
    """Abstract class representing the interface of Config objects.

    A ConfigNamespace is a collection of names mapped to values, where
@@ -12,27 +18,27 @@ class ConfigNamespace(object):
    Subclasses must implement the methods for container-like access,
    and this class will automatically provide dotted access.
    """

    # Methods that must be implemented by subclasses

    def _getitem(self, key: str) -> object:
        return NotImplementedError(key)

    def __setitem__(self, key: str, value: object):
        raise NotImplementedError(key, value)

    def __delitem__(self, key: str) -> None:
        raise NotImplementedError(key)

    def __iter__(self) -> Iterable[str]:
        # FIXME Raise instead of return
        return NotImplementedError()

    def _new_namespace(self, name: str) -> "ConfigNamespace":
        raise NotImplementedError(name)

    def __contains__(self, key: str) -> bool:
        try:
            self._getitem(key)
        except KeyError:
@@ -44,35 +50,35 @@ class ConfigNamespace(object):
    #
    # To distinguish between accesses of class members and namespace
    # keys, we first call object.__getattribute__().  If that succeeds,
    # the name is assumed to be a class member.  Otherwise, it is
    # treated as a namespace key.
    #
    # Therefore, member variables should be defined in the class,
    # not just in the __init__() function.  See BasicNamespace for
    # an example.

    def __getitem__(self, key: str) -> Union[object, "Undefined"]:
        try:
            return self._getitem(key)
        except KeyError:
            return Undefined(key, self)

    def __getattr__(self, name: str) -> Union[object, "Undefined"]:
        try:
            return self._getitem(name)
        except KeyError:
            if name.startswith("__") and name.endswith("__"):
                raise AttributeError
            return Undefined(name, self)

    def __setattr__(self, name: str, value: object) -> None:
        try:
            object.__getattribute__(self, name)
            object.__setattr__(self, name, value)
        except AttributeError:
            self.__setitem__(name, value)

    def __delattr__(self, name: str) -> None:
        try:
            object.__getattribute__(self, name)
            object.__delattr__(self, name)
@@ -82,12 +88,12 @@ class ConfigNamespace(object):
    # During unpickling, Python checks if the class has a __setstate__
    # method.  But, the data dicts have not been initialised yet, which
    # leads to _getitem and hence __getattr__ raising an exception.  So
    # we explicitly implement default __setstate__ behavior.
    def __setstate__(self, state: dict) -> None:
        self.__dict__.update(state)


class Undefined:
    """Helper class used to hold undefined names until assignment.

    This class helps create any undefined subsections when an
@@ -95,21 +101,24 @@ class Undefined(object):
    statement is "cfg.a.b.c = 42", but "cfg.a.b" does not exist yet.
    """

    def __init__(self, name: str, namespace: ConfigNamespace):
        # Assignments must bypass our own __setattr__, which would otherwise
        # try to materialise a new namespace instead of setting these fields.
        object.__setattr__(self, "name", name)
        object.__setattr__(self, "namespace", namespace)

    def __setattr__(self, name: str, value: object) -> None:
        obj = self.namespace._new_namespace(self.name)
        obj[name] = value

    def __setitem__(self, name, value) -> None:
        obj = self.namespace._new_namespace(self.name)
        obj[name] = value


# ---- Basic implementation of a ConfigNamespace


class BasicConfig(ConfigNamespace):
    """Represents a hierarchical collection of named values.
@@ -161,7 +170,7 @@ class BasicConfig(ConfigNamespace):
    Finally, values can be read from a file as follows:

    >>> from io import StringIO
    >>> sio = StringIO('''
    ... # comment
    ... ui.height = 100
@@ -181,66 +190,73 @@ class BasicConfig(ConfigNamespace):
    """

    # this makes sure that __setattr__ knows this is not a namespace key
    _data: Dict[str, str] = None

    def __init__(self):
        self._data = {}

    def _getitem(self, key: str) -> str:
        return self._data[key]

    def __setitem__(self, key: str, value: object) -> None:
        # FIXME We can add any object as 'value', but when an integer is read
        #  from a file, it will be a string. Should we explicitly convert
        #  this 'value' to string, to ensure consistency?
        #  It will stay the original type until it is written to a file.
        self._data[key] = value

    def __delitem__(self, key: str) -> None:
        del self._data[key]

    def __iter__(self) -> Iterable[str]:
        return iter(self._data)

    def __str__(self, prefix: str = "") -> str:
        lines: List[str] = []
        keys: List[str] = list(self._data.keys())
        keys.sort()
        for name in keys:
            value: object = self._data[name]
            if isinstance(value, ConfigNamespace):
                lines.append(value.__str__(prefix="%s%s." % (prefix, name)))
            else:
                if value is None:
                    lines.append("%s%s" % (prefix, name))
                else:
                    lines.append("%s%s = %s" % (prefix, name, value))
        return "\n".join(lines)

    def _new_namespace(self, name: str) -> "BasicConfig":
        obj = BasicConfig()
        self._data[name] = obj
        return obj

    def _readfp(self, fp: TextIO) -> None:
        while True:
            line: str = fp.readline()
            if not line:
                break

            line = line.strip()
            if not line:
                continue
            if line[0] == "#":
                continue
            data: List[str] = line.split("=", 1)
            if len(data) == 1:
                name = line
                value = None
            else:
                name = data[0].strip()
                value = data[1].strip()
            name_components = name.split(".")
            ns: ConfigNamespace = self
            for n in name_components[:-1]:
                if n in ns:
                    maybe_ns: object = ns[n]
                    if not isinstance(maybe_ns, ConfigNamespace):
                        raise TypeError("value-namespace conflict", n)
                    ns = maybe_ns
                else:
                    ns = ns._new_namespace(n)
            ns[name_components[-1]] = value
@@ -248,7 +264,8 @@ class BasicConfig(ConfigNamespace):

# ---- Utility functions


def update_config(target: ConfigNamespace, source: ConfigNamespace):
    """Imports values from source into target.

    Recursively walks the <source> ConfigNamespace and inserts values
@@ -276,15 +293,15 @@ def update_config(target, source):
          display_clock = True
          display_qlength = True
          width = 150
    """
    for name in sorted(source):
        value: object = source[name]
        if isinstance(value, ConfigNamespace):
            if name in target:
                maybe_myns: object = target[name]
                if not isinstance(maybe_myns, ConfigNamespace):
                    raise TypeError("value-namespace conflict")
                myns = maybe_myns
            else:
                myns = target._new_namespace(name)
            update_config(myns, value)
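
# Dotted-access sketch (doctest-style), mirroring the BasicConfig docstring:
# assigning through an Undefined placeholder materialises the namespace.
#
# >>> cfg = BasicConfig()
# >>> cfg.ui.height = 100
# >>> print(cfg)
# ui.height = 100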

View File

@@ -1,7 +1,2 @@
from configparser import *
from configparser import Error, InterpolationMissingOptionError

View File

@@ -7,7 +7,7 @@
    Example:

    >>> from io import StringIO
    >>> sio = StringIO('''# configure foo-application
    ... [foo]
    ... bar1 = qualia
@@ -39,26 +39,31 @@ Example:
# An ini parser that supports ordered sections/options
# Also supports updates, while preserving structure
# Backward-compatible with ConfigParser

import re
from typing import Any, Callable, Dict, TextIO, Iterator, List, Optional, Set, Union
from typing import TYPE_CHECKING

from . import config
from .configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError

if TYPE_CHECKING:
    from .compat import RawConfigParser


class LineType:
    line: Optional[str] = None

    def __init__(self, line: Optional[str] = None) -> None:
        if line is not None:
            self.line = line.strip("\n")

    # Return the original line for unmodified objects
    # Otherwise construct using the current attribute values
    def __str__(self) -> str:
        if self.line is not None:
            return self.line
        else:
@@ -66,78 +71,87 @@ class LineType(object):
    # If an attribute is modified after initialization
    # set line to None since it is no longer accurate.
    def __setattr__(self, name: str, value: object) -> None:
        if hasattr(self, name):
            self.__dict__["line"] = None
        self.__dict__[name] = value

    def to_string(self) -> str:
        # FIXME Raise NotImplementedError instead
        raise Exception("This method must be overridden in derived classes")


class SectionLine(LineType):
    regex = re.compile(r"^\[" r"(?P<name>[^]]+)" r"\]\s*" r"((?P<csep>;|#)(?P<comment>.*))?$")

    def __init__(
        self,
        name: str,
        comment: Optional[str] = None,
        comment_separator: Optional[str] = None,
        comment_offset: int = -1,
        line: Optional[str] = None,
    ) -> None:
        super().__init__(line)
        self.name: str = name
        self.comment: Optional[str] = comment
        self.comment_separator: Optional[str] = comment_separator
        self.comment_offset: int = comment_offset

    def to_string(self) -> str:
        out: str = "[" + self.name + "]"
        if self.comment is not None:
            # try to preserve indentation of comments
            out = (out + " ").ljust(self.comment_offset)
            out = out + self.comment_separator + self.comment
        return out

    @classmethod
    def parse(cls, line: str) -> Optional["SectionLine"]:
        m: Optional[re.Match] = cls.regex.match(line.rstrip())
        if m is None:
            return None
        return cls(m.group("name"), m.group("comment"), m.group("csep"), m.start("csep"), line)


class OptionLine(LineType):
    def __init__(
        self,
        name: str,
        value: object,
        separator: str = " = ",
        comment: Optional[str] = None,
        comment_separator: Optional[str] = None,
        comment_offset: int = -1,
        line: Optional[str] = None,
    ) -> None:
        super().__init__(line)
        self.name: str = name
        self.value: object = value
        self.separator: str = separator
        self.comment: Optional[str] = comment
        self.comment_separator: Optional[str] = comment_separator
        self.comment_offset: int = comment_offset

    def to_string(self) -> str:
        out: str = "%s%s%s" % (self.name, self.separator, self.value)
        if self.comment is not None:
            # try to preserve indentation of comments
            out = (out + " ").ljust(self.comment_offset)
            out = out + self.comment_separator + self.comment
        return out

    regex = re.compile(r"^(?P<name>[^:=\s[][^:=]*)" r"(?P<sep>[:=]\s*)" r"(?P<value>.*)$")

    @classmethod
    def parse(cls, line: str) -> Optional["OptionLine"]:
        m: Optional[re.Match] = cls.regex.match(line.rstrip())
        if m is None:
            return None

        name: str = m.group("name").rstrip()
        value: str = m.group("value")
        sep: str = m.group("name")[len(name) :] + m.group("sep")

        # comments are not detected in the regex because
        # ensuring total compatibility with ConfigParser
@@ -150,123 +164,120 @@ class OptionLine(LineType):
        # include ';' in the value needs to be addressed.
        # Also, '#' doesn't mark comments in options...

        coff: int = value.find(";")
        if coff != -1 and value[coff - 1].isspace():
            comment = value[coff + 1 :]
            csep = value[coff]
            value = value[:coff].rstrip()
            coff = m.start("value") + coff
        else:
            comment = None
            csep = None
            coff = -1

        return cls(name, value, sep, comment, csep, coff, line)


def change_comment_syntax(comment_chars: str = "%;#", allow_rem: bool = False) -> None:
    comment_chars = re.sub(r"([\]\-\^])", r"\\\1", comment_chars)
    regex: str = r"^(?P<csep>[%s]" % comment_chars
    if allow_rem:
        regex += "|[rR][eE][mM]"
    regex += r")(?P<comment>.*)$"
    CommentLine.regex = re.compile(regex)
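
# Effect sketch (doctest-style): with allow_rem=True, legacy "REM" lines
# parse as comments instead of options.
#
# >>> change_comment_syntax(";#", allow_rem=True)
# >>> CommentLine.parse("REM legacy comment") is not None
# True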

class CommentLine(LineType):
    regex: re.Pattern = re.compile(r"^(?P<csep>[;#]|[rR][eE][mM])" r"(?P<comment>.*)$")

    def __init__(self, comment: str = "", separator: str = "#", line: Optional[str] = None) -> None:
        super().__init__(line)
        self.comment: str = comment
        self.separator: str = separator

    def to_string(self) -> str:
        return self.separator + self.comment

    @classmethod
    def parse(cls, line: str) -> Optional["CommentLine"]:
        m: Optional[re.Match] = cls.regex.match(line.rstrip())
        if m is None:
            return None
        return cls(m.group("comment"), m.group("csep"), line)


class EmptyLine(LineType):
    # could make this a singleton
    def to_string(self) -> str:
        return ""

    value = property(lambda self: "")

    @classmethod
    def parse(cls, line: str) -> Optional["EmptyLine"]:
        if line.strip():
            return None
        return cls(line)


class ContinuationLine(LineType):
    regex: re.Pattern = re.compile(r"^\s+(?P<value>.*)$")

    def __init__(self, value: str, value_offset: Optional[int] = None, line: Optional[str] = None) -> None:
        super().__init__(line)
        self.value = value
        if value_offset is None:
            value_offset = 8
        self.value_offset: int = value_offset

    def to_string(self) -> str:
        return " " * self.value_offset + self.value

    @classmethod
    def parse(cls, line: str) -> Optional["ContinuationLine"]:
        m: Optional[re.Match] = cls.regex.match(line.rstrip())
        if m is None:
            return None
        return cls(m.group("value"), m.start("value"), line)


class LineContainer:
    def __init__(self, d: Optional[Union[List[LineType], LineType]] = None) -> None:
        self.contents = []
        self.orgvalue: str = None
        if d:
            if isinstance(d, list):
                self.extend(d)
            else:
                self.add(d)

    def add(self, x: LineType) -> None:
        self.contents.append(x)

    def extend(self, x: List[LineType]) -> None:
        for i in x:
            self.add(i)

    def get_name(self) -> str:
        return self.contents[0].name

    def set_name(self, data: str) -> None:
        self.contents[0].name = data

    def get_value(self) -> str:
        if self.orgvalue is not None:
            return self.orgvalue
        elif len(self.contents) == 1:
            return self.contents[0].value
        else:
            return "\n".join([("%s" % x.value) for x in self.contents if not isinstance(x, CommentLine)])

    def set_value(self, data: object) -> None:
        self.orgvalue = data
        lines: List[str] = ("%s" % data).split("\n")

        # If there is an existing ContinuationLine, use its offset
        value_offset: Optional[int] = None
        for v in self.contents:
            if isinstance(v, ContinuationLine):
                value_offset = v.value_offset
@@ -282,40 +293,45 @@ class LineContainer(object):
else: else:
self.add(EmptyLine()) self.add(EmptyLine())
def get_line_number(self) -> Optional[int]:
return self.contents[0].line_number if self.contents else None
name = property(get_name, set_name) name = property(get_name, set_name)
value = property(get_value, set_value) value = property(get_value, set_value)
def __str__(self): line_number = property(get_line_number)
s = [x.__str__() for x in self.contents]
return '\n'.join(s)
def finditer(self, key): def __str__(self) -> str:
s: List[str] = [x.__str__() for x in self.contents]
return "\n".join(s)
def finditer(self, key: str) -> Iterator[Union[SectionLine, OptionLine]]:
for x in self.contents[::-1]: for x in self.contents[::-1]:
if hasattr(x, 'name') and x.name==key: if hasattr(x, "name") and x.name == key:
yield x yield x
def find(self, key): def find(self, key: str) -> Union[SectionLine, OptionLine]:
for x in self.finditer(key): for x in self.finditer(key):
return x return x
raise KeyError(key) raise KeyError(key)
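A LineContainer groups a heading line with the lines that belong to it; the name and value properties read and write the wrapped line objects in place. A rough usage sketch (OptionLine is defined earlier in this module):

# Hedged sketch: wrap an OptionLine and drive it through the properties.
from iniparse.ini import LineContainer, OptionLine

lc = LineContainer(OptionLine("retention", "30"))
print(lc.name, lc.value)  # retention 30
lc.value = "30\n60"       # set_value() turns the second line into a ContinuationLine
print(str(lc))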
def _make_xform_property(myattrname, srcattrname=None): def _make_xform_property(myattrname: str, srcattrname: Optional[str] = None) -> property:
private_attrname = myattrname + 'value' private_attrname: str = myattrname + "value"
private_srcname = myattrname + 'source' private_srcname: str = myattrname + "source"
if srcattrname is None: if srcattrname is None:
srcattrname = myattrname srcattrname = myattrname
def getfn(self): def getfn(self) -> Callable:
srcobj = getattr(self, private_srcname) srcobj: Optional[object] = getattr(self, private_srcname)
if srcobj is not None: if srcobj is not None:
return getattr(srcobj, srcattrname) return getattr(srcobj, srcattrname)
else: else:
return getattr(self, private_attrname) return getattr(self, private_attrname)
def setfn(self, value): def setfn(self, value: Callable) -> None:
srcobj = getattr(self, private_srcname) srcobj: Optional[object] = getattr(self, private_srcname)
if srcobj is not None: if srcobj is not None:
setattr(srcobj, srcattrname, value) setattr(srcobj, srcattrname, value)
else: else:
@@ -325,31 +341,38 @@ def _make_xform_property(myattrname, srcattrname=None):
class INISection(config.ConfigNamespace): class INISection(config.ConfigNamespace):
_lines = None _lines: List[LineContainer] = None
_options = None _options: Dict[str, object] = None
_defaults = None _defaults: Optional["INISection"] = None
_optionxformvalue = None _optionxformvalue: Optional[Callable] = None
_optionxformsource = None _optionxformsource: "INIConfig" = None
_compat_skip_empty_lines = set() _compat_skip_empty_lines: Set[str] = set()
def __init__(self, lineobj, defaults=None, optionxformvalue=None, optionxformsource=None): def __init__(
self,
lineobj: LineContainer,
defaults: Optional["INISection"] = None,
optionxformvalue: Optional["INIConfig"] = None,
optionxformsource: Optional["INIConfig"] = None,
) -> None:
self._lines = [lineobj] self._lines = [lineobj]
self._defaults = defaults self._defaults = defaults
self._optionxformvalue = optionxformvalue self._optionxformvalue = optionxformvalue
self._optionxformsource = optionxformsource self._optionxformsource = optionxformsource
self._options = {} self._options = {}
_optionxform = _make_xform_property('_optionxform') _optionxform = _make_xform_property("_optionxform")
def _compat_get(self, key): def _compat_get(self, key: str) -> str:
# identical to __getitem__ except that _compat_XXX # identical to __getitem__ except that _compat_XXX
# is checked for backward-compatible handling # is checked for backward-compatible handling
if key == '__name__': if key == "__name__":
return self._lines[-1].name return self._lines[-1].name
if self._optionxform: key = self._optionxform(key) if self._optionxform:
key = self._optionxform(key)
try: try:
value = self._options[key].value value: str = self._options[key].value
del_empty = key in self._compat_skip_empty_lines del_empty: bool = key in self._compat_skip_empty_lines
except KeyError: except KeyError:
if self._defaults and key in self._defaults._options: if self._defaults and key in self._defaults._options:
value = self._defaults._options[key].value value = self._defaults._options[key].value
@@ -357,13 +380,14 @@ class INISection(config.ConfigNamespace):
else: else:
raise raise
if del_empty: if del_empty:
value = re.sub('\n+', '\n', value) value = re.sub("\n+", "\n", value)
return value return value
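For options flagged in _compat_skip_empty_lines, the re.sub above collapses runs of blank lines inside a multi-line value; in isolation the transformation is just:

import re

# What the compat branch does to a value containing blank lines:
print(re.sub("\n+", "\n", "first\n\n\nsecond"))  # -> "first\nsecond"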
def _getitem(self, key): def _getitem(self, key: str) -> object:
if key == '__name__': if key == "__name__":
return self._lines[-1].name return self._lines[-1].name
if self._optionxform: key = self._optionxform(key) if self._optionxform:
key = self._optionxform(key)
try: try:
return self._options[key].value return self._options[key].value
except KeyError: except KeyError:
@@ -372,22 +396,25 @@ class INISection(config.ConfigNamespace):
else: else:
raise raise
def __setitem__(self, key, value): def __setitem__(self, key: str, value: object) -> None:
if self._optionxform: xkey = self._optionxform(key) if self._optionxform:
else: xkey = key xkey = self._optionxform(key)
else:
xkey = key
if xkey in self._compat_skip_empty_lines: if xkey in self._compat_skip_empty_lines:
self._compat_skip_empty_lines.remove(xkey) self._compat_skip_empty_lines.remove(xkey)
if xkey not in self._options: if xkey not in self._options:
# create a dummy object - value may have multiple lines # create a dummy object - value may have multiple lines
obj = LineContainer(OptionLine(key, '')) obj = LineContainer(OptionLine(key, ""))
self._lines[-1].add(obj) self._lines[-1].add(obj)
self._options[xkey] = obj self._options[xkey] = obj
# the set_value() function in LineContainer # the set_value() function in LineContainer
# automatically handles multi-line values # automatically handles multi-line values
self._options[xkey].value = value self._options[xkey].value = value
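Because set_value() does the splitting, assigning a multi-line string to an option transparently becomes an option line plus continuation lines, as sketched below (INIConfig is defined further down; the section and option names are made up):

from iniparse.ini import INIConfig

cfg = INIConfig()
cfg.paths.include = "/etc\n/usr/local/etc"  # second line becomes a ContinuationLine
print(str(cfg))  # prints a [paths] section with an indented continuation line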
def __delitem__(self, key): def __delitem__(self, key: str) -> None:
if self._optionxform: key = self._optionxform(key) if self._optionxform:
key = self._optionxform(key)
if key in self._compat_skip_empty_lines: if key in self._compat_skip_empty_lines:
self._compat_skip_empty_lines.remove(key) self._compat_skip_empty_lines.remove(key)
for l in self._lines: for l in self._lines:
@@ -395,14 +422,16 @@ class INISection(config.ConfigNamespace):
for o in l.contents: for o in l.contents:
if isinstance(o, LineContainer): if isinstance(o, LineContainer):
n = o.name n = o.name
if self._optionxform: n = self._optionxform(n) if self._optionxform:
if key != n: remaining.append(o) n = self._optionxform(n)
if key != n:
remaining.append(o)
else: else:
remaining.append(o) remaining.append(o)
l.contents = remaining l.contents = remaining
del self._options[key] del self._options[key]
def __iter__(self): def __iter__(self) -> Iterator[str]:
d = set() d = set()
for l in self._lines: for l in self._lines:
for x in l.contents: for x in l.contents:
@@ -421,26 +450,25 @@ class INISection(config.ConfigNamespace):
d.add(x) d.add(x)
def _new_namespace(self, name): def _new_namespace(self, name):
raise Exception('No sub-sections allowed', name) raise Exception("No sub-sections allowed", name)
def make_comment(line): def make_comment(line: str) -> CommentLine:
return CommentLine(line.rstrip('\n')) return CommentLine(line.rstrip("\n"))
def readline_iterator(f): def readline_iterator(f: TextIO) -> Iterator[str]:
"""iterate over a file by only using the file object's readline method""" """Iterate over a file by only using the file object's readline method."""
have_newline: bool = False
have_newline = False
while True: while True:
line = f.readline() line: str = f.readline()
if not line: if not line:
if have_newline: if have_newline:
yield "" yield ""
return return
if line.endswith('\n'): if line.endswith("\n"):
have_newline = True have_newline = True
else: else:
have_newline = False have_newline = False
@@ -448,57 +476,67 @@ def readline_iterator(f):
yield line yield line
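The iterator deliberately touches nothing but readline(), so it also works on minimal file-like objects, and the have_newline bookkeeping yields one final empty string when the input ends with a newline. A quick demonstration with io.StringIO:

import io

# Hedged demo of the readline-only iteration:
print(list(readline_iterator(io.StringIO("a=1\nb=2\n"))))
# ['a=1\n', 'b=2\n', '']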
def lower(x): def lower(x: str) -> str:
return x.lower() return x.lower()
class INIConfig(config.ConfigNamespace): class INIConfig(config.ConfigNamespace):
_data = None _data: LineContainer = None
_sections = None _sections: Dict[str, object] = None
_defaults = None _defaults: INISection = None
_optionxformvalue = None _optionxformvalue: Callable = None
_optionxformsource = None _optionxformsource: Optional["INIConfig"] = None
_sectionxformvalue = None _sectionxformvalue: Optional[Callable] = None
_sectionxformsource = None _sectionxformsource: Optional["INIConfig"] = None
_parse_exc = None _parse_exc = None
_bom = False _bom = False
def __init__(self, fp=None, defaults=None, parse_exc=True, def __init__(
optionxformvalue=lower, optionxformsource=None, self,
sectionxformvalue=None, sectionxformsource=None): fp: Optional[TextIO] = None,
defaults: Optional[Dict[str, object]] = None,
parse_exc: bool = True,
optionxformvalue: Callable = lower,
optionxformsource: Optional[Union["INIConfig", "RawConfigParser"]] = None,
sectionxformvalue: Optional["INIConfig"] = None,
sectionxformsource: Optional["INIConfig"] = None,
) -> None:
self._data = LineContainer() self._data = LineContainer()
self._parse_exc = parse_exc self._parse_exc = parse_exc
self._optionxformvalue = optionxformvalue self._optionxformvalue = optionxformvalue
self._optionxformsource = optionxformsource self._optionxformsource = optionxformsource
self._sectionxformvalue = sectionxformvalue self._sectionxformvalue = sectionxformvalue
self._sectionxformsource = sectionxformsource self._sectionxformsource = sectionxformsource
self._sections = {} self._sections: Dict[str, INISection] = {}
if defaults is None: defaults = {} if defaults is None:
defaults = {}
self._defaults = INISection(LineContainer(), optionxformsource=self) self._defaults = INISection(LineContainer(), optionxformsource=self)
for name, value in defaults.items(): for name, value in defaults.items():
self._defaults[name] = value self._defaults[name] = value
if fp is not None: if fp is not None:
self._readfp(fp) self._readfp(fp)
_optionxform = _make_xform_property('_optionxform', 'optionxform') _optionxform = _make_xform_property("_optionxform", "optionxform")
_sectionxform = _make_xform_property('_sectionxform', 'optionxform') _sectionxform = _make_xform_property("_sectionxform", "optionxform")
def _getitem(self, key): def _getitem(self, key: str) -> INISection:
if key == DEFAULTSECT: if key == DEFAULTSECT:
return self._defaults return self._defaults
if self._sectionxform: key = self._sectionxform(key) if self._sectionxform:
key = self._sectionxform(key)
return self._sections[key] return self._sections[key]
def __setitem__(self, key, value): def __setitem__(self, key: str, value: object) -> None:
raise Exception('Values must be inside sections', key, value) raise Exception("Values must be inside sections", key, value)
def __delitem__(self, key): def __delitem__(self, key: str) -> None:
if self._sectionxform: key = self._sectionxform(key) if self._sectionxform:
key = self._sectionxform(key)
for line in self._sections[key]._lines: for line in self._sections[key]._lines:
self._data.contents.remove(line) self._data.contents.remove(line)
del self._sections[key] del self._sections[key]
def __iter__(self): def __iter__(self) -> Iterator[str]:
d = set() d = set()
d.add(DEFAULTSECT) d.add(DEFAULTSECT)
for x in self._data.contents: for x in self._data.contents:
@@ -507,35 +545,31 @@ class INIConfig(config.ConfigNamespace):
yield x.name yield x.name
d.add(x.name) d.add(x.name)
def _new_namespace(self, name): def _new_namespace(self, name: str) -> INISection:
if self._data.contents: if self._data.contents:
self._data.add(EmptyLine()) self._data.add(EmptyLine())
obj = LineContainer(SectionLine(name)) obj = LineContainer(SectionLine(name))
self._data.add(obj) self._data.add(obj)
if self._sectionxform: name = self._sectionxform(name) if self._sectionxform:
name = self._sectionxform(name)
if name in self._sections: if name in self._sections:
ns = self._sections[name] ns = self._sections[name]
ns._lines.append(obj) ns._lines.append(obj)
else: else:
ns = INISection(obj, defaults=self._defaults, ns = INISection(obj, defaults=self._defaults, optionxformsource=self)
optionxformsource=self)
self._sections[name] = ns self._sections[name] = ns
return ns return ns
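_new_namespace is what makes undeclared sections spring into existence on first assignment, and it also inserts an EmptyLine separator before every section after the first. A hedged sketch (section and option names are made up):

cfg = INIConfig()
cfg.backup1.type = "rsync+ssh"
cfg.backup2.type = "pgsql+ssh"  # a blank line is emitted between the two sections
print(str(cfg))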
def __str__(self): def __str__(self) -> str:
if self._bom: if self._bom:
fmt = u'\ufeff%s' fmt = "\ufeff%s"
else: else:
fmt = '%s' fmt = "%s"
return fmt % self._data.__str__() return fmt % self._data.__str__()
__unicode__ = __str__ _line_types = [EmptyLine, CommentLine, SectionLine, OptionLine, ContinuationLine]
_line_types = [EmptyLine, CommentLine, def _parse(self, line: str) -> Any:
SectionLine, OptionLine,
ContinuationLine]
def _parse(self, line):
for linetype in self._line_types: for linetype in self._line_types:
lineobj = linetype.parse(line) lineobj = linetype.parse(line)
if lineobj: if lineobj:
@@ -544,7 +578,7 @@ class INIConfig(config.ConfigNamespace):
# can't parse line # can't parse line
return None return None
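_parse tries each entry of _line_types in order and returns the first object a parse() call yields, so the list order defines matching precedence. Classifying a few raw lines through the same dispatch (a hedged sketch; _parse is internal API):

cfg = INIConfig()
for raw in ("[backup]", "dir = /srv", "; note", ""):
    obj = cfg._parse(raw)
    print(repr(raw), "->", type(obj).__name__ if obj else None)
# '[backup]' -> SectionLine, 'dir = /srv' -> OptionLine,
# '; note' -> CommentLine, '' -> EmptyLine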
def _readfp(self, fp): def _readfp(self, fp: TextIO) -> None:
cur_section = None cur_section = None
cur_option = None cur_option = None
cur_section_name = None cur_section_name = None
@@ -554,21 +588,20 @@ class INIConfig(config.ConfigNamespace):
try: try:
fname = fp.name fname = fp.name
except AttributeError: except AttributeError:
fname = '<???>' fname = "<???>"
line_count = 0 line_count = 0
exc = None exc = None
line = None line = None
for line in readline_iterator(fp): for line in readline_iterator(fp):
# Check for BOM on first line # Check for BOM on first line
if line_count == 0 and isinstance(line, six.text_type): if line_count == 0 and isinstance(line, str):
if line[0] == u'\ufeff': if line[0] == "\ufeff":
line = line[1:] line = line[1:]
self._bom = True self._bom = True
line_obj = self._parse(line) line_obj = self._parse(line)
line_count += 1 line_count += 1
if not cur_section and not isinstance(line_obj, (CommentLine, EmptyLine, SectionLine)): if not cur_section and not isinstance(line_obj, (CommentLine, EmptyLine, SectionLine)):
if self._parse_exc: if self._parse_exc:
raise MissingSectionHeaderError(fname, line_count, line) raise MissingSectionHeaderError(fname, line_count, line)
@@ -588,7 +621,7 @@ class INIConfig(config.ConfigNamespace):
cur_option.extend(pending_lines) cur_option.extend(pending_lines)
pending_lines = [] pending_lines = []
if pending_empty_lines: if pending_empty_lines:
optobj._compat_skip_empty_lines.add(cur_option_name) optobj._compat_skip_empty_lines.add(cur_option_name)  # noqa: F821
pending_empty_lines = False pending_empty_lines = False
cur_option.add(line_obj) cur_option.add(line_obj)
else: else:
@@ -633,9 +666,7 @@ class INIConfig(config.ConfigNamespace):
else: else:
cur_section_name = cur_section.name cur_section_name = cur_section.name
if cur_section_name not in self._sections: if cur_section_name not in self._sections:
self._sections[cur_section_name] = \ self._sections[cur_section_name] = INISection(cur_section, defaults=self._defaults, optionxformsource=self)
INISection(cur_section, defaults=self._defaults,
optionxformsource=self)
else: else:
self._sections[cur_section_name]._lines.append(cur_section) self._sections[cur_section_name]._lines.append(cur_section)
@@ -644,8 +675,11 @@ class INIConfig(config.ConfigNamespace):
if isinstance(line_obj, EmptyLine): if isinstance(line_obj, EmptyLine):
pending_empty_lines = True pending_empty_lines = True
if line_obj:
line_obj.line_number = line_count
self._data.extend(pending_lines) self._data.extend(pending_lines)
if line and line[-1] == '\n': if line and line[-1] == "\n":
self._data.add(EmptyLine()) self._data.add(EmptyLine())
if exc: if exc:

View File

@@ -1,8 +1,13 @@
from typing import TYPE_CHECKING, List
from . import compat from . import compat
from .ini import EmptyLine, LineContainer from .ini import EmptyLine, LineContainer
if TYPE_CHECKING:
from .ini import LineType
def tidy(cfg):
def tidy(cfg: compat.RawConfigParser):
"""Clean up blank lines. """Clean up blank lines.
This function makes the configuration look clean and This function makes the configuration look clean and
@@ -19,8 +24,7 @@ def tidy(cfg):
if isinstance(cont[i], LineContainer): if isinstance(cont[i], LineContainer):
tidy_section(cont[i]) tidy_section(cont[i])
i += 1 i += 1
elif (isinstance(cont[i-1], EmptyLine) and elif isinstance(cont[i - 1], EmptyLine) and isinstance(cont[i], EmptyLine):
isinstance(cont[i], EmptyLine)):
del cont[i] del cont[i]
else: else:
i += 1 i += 1
@@ -34,11 +38,11 @@ def tidy(cfg):
cont.append(EmptyLine()) cont.append(EmptyLine())
def tidy_section(lc): def tidy_section(lc: "LineContainer"):
cont = lc.contents cont: List[LineType] = lc.contents
i = 1 i: int = 1
while i < len(cont): while i < len(cont):
if isinstance(cont[i-1], EmptyLine) and isinstance(cont[i], EmptyLine): if isinstance(cont[i - 1], EmptyLine) and isinstance(cont[i], EmptyLine):
del cont[i] del cont[i]
else: else:
i += 1 i += 1
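A short usage sketch for tidy(), assuming iniparse's top-level __init__ re-exports both names (the file name is hypothetical):

from iniparse import INIConfig, tidy

cfg = INIConfig(open("tisbackup.ini"))
tidy(cfg)  # collapse duplicate blank lines in place
print(str(cfg))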

View File

@@ -1,10 +1,11 @@
[tool.black] [tool.black]
line-length = 140 line-length = 140
[tool.ruff] [tool.ruff]
# Allow lines to be as long as 140. # Allow lines to be as long as 140.
line-length = 140 line-length = 140
indent-width = 4 indent-width = 4
[tool.ruff.lint] [tool.ruff.lint]
ignore = ["F401","F403","F405","E402"] ignore = ["F401","F403","F405","E402","E701","E722","E741"]

View File

@@ -5,14 +5,16 @@ from huey import RedisHuey
from tisbackup import tis_backup from tisbackup import tis_backup
huey = RedisHuey('tisbackup', host='localhost') huey = RedisHuey("tisbackup", host="localhost")
@huey.task() @huey.task()
def run_export_backup(base, config_file, mount_point, backup_sections): def run_export_backup(base, config_file, mount_point, backup_sections):
try: try:
#Log # Log
logger = logging.getLogger('tisbackup') logger = logging.getLogger("tisbackup")
logger.setLevel(logging.INFO) logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
handler = logging.StreamHandler() handler = logging.StreamHandler()
handler.setFormatter(formatter) handler.setFormatter(formatter)
logger.addHandler(handler) logger.addHandler(handler)
@@ -24,24 +26,26 @@ def run_export_backup(base, config_file, mount_point, backup_sections):
backup_sections = backup_sections.split(",") backup_sections = backup_sections.split(",")
else: else:
backup_sections = [] backup_sections = []
backup = tis_backup(dry_run=False,verbose=True,backup_base_dir=base) backup = tis_backup(dry_run=False, verbose=True, backup_base_dir=base)
backup.read_ini_file(config_file) backup.read_ini_file(config_file)
mount_point = mount_point mount_point = mount_point
backup.export_backups(backup_sections,mount_point) backup.export_backups(backup_sections, mount_point)
except Exception as e: except Exception as e:
return(str(e)) return str(e)
finally: finally:
os.system("/bin/umount %s" % mount_point) os.system("/bin/umount %s" % mount_point)
os.rmdir(mount_point) os.rmdir(mount_point)
return "ok" return "ok"
def get_task(): def get_task():
return task return task
def set_task(my_task): def set_task(my_task):
global task global task
task = my_task task = my_task
task = None task = None

View File

@@ -375,10 +375,8 @@ def run_command(cmd, info=""):
def check_mount_disk(partition_name, refresh): def check_mount_disk(partition_name, refresh):
mount_point = check_already_mount(partition_name, refresh) mount_point = check_already_mount(partition_name, refresh)
if not refresh: if not refresh:
mount_point = "/mnt/TISBACKUP-" + str(time.time()) mount_point = "/mnt/TISBACKUP-" + str(time.time())
os.mkdir(mount_point) os.mkdir(mount_point)
flash("must mount " + partition_name) flash("must mount " + partition_name)
@@ -425,7 +423,6 @@ def last_backup():
@app.route("/export_backup") @app.route("/export_backup")
def export_backup(): def export_backup():
raise_error("", "") raise_error("", "")
backup_dict = read_config() backup_dict = read_config()
sections = [] sections = []