TISbackup/libtisbackup/backup_xva.py

#!/usr/bin/python3
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import datetime
import hashlib
import logging
import os
import re
import socket
import ssl
import tarfile
import urllib.error
import urllib.parse
import urllib.request
from stat import *

import requests

from . import XenAPI
from .common import *

# Allow connecting to XCP/XenServer hosts with self-signed certificates
if hasattr(ssl, "_create_unverified_context"):
    ssl._create_default_https_context = ssl._create_unverified_context


class backup_xva(backup_generic):
    """Backup a VM running on an XCP host as an XVA file (requires xe tools and XenAPI)"""

    type = "xen-xva"
    required_params = backup_generic.required_params + ["xcphost", "password_file", "server_name"]
    optional_params = backup_generic.optional_params + [
        "enable_https",
        "halt_vm",
        "verify_export",
        "reuse_snapshot",
        "ignore_proxies",
        "use_compression",
    ]

    enable_https = "no"
    halt_vm = "no"
    verify_export = "no"
    reuse_snapshot = "no"
    ignore_proxies = "yes"
    use_compression = "true"

    if str2bool(ignore_proxies):
        os.environ["http_proxy"] = ""
        os.environ["https_proxy"] = ""

    def verify_export_xva(self, filename):
        self.logger.debug("[%s] Verify xva export integrity", self.server_name)
        tar = tarfile.open(filename)
        members = tar.getmembers()
        for tarinfo in members:
            # XVA data blocks have a purely numeric basename; each one ships with a
            # "<name>.checksum" member holding the sha1 hex digest of the block.
            if re.search("^[0-9]*$", os.path.basename(tarinfo.name)):
                sha1sum = hashlib.sha1(tar.extractfile(tarinfo).read()).hexdigest()
                # decode the stored checksum so it compares against hexdigest()'s str
                sha1sum2 = tar.extractfile(tarinfo.name + ".checksum").read().decode().strip()
                if not sha1sum == sha1sum2:
                    raise Exception("File corrupt")
        tar.close()
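
    # Illustrative only: the same integrity check can be run by hand on an existing
    # export from a configured backup_xva instance "b" (the path is made up):
    #   b.verify_export_xva("/backup/myvm/myvm-20240101-02h30m00.xva")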

    def export_xva(self, vdi_name, filename, halt_vm, dry_run, enable_https=True, reuse_snapshot="no"):
        user_xen, password_xen, null = open(self.password_file).read().split("\n")

        session = XenAPI.Session("https://" + self.xcphost)
        try:
            session.login_with_password(user_xen, password_xen)
        except XenAPI.Failure as error:
            msg, ip = error.details
            if msg == "HOST_IS_SLAVE":
                # We contacted a pool slave: retry against the pool master it points to
                xcphost = ip
                session = XenAPI.Session("https://" + xcphost)
                session.login_with_password(user_xen, password_xen)

        if not session.xenapi.VM.get_by_name_label(vdi_name):
            return "bad VM name: %s" % vdi_name

        vm = session.xenapi.VM.get_by_name_label(vdi_name)[0]
        status_vm = session.xenapi.VM.get_power_state(vm)

        self.logger.debug("[%s] Check if previous failed backups exist", vdi_name)
        backups_fail = [f for f in os.listdir(self.backup_dir) if f.startswith(vdi_name) and f.endswith(".tmp")]
        for backup_fail in backups_fail:
            self.logger.debug('[%s] Delete backup "%s"', vdi_name, backup_fail)
            os.unlink(os.path.join(self.backup_dir, backup_fail))

        # Snapshot mode: export a snapshot of the VM instead of halting it
        if not str2bool(halt_vm):
            self.logger.debug("[%s] Check if previous tisbackup snapshots exist", vdi_name)
            old_snapshots = session.xenapi.VM.get_by_name_label("tisbackup-%s" % (vdi_name))
            self.logger.debug("[%s] Old snaps count %s", vdi_name, len(old_snapshots))

            if len(old_snapshots) == 1 and str2bool(reuse_snapshot):
                snapshot = old_snapshots[0]
                self.logger.debug('[%s] Reusing snap "%s"', vdi_name, session.xenapi.VM.get_name_description(snapshot))
                vm = snapshot
            else:
                self.logger.debug("[%s] Deleting %s old snaps", vdi_name, len(old_snapshots))
                for old_snapshot in old_snapshots:
                    self.logger.debug("[%s] Destroy snapshot %s", vdi_name, session.xenapi.VM.get_name_description(old_snapshot))
                    try:
                        for vbd in session.xenapi.VM.get_VBDs(old_snapshot):
                            if session.xenapi.VBD.get_type(vbd) == "CD" and not session.xenapi.VBD.get_record(vbd)["empty"]:
                                session.xenapi.VBD.eject(vbd)
                            else:
                                vdi = session.xenapi.VBD.get_VDI(vbd)
                                if "NULL" not in vdi:
                                    session.xenapi.VDI.destroy(vdi)
                        session.xenapi.VM.destroy(old_snapshot)
                    except XenAPI.Failure as error:
                        return "error when destroying snapshot %s" % (error)

                now = datetime.datetime.now()
                self.logger.debug("[%s] Snapshot in progress", vdi_name)
                try:
                    snapshot = session.xenapi.VM.snapshot(vm, "tisbackup-%s" % (vdi_name))
                    self.logger.debug("[%s] got snapshot %s", vdi_name, snapshot)
                except XenAPI.Failure as error:
                    return "error when snapshotting %s" % (error)
                # get snapshot opaqueRef
                vm = session.xenapi.VM.get_by_name_label("tisbackup-%s" % (vdi_name))[0]
                session.xenapi.VM.set_name_description(snapshot, "snapshot created by tisbackup on: %s" % (now.strftime("%Y-%m-%d %H:%M")))
        else:
            self.logger.debug("[%s] Status of VM: %s", self.backup_name, status_vm)
            if status_vm == "Running":
                self.logger.debug("[%s] Shutdown in progress", self.backup_name)
                if dry_run:
                    print("session.xenapi.VM.clean_shutdown(vm)")
                else:
                    session.xenapi.VM.clean_shutdown(vm)

        try:
            try:
                filename_temp = filename + ".tmp"
                self.logger.debug("[%s] Copy in progress", self.backup_name)
                if not str2bool(self.use_compression):
                    socket.setdefaulttimeout(120)

                scheme = "http://"
                if str2bool(enable_https):
                    scheme = "https://"
                top_level_url = (
                    scheme + self.xcphost + "/export?use_compression=" + self.use_compression + "&uuid=" + session.xenapi.VM.get_uuid(vm)
                )
                r = requests.get(top_level_url, auth=(user_xen, password_xen))
                open(filename_temp, "wb").write(r.content)
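                # Note (illustrative, not original behaviour): r.content buffers the
                # whole XVA in memory. A streaming variant using the same requests API
                # would look like:
                #   with requests.get(top_level_url, auth=(user_xen, password_xen), stream=True) as r:
                #       with open(filename_temp, "wb") as f:
                #           for chunk in r.iter_content(chunk_size=1024 * 1024):
                #               f.write(chunk)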

            except Exception as e:
                self.logger.error("[%s] error when fetching snap: %s", "tisbackup-%s" % (vdi_name), e)
                if os.path.exists(filename_temp):
                    os.unlink(filename_temp)
                raise

        finally:
            # Clean up: destroy the temporary snapshot (snapshot mode) or restart
            # the VM that was shut down for the export (halt mode).
            if not str2bool(halt_vm):
                self.logger.debug("[%s] Destroy snapshot", "tisbackup-%s" % (vdi_name))
                try:
                    for vbd in session.xenapi.VM.get_VBDs(snapshot):
                        if session.xenapi.VBD.get_type(vbd) == "CD" and not session.xenapi.VBD.get_record(vbd)["empty"]:
                            session.xenapi.VBD.eject(vbd)
                        else:
                            vdi = session.xenapi.VBD.get_VDI(vbd)
                            if "NULL" not in vdi:
                                session.xenapi.VDI.destroy(vdi)
                    session.xenapi.VM.destroy(snapshot)
                except XenAPI.Failure as error:
                    return "error when destroying snapshot %s" % (error)
            elif status_vm == "Running":
                self.logger.debug("[%s] Restart in progress", self.backup_name)
                if dry_run:
                    print("session.xenapi.Async.VM.start(vm,False,True)")
                else:
                    session.xenapi.Async.VM.start(vm, False, True)

        session.logout()

        if os.path.exists(filename_temp):
            # Sanity check: make sure the downloaded file is a readable tar archive
            tar = os.system('tar tf "%s" > /dev/null' % filename_temp)
            if not tar == 0:
                os.unlink(filename_temp)
                return "Tar error"
            if str2bool(self.verify_export):
                self.verify_export_xva(filename_temp)
            os.rename(filename_temp, filename)

        return 0

    def do_backup(self, stats):
        try:
            dest_filename = os.path.join(self.backup_dir, "%s-%s.%s" % (self.backup_name, self.backup_start_date, "xva"))

            cmd = self.export_xva(
                vdi_name=self.server_name,
                filename=dest_filename,
                halt_vm=self.halt_vm,
                enable_https=self.enable_https,
                dry_run=self.dry_run,
                reuse_snapshot=self.reuse_snapshot,
            )

            if os.path.exists(dest_filename):
                stats["written_bytes"] = os.stat(dest_filename)[ST_SIZE]
                stats["total_files_count"] = 1
                stats["written_files_count"] = 1
                stats["total_bytes"] = stats["written_bytes"]
            else:
                stats["written_bytes"] = 0

            stats["backup_location"] = dest_filename

            if cmd == 0:
                stats["log"] = "XVA backup from %s OK, %d bytes written" % (self.server_name, stats["written_bytes"])
                stats["status"] = "OK"
            else:
                raise Exception(cmd)

        except BaseException as e:
            stats["status"] = "ERROR"
            stats["log"] = str(e)
            raise

    def register_existingbackups(self):
        """Scan the backup dir and insert stats for existing backups into the database"""
        registered = [
            b["backup_location"]
            for b in self.dbstat.query("select distinct backup_location from stats where backup_name=?", (self.backup_name,))
        ]

        filelist = os.listdir(self.backup_dir)
        filelist.sort()
        for item in filelist:
            if item.endswith(".xva"):
                dir_name = os.path.join(self.backup_dir, item)
                if dir_name not in registered:
                    # File names are expected to look like "<backup_name>-YYYYMMDD-HHhMMmSS.xva"
                    start = (
                        datetime.datetime.strptime(item, self.backup_name + "-%Y%m%d-%Hh%Mm%S.xva") + datetime.timedelta(0, 30 * 60)
                    ).isoformat()
                    if fileisodate(dir_name) > start:
                        stop = fileisodate(dir_name)
                    else:
                        stop = start
                    self.logger.info("Registering %s started on %s", dir_name, start)
                    self.logger.debug("  Disk usage %s", 'du -sb "%s"' % dir_name)
                    if not self.dry_run:
                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split("\t")[0])
                    else:
                        size_bytes = 0
                    self.logger.debug("  Size in bytes : %i", size_bytes)

                    if not self.dry_run:
                        self.dbstat.add(
                            self.backup_name,
                            self.server_name,
                            "",
                            backup_start=start,
                            backup_end=stop,
                            status="OK",
                            total_bytes=size_bytes,
                            backup_location=dir_name,
                            TYPE="BACKUP",
                        )
                else:
                    self.logger.info("Skipping %s, already registered", dir_name)


register_driver(backup_xva)

if __name__ == "__main__":
    logger = logging.getLogger("tisbackup")
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    cp = ConfigParser()
    cp.read("/opt/tisbackup/configtest.ini")

    b = backup_xva()
    b.read_config(cp)
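
    # Illustrative sketch only (not part of the original file): with a complete
    # configuration, a manual test run would then pass a stats dict to the driver.
    # Attributes such as backup_dir, backup_name and backup_start_date are assumed
    # to be provided by read_config()/backup_generic.
    #   stats = {}
    #   b.do_backup(stats)
    #   print(stats)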