#!/usr/bin/python3
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------

import os
import sys
from os.path import isfile, join

tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
sys.path.append(os.path.join(tisbackup_root_dir, "lib"))
sys.path.append(os.path.join(tisbackup_root_dir, "libtisbackup"))


import glob
import json
import logging
import re
import time
from shutil import *
from urllib.parse import urlparse

from flask import Flask, Response, abort, appcontext_pushed, flash, g, jsonify, redirect, render_template, request, session, url_for
from iniparse import ConfigParser, RawConfigParser

from config import huey
from libtisbackup.common import *
from tasks import get_task, run_export_backup, set_task
from tisbackup import tis_backup

cp = ConfigParser()
cp.read("/etc/tis/tisbackup_gui.ini")

CONFIG = cp.get("general", "config_tisbackup").split(",")
SECTIONS = cp.get("general", "sections")
ADMIN_EMAIL = cp.get("general", "ADMIN_EMAIL")
BASE_DIR = cp.get("general", "base_config_dir")

tisbackup_config_file = CONFIG[0]
config_number = 0

cp = ConfigParser()
cp.read(tisbackup_config_file)
backup_base_dir = cp.get("global", "backup_base_dir")
dbstat = BackupStat(os.path.join(backup_base_dir, "log", "tisbackup.sqlite"))
mindate = None
error = None
info = None
app = Flask(__name__)
app.secret_key = "fsiqefiuqsefARZ4Zfesfe34234dfzefzfe"
app.config["PROPAGATE_EXCEPTIONS"] = True

tasks_db = os.path.join(tisbackup_root_dir, "tasks.sqlite")


def read_all_configs(base_dir):
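    """Aggregate the backup sections of every tisbackup config file found in base_dir, keyed by backup type."""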
    raw_configs = []
    list_config = []
    # config_base_dir = base_dir

    for file in os.listdir(base_dir):
        if isfile(join(base_dir, file)):
            raw_configs.append(join(base_dir, file))

    for elem in raw_configs:
        # only files starting with a [global] section are tisbackup configs
        with open(elem) as f:
            line = f.readline()
        if "global" in line:
            list_config.append(elem)

    backup_dict = {}
    backup_dict["rsync_ssh_list"] = []
    backup_dict["rsync_btrfs_list"] = []
    backup_dict["rsync_list"] = []
    backup_dict["null_list"] = []
    backup_dict["pgsql_list"] = []
    backup_dict["mysql_list"] = []
    # backup_dict['sqlserver_list'] = []
    backup_dict["xva_list"] = []
    backup_dict["metadata_list"] = []
    # backup_dict['switch_list'] = []
    # backup_dict['oracle_list'] = []

    result = []
    cp = ConfigParser()
    for config_file in list_config:
        cp.read(config_file)

        backup_base_dir = cp.get("global", "backup_base_dir")
        backup = tis_backup(backup_base_dir=backup_base_dir)
        backup.read_ini_file(config_file)

        backup_sections = SECTIONS or []

        all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
        if not backup_sections:
            backup_sections = all_sections
        else:
            for b in backup_sections:
                if b not in all_sections:
                    raise Exception("Section %s is not defined in config file" % b)

        # never used..
        # if not backup_sections:
        #     sections = [backup_item.backup_name for backup_item in backup.backup_list]

        for backup_item in backup.backup_list:
            if backup_item.backup_name in backup_sections:
                b = {}
                for attrib_name in backup_item.required_params + backup_item.optional_params:
                    if hasattr(backup_item, attrib_name):
                        b[attrib_name] = getattr(backup_item, attrib_name)
                result.append(b)

        for row in result:
            backup_name = row["backup_name"]
            server_name = row["server_name"]
            backup_type = row["type"]
            if backup_type == "xcp-dump-metadata":
                backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
            if backup_type == "rsync+ssh":
                remote_dir = row["remote_dir"]
                backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
            if backup_type == "rsync+btrfs+ssh":
                remote_dir = row["remote_dir"]
                backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
            if backup_type == "rsync":
                remote_dir = row["remote_dir"]
                backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
            if backup_type == "null":
                backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
            if backup_type == "pgsql+ssh":
                db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
                backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
            if backup_type == "mysql+ssh":
                db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
                backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
            # if backup_type == "sqlserver+ssh":
            #     db_name = row['db_name']
            #     backup_dict['sqlserver_list'].append(
            #         [server_name, backup_name, backup_type, db_name])
            # if backup_type == "oracle+ssh":
            #     db_name = row['db_name']
            #     backup_dict['oracle_list'].append(
            #         [server_name, backup_name, backup_type, db_name])
            if backup_type == "xen-xva":
                backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
            # if backup_type == "switch":
            #     backup_dict['switch_list'].append(
            #         [server_name, backup_name, backup_type, ""])

    return backup_dict


def read_config():
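    """Read the active config file (CONFIG[config_number]) and return its backup sections grouped by backup type."""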
    config_file = CONFIG[config_number]
    cp = ConfigParser()
    cp.read(config_file)

    backup_base_dir = cp.get("global", "backup_base_dir")
    backup = tis_backup(backup_base_dir=backup_base_dir)
    backup.read_ini_file(config_file)

    backup_sections = SECTIONS or []

    all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
    if not backup_sections:
        backup_sections = all_sections
    else:
        for b in backup_sections:
            if b not in all_sections:
                raise Exception("Section %s is not defined in config file" % b)

    result = []

    # not used ...
    # if not backup_sections:
    #     sections = [backup_item.backup_name for backup_item in backup.backup_list]

    for backup_item in backup.backup_list:
        if backup_item.backup_name in backup_sections:
            b = {}
            for attrib_name in backup_item.required_params + backup_item.optional_params:
                if hasattr(backup_item, attrib_name):
                    b[attrib_name] = getattr(backup_item, attrib_name)
            result.append(b)

    backup_dict = {}
    backup_dict["rsync_ssh_list"] = []
    backup_dict["rsync_btrfs_list"] = []
    backup_dict["rsync_list"] = []
    backup_dict["null_list"] = []
    backup_dict["pgsql_list"] = []
    backup_dict["mysql_list"] = []
    # backup_dict['sqlserver_list'] = []
    backup_dict["xva_list"] = []
    backup_dict["metadata_list"] = []
    # backup_dict['switch_list'] = []
    # backup_dict['oracle_list'] = []
    for row in result:
        backup_name = row["backup_name"]
        server_name = row["server_name"]
        backup_type = row["type"]
        if backup_type == "xcp-dump-metadata":
            backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
        if backup_type == "rsync+ssh":
            remote_dir = row["remote_dir"]
            backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
        if backup_type == "rsync+btrfs+ssh":
            remote_dir = row["remote_dir"]
            backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
        if backup_type == "rsync":
            remote_dir = row["remote_dir"]
            backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
        if backup_type == "null":
            backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
        if backup_type == "pgsql+ssh":
            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
            backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
        if backup_type == "mysql+ssh":
            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
            backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
        # if backup_type == "sqlserver+ssh":
        #     db_name = row['db_name']
        #     backup_dict['sqlserver_list'].append([server_name, backup_name, backup_type, db_name])
        # if backup_type == "oracle+ssh":
        #     db_name = row['db_name']
        #     backup_dict['oracle_list'].append([server_name, backup_name, backup_type, db_name])
        if backup_type == "xen-xva":
            backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
        # if backup_type == "switch":
        #     backup_dict['switch_list'].append([server_name, backup_name, backup_type, ""])
    return backup_dict


@app.route("/")
def backup_all():
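    """Main page: list every backup section of the active config."""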
    backup_dict = read_config()
    return render_template("backups.html", backup_list=backup_dict)


@app.route("/config_number/")
@app.route("/config_number/<int:id>")
def set_config_number(id=None):
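    """Switch the active config file (e.g. GET /config_number/1) and return the list of configs as JSON."""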
    if id is not None and len(CONFIG) > id:
        global config_number
        config_number = id
        read_config()
    return jsonify(configs=CONFIG, config_number=config_number)


@app.route("/all_json")
def backup_all_json():
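    """Return the backup sections of every config file found in BASE_DIR as JSON."""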
    backup_dict = read_all_configs(BASE_DIR)
    return json.dumps(
        backup_dict["rsync_list"]
        + backup_dict["rsync_btrfs_list"]
        + backup_dict["rsync_ssh_list"]
        + backup_dict["pgsql_list"]
        + backup_dict["mysql_list"]
        + backup_dict["xva_list"]
        + backup_dict["null_list"]
        + backup_dict["metadata_list"]
    )
    # + backup_dict['switch_list'])+backup_dict['sqlserver_list']


@app.route("/json")
def backup_json():
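    """Return the backup sections of the active config as JSON."""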
    backup_dict = read_config()
    return json.dumps(
        backup_dict["rsync_list"]
        + backup_dict["rsync_btrfs_list"]
        + backup_dict["rsync_ssh_list"]
        + backup_dict["pgsql_list"]
        + backup_dict["mysql_list"]
        + backup_dict["xva_list"]
        + backup_dict["null_list"]
        + backup_dict["metadata_list"]
    )
    # + backup_dict['switch_list'])+backup_dict['sqlserver_list']


def check_usb_disk():
    """Return the first partition labelled tisbackup found on an external USB disk (e.g. /dev/sdb1), or "" on error."""
    # disk_name = []
    usb_disk_list = []
    for name in glob.glob("/dev/sd[a-z]"):
        for line in os.popen("udevadm info -q env -n %s" % name):
            if re.match("ID_PATH=.*usb.*", line):
                usb_disk_list += [name]

    if len(usb_disk_list) == 0:
        raise_error("Cannot find any external usb disk", "You should plug the usb hard drive into the server")
        return ""
    print(usb_disk_list)

    usb_partition_list = []
    for usb_disk in usb_disk_list:
        cmd = "udevadm info -q path -n %s1" % usb_disk
        output = os.popen(cmd).read()
        print("cmd : " + cmd)
        print("output : " + output)

        if "/devices/pci" in output:
            # flash("partition found: %s1" % usb_disk)
            usb_partition_list.append(usb_disk + "1")

    print(usb_partition_list)

    if len(usb_partition_list) == 0:
        raise_error(
            "The drive %s has no partition" % (usb_disk_list[0]),
            "You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
        )
        return ""

    tisbackup_partition_list = []
    for usb_partition in usb_partition_list:
        if "tisbackup" in os.popen("/sbin/dumpe2fs -h %s 2>&1 |/bin/grep 'volume name'" % usb_partition).read().lower():
            flash("tisbackup backup partition found: %s" % usb_partition)
            tisbackup_partition_list.append(usb_partition)

    print(tisbackup_partition_list)

    if len(tisbackup_partition_list) == 0:
        raise_error(
            "No tisbackup partition exists on disk %s" % (usb_disk_list[0]),
            "You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
        )
        return ""

    if len(tisbackup_partition_list) > 1:
        raise_error("There are several usb disks", "You should remove all but one of them")
        return ""

    return tisbackup_partition_list[0]


def check_already_mount(partition_name, refresh):
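    """Return the mount point of partition_name according to /proc/mounts; when refresh is False, unmount it and remove the mount point directory first."""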
    with open("/proc/mounts") as f:
        mount_point = ""
        for line in f.readlines():
            if line.startswith(partition_name):
                mount_point = line.split(" ")[1]
                if not refresh:
                    run_command("/bin/umount %s" % mount_point)
                    os.rmdir(mount_point)
    return mount_point


def run_command(cmd, info=""):
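    """Run a shell command and return its output as a string; failures are reported through raise_error()."""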
    flash("Executing: %s" % cmd)
    from subprocess import STDOUT, CalledProcessError, check_output

    result = ""
    try:
        # decode so callers can compare the output against plain strings
        result = check_output(cmd, stderr=STDOUT, shell=True).decode("utf-8", errors="replace")
    except CalledProcessError as e:
        raise_error(e.output.decode("utf-8", errors="replace"), info)
    return result


def check_mount_disk(partition_name, refresh):
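    """Mount partition_name on a fresh /mnt/TISBACKUP-<timestamp> directory and return the mount point; with refresh=True just return the existing one."""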
    mount_point = check_already_mount(partition_name, refresh)
    if not refresh:
        mount_point = "/mnt/TISBACKUP-" + str(time.time())
        os.mkdir(mount_point)
        flash("must mount " + partition_name)
        cmd = "mount %s %s" % (partition_name, mount_point)
        if run_command(cmd, "You should manually mount the usb drive") != "":
            flash("Remove directory: %s" % mount_point)
            os.rmdir(mount_point)
            return ""

    return mount_point


@app.route("/status.json")
def export_backup_status():
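    """Return the EXPORT entries started after mindate, the finish flag and any error of the export task, as JSON."""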
    exports = dbstat.query('select * from stats where TYPE="EXPORT" and backup_start>="%s"' % mindate)
    error = ""
    finish = not runnings_backups()
    if get_task() is not None and finish:
        status = get_task().get()
        if status != "ok":
            error = "Export failed with error: " + status

    return jsonify(data=exports, finish=finish, error=error)


def runnings_backups():
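    """Return True while an export task is queued but has not yet produced a result."""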
    task = get_task()
    is_running = task is not None
    finish = is_running and task.get() is not None
    return is_running and not finish


@app.route("/backups.json")
def last_backup_json():
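    """Return all BACKUP entries of the stats database, most recent first, as JSON."""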
    exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC ')
    return Response(response=json.dumps(exports), status=200, mimetype="application/json")


@app.route("/last_backups")
def last_backup():
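    """Show the 20 most recent backups."""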
    exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC LIMIT 20 ')
    return render_template("last_backups.html", backups=exports)


@app.route("/export_backup")
def export_backup():
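    """Export the selected backup sections to the external usb drive; the export only starts when the "start" request argument is present (e.g. /export_backup?start=1)."""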
    raise_error("", "")
    backup_dict = read_config()
    sections = []
    backup_sections = []
    for backup_types in backup_dict:
        if backup_types == "null_list":
            continue
        for section in backup_dict[backup_types]:
            # if section.count > 0:
            if len(section) > 0:
                sections.append(section[1])

    noJobs = not runnings_backups()
    if "start" in list(request.args.keys()) or not noJobs:
        start = True
        if "sections" in list(request.args.keys()):
            backup_sections = request.args.getlist("sections")
    else:
        start = False
    cp.read(tisbackup_config_file)

    partition_name = check_usb_disk()
    if partition_name:
        if noJobs:
            mount_point = check_mount_disk(partition_name, False)
        else:
            mount_point = check_mount_disk(partition_name, True)
        if noJobs:
            global mindate
            mindate = datetime2isodate(datetime.datetime.now())
            if not error and start:
                print(tisbackup_config_file)
                task = run_export_backup(
                    base=backup_base_dir,
                    config_file=CONFIG[config_number],
                    mount_point=mount_point,
                    backup_sections=",".join([str(x) for x in backup_sections]),
                )
                set_task(task)

    return render_template("export_backup.html", error=error, start=start, info=info, email=ADMIN_EMAIL, sections=sections)


def raise_error(strError, strInfo):
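    """Store an error/info message pair in the module-level globals displayed by the templates."""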
    global error, info
    error = strError
    info = strInfo


if __name__ == "__main__":
    read_config()
    from os import environ

    if "WINGDB_ACTIVE" in environ:
        app.debug = False
    app.run(host="0.0.0.0", port=8080)