Compare commits
9 Commits: c5a1ac0551 ... aa8a68aa80

| Author | SHA1 | Date |
|---|---|---|
|  | aa8a68aa80 |  |
|  | 7fcc5afc64 |  |
|  | e7e98d0b47 |  |
|  | 8479c378ee |  |
|  | 274e1e2e59 |  |
|  | eb0bdaedbd |  |
|  | 99dc6e0abf |  |
|  | e8ba6df102 |  |
|  | ffd9bf3d39 |  |
@@ -19,7 +19,6 @@ jobs:
       - run: pip install ruff
       - run: |
           ruff check .
-          ruff fix .
       # - uses: stefanzweifel/git-auto-commit-action@v4
       #   with:
       #     commit_message: 'style fixes by ruff'

.gitignore (vendored, 2 changes)

@@ -2,11 +2,13 @@
 *.swp
 *~
 *.pyc
+__pycache__/*
 /tasks.sqlite
 /tasks.sqlite-wal
 /srvinstallation
 /tasks.sqlite-shm
 .idea
+.ruff_cache/*
 /deb/builddir
 /deb/*.deb
 /lib

.hadolint.yml (new file, 13 lines)

@@ -0,0 +1,13 @@
+failure-threshold: warning
+format: tty
+ignored:
+  - DL3007
+override:
+  error:
+    - DL3015
+  warning:
+    - DL3015
+  info:
+    - DL3008
+  style:
+    - DL3015

.pre-commit-config.yaml (new file, 7 lines)

@@ -0,0 +1,7 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v5.0.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml

Dockerfile (new executable file, 20 lines)

@@ -0,0 +1,20 @@
+FROM python:3.12-slim
+
+WORKDIR /opt/tisbackup
+
+COPY entrypoint.sh /entrypoint.sh
+COPY . /opt/tisbackup
+
+RUN apt-get update \
+    && apt-get install --no-install-recommends -y rsync ssh cron \
+    && rm -rf /var/lib/apt/lists/* \
+    && /usr/local/bin/python3.12 -m pip install --no-cache-dir -r requirements.txt \
+    && mkdir -p /var/spool/cron/crontabs \
+    && echo '59 03 * * * root /bin/bash /opt/tisbackup/backup.sh' > /etc/crontab \
+    && echo '' >> /etc/crontab \
+    && crontab /etc/crontab
+
+EXPOSE 8080
+
+ENTRYPOINT ["/entrypoint.sh"]
+CMD ["/usr/local/bin/python3.12","/opt/tisbackup/tisbackup_gui.py"]

compose.yml (new executable file, 41 lines)

@@ -0,0 +1,41 @@
+services:
+  tisbackup_gui:
+    container_name: tisbackup_gui
+    image: "tisbackup:latest"
+    build: .
+    volumes:
+      - ./config/:/etc/tis/
+      - ./backup/:/backup/
+      - /etc/timezone:/etc/timezone:ro
+      - /etc/localtime:/etc/localtime:ro
+    restart: unless-stopped
+    ports:
+      - 9980:8080
+    deploy:
+      resources:
+        limits:
+          cpus: 0.50
+          memory: 512M
+        reservations:
+          cpus: 0.25
+          memory: 128M
+  tisbackup_cron:
+    container_name: tisbackup_cron
+    image: "tisbackup:latest"
+    build: .
+    volumes:
+      - ./config/:/etc/tis/
+      - ./ssh/:/config_ssh/
+      - ./backup/:/backup/
+      - /etc/timezone:/etc/timezone:ro
+      - /etc/localtime:/etc/localtime:ro
+    restart: always
+    command: "/bin/bash /opt/tisbackup/cron.sh"
+    deploy:
+      resources:
+        limits:
+          cpus: 0.50
+          memory: 512M
+        reservations:
+          cpus: 0.25
+          memory: 128M

config.py (11 changes, mode changed: Normal file → Executable file)

@@ -1,10 +1,9 @@
-import os,sys
-from huey.backends.sqlite_backend import SqliteQueue,SqliteDataStore
-from huey.api import Huey, create_task
+import os
+import sys
+
+from huey.contrib.sql_huey import SqlHuey
+from huey.storage import SqliteStorage
 
 tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
 tasks_db = os.path.join(tisbackup_root_dir, "tasks.sqlite")
-queue = SqliteQueue('tisbackups',tasks_db)
-result_store = SqliteDataStore('tisbackups',tasks_db)
-huey = Huey(queue,result_store,always_eager=False)
+huey = SqlHuey(name="tisbackups",filename=tasks_db,always_eager=False,storage_class=SqliteStorage)

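For orientation, a minimal sketch of how a `SqlHuey` instance like the one above is typically consumed, assuming huey's standard `@huey.task()` decorator API; the `do_backup` task name is hypothetical and not part of this diff:

```python
# Hypothetical consumer of the SqlHuey instance defined in config.py.
from config import huey


@huey.task()
def do_backup(section):
    # Placeholder body: run the backup for one configured section.
    return "%s: OK" % section


# Enqueueing returns a result handle; .get(blocking=True) waits until a
# separate huey consumer process has executed the task and stored the result.
result = do_backup("srvfiles")
print(result.get(blocking=True))
```
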
cron.sh (new executable file, 4 lines)

@@ -0,0 +1,4 @@
+#!/bin/bash
+set -x
+echo "Starting cron job for TIS Backup"
+cron -f -l 2

@@ -52,5 +52,3 @@ The documentation for tisbackup is here: [tisbackup doc](https://tisbackup.readt
 dpkg --force-all --purge tis-tisbackup
 apt autoremove
 ```
-
-

@@ -7,4 +7,3 @@ Depends: unzip, ssh, rsync, python3-paramiko, python3-pyvmomi, python3-pexpect,
 Maintainer: Tranquil-IT <technique@tranquil.it>
 Description: TISBackup backup management
 Homepage: https://www.tranquil.it
-

@@ -32,5 +32,3 @@ rsync -aP ../samples/tisbackup-config.ini.sample ./builddir/etc/tis/tisbackup-c
 chmod 755 ./builddir/opt/tisbackup/tisbackup.py
 
 dpkg-deb --build builddir tis-tisbackup-1-${VERSION}.deb
-
-

docs/_static/language_data.js (vendored, 2 changes)

@@ -293,5 +293,3 @@ function splitQuery(query) {
   }
   return result;
 }
-
-

entrypoint.sh (new executable file, 7 lines)

@@ -0,0 +1,7 @@
+#!/bin/sh
+
+env >> /etc/environment
+
+# execute CMD
+echo "$@"
+exec "$@"

@@ -55,11 +55,12 @@
 # --------------------------------------------------------------------
 
 import gettext
-import six.moves.xmlrpc_client as xmlrpclib
-import six.moves.http_client as httplib
 import socket
 import sys
 
+import six.moves.http_client as httplib
+import six.moves.xmlrpc_client as xmlrpclib
+
 translation = gettext.translation('xen-xm', fallback = True)
 
 API_VERSION_1_1 = '1.1'

@@ -15,4 +15,3 @@
 # along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
 #
 # -----------------------------------------------------------------------
-

@@ -21,6 +21,7 @@
 
 
 import sys
+
 try:
     sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
     import paramiko
@@ -32,6 +33,7 @@ sys.stderr = sys.__stderr__
 
 from libtisbackup.common import *
 
+
 class backup_mysql(backup_generic):
     """Backup a mysql database as gzipped sql file through ssh"""
     type = 'mysql+ssh'

@@ -18,8 +18,9 @@
 #
 # -----------------------------------------------------------------------
 
-import os
 import datetime
+import os
+
 from .common import *
 
 
@@ -48,4 +49,3 @@ register_driver(backup_null)
 
 if __name__=='__main__':
     pass
-

@@ -18,6 +18,7 @@
 #
 # -----------------------------------------------------------------------
 import sys
+
 try:
     sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
     import paramiko

@@ -27,12 +28,14 @@ except ImportError as e:
 
 sys.stderr = sys.__stderr__
 
-import datetime
 import base64
+import datetime
 import os
-from libtisbackup.common import *
 import re
 
+from libtisbackup.common import *
+
+
 class backup_oracle(backup_generic):
     """Backup a oracle database as zipped file through ssh"""
     type = 'oracle+ssh'

@@ -18,6 +18,7 @@
 #
 # -----------------------------------------------------------------------
 import sys
+
 try:
     sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
     import paramiko

@@ -29,6 +30,7 @@ sys.stderr = sys.__stderr__
 
 from .common import *
 
+
 class backup_pgsql(backup_generic):
     """Backup a postgresql database as gzipped sql file through ssh"""
     type = 'pgsql+ssh'

@@ -18,14 +18,14 @@
 #
 # -----------------------------------------------------------------------
 
-import os
 import datetime
-from libtisbackup.common import *
-import time
 import logging
-import re
+import os
 import os.path
-import datetime
+import re
+import time
 
+from libtisbackup.common import *
+
 
 class backup_rsync(backup_generic):

@@ -341,4 +341,3 @@ if __name__=='__main__':
     b.read_config(cp)
     b.process_backup()
     print((b.checknagios()))
-

@@ -18,14 +18,13 @@
 #
 # -----------------------------------------------------------------------
 
-import os
 import datetime
-from .common import *
-import time
 import logging
-import re
+import os
 import os.path
-import datetime
+import re
+import time
 
 from .common import *
 

@@ -359,4 +358,3 @@ if __name__=='__main__':
     b.read_config(cp)
     b.process_backup()
     print((b.checknagios()))
-

@@ -21,6 +21,7 @@
 
 
 import sys
+
 try:
     sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
     import paramiko
@@ -32,6 +33,7 @@ sys.stderr = sys.__stderr__
 
 from .common import *
 
+
 class backup_samba4(backup_generic):
     """Backup a samba4 databases as gzipped tdbs file through ssh"""
     type = 'samba4'

@@ -21,6 +21,7 @@
 
 
 import sys
+
 try:
     sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
     import paramiko
@@ -30,11 +31,13 @@ except ImportError as e:
 sys.stderr = sys.__stderr__
 
-import datetime
 import base64
+import datetime
 import os
 
 from .common import *
 
+
+
 class backup_sqlserver(backup_generic):
     """Backup a SQLSERVER database as gzipped sql file through ssh"""
     type = 'sqlserver+ssh'

@@ -18,23 +18,26 @@
 #
 # -----------------------------------------------------------------------
 
-import os
-import datetime
-from .common import *
-from . import XenAPI
-import time
-import logging
-import re
-import os.path
-import datetime
-import select
-import urllib.request, urllib.error, urllib.parse, urllib.request, urllib.parse, urllib.error
 import base64
+import datetime
+import logging
+import os
+import os.path
+import re
+import select
 import socket
-import requests
-import pexpect
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
 from stat import *
 
+import pexpect
+import requests
+
+from . import XenAPI
+from .common import *
+
 
 class backup_switch(backup_generic):
     """Backup a startup-config on a switch"""
@@ -259,4 +262,3 @@ if __name__=='__main__':
     cp.read('/opt/tisbackup/configtest.ini')
     b = backup_xva()
     b.read_config(cp)
-

@@ -18,25 +18,25 @@
 #
 # -----------------------------------------------------------------------
 
-from .common import *
-import pyVmomi
-from pyVmomi import vim
-from pyVmomi import vmodl
-from pyVim.connect import SmartConnect, Disconnect
-
-from datetime import datetime, date, timedelta
 import atexit
 import getpass
+from datetime import date, datetime, timedelta
+
+import pyVmomi
 import requests
+from pyVim.connect import Disconnect, SmartConnect
+from pyVmomi import vim, vmodl
 
 # Disable HTTPS verification warnings.
 from requests.packages import urllib3
 
+from .common import *
+
 urllib3.disable_warnings()
 import os
-import time
-import tarfile
 import re
+import tarfile
+import time
 import xml.etree.ElementTree as ET
 
 from stat import *
@@ -279,4 +279,3 @@ class backup_vmdk(backup_generic):
 
 
 register_driver(backup_vmdk)
-

@@ -20,9 +20,11 @@
 
 
 
-from .common import *
 import paramiko
 
+from .common import *
+
+
 class backup_xcp_metadata(backup_generic):
     """Backup metatdata of a xcp pool using xe pool-dump-database"""
     type = 'xcp-dump-metadata'

@@ -18,20 +18,23 @@
 #
 # -----------------------------------------------------------------------
 
-import logging
-import re
-import os
 import datetime
-import urllib.request, urllib.parse, urllib.error
-import socket
-import tarfile
 import hashlib
-from stat import *
+import logging
+import os
+import re
+import socket
 import ssl
+import tarfile
+import urllib.error
+import urllib.parse
+import urllib.request
+from stat import *
 
 import requests
 
-from .common import *
 from . import XenAPI
+from .common import *
 
 if hasattr(ssl, '_create_unverified_context'):
     ssl._create_default_https_context = ssl._create_unverified_context

@@ -18,19 +18,19 @@
 #
 # -----------------------------------------------------------------------
 
-from abc import ABC, abstractmethod
-import os
-import subprocess
-import re
-import logging
 import datetime
-import time
-from iniparse import ConfigParser
-import sqlite3
-import shutil
+import logging
+import os
+import re
 import select
+import shutil
+import sqlite3
+import subprocess
 import sys
+import time
+from abc import ABC, abstractmethod
+
+from iniparse import ConfigParser
 
 try:
     sys.stderr = open('/dev/null') # Silence silly warnings from paramiko

@@ -18,22 +18,24 @@
 #
 # -----------------------------------------------------------------------
 
-import os
-import datetime
-from .common import *
-from . import XenAPI
-import time
-import logging
-import re
-import os.path
-import os
-import datetime
-import select
-import urllib.request, urllib.error, urllib.parse
 import base64
+import datetime
+import logging
+import os
+import os.path
+import re
+import select
 import socket
-from stat import *
 import ssl
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
+from stat import *
+
+from . import XenAPI
+from .common import *
 
 if hasattr(ssl, '_create_unverified_context'):
     ssl._create_default_https_context = ssl._create_unverified_context

@@ -3,18 +3,16 @@
 # Copyright (c) 2007 Tim Lauridsen <tla@rasmil.dk>
 # All Rights Reserved. See LICENSE-PSF & LICENSE for details.
 
-from .ini import INIConfig, change_comment_syntax
+from .compat import ConfigParser, RawConfigParser, SafeConfigParser
 from .config import BasicConfig, ConfigNamespace
-from .compat import RawConfigParser, ConfigParser, SafeConfigParser
+from .configparser import (DEFAULTSECT, MAX_INTERPOLATION_DEPTH,
+                           DuplicateSectionError, InterpolationDepthError,
+                           InterpolationMissingOptionError,
+                           InterpolationSyntaxError, NoOptionError,
+                           NoSectionError)
+from .ini import INIConfig, change_comment_syntax
 from .utils import tidy
 
-from .configparser import DuplicateSectionError, \
-                          NoSectionError, NoOptionError, \
-                          InterpolationMissingOptionError, \
-                          InterpolationDepthError, \
-                          InterpolationSyntaxError, \
-                          DEFAULTSECT, MAX_INTERPOLATION_DEPTH
 
 __all__ = [
     'BasicConfig', 'ConfigNamespace',
     'INIConfig', 'tidy', 'change_comment_syntax',

@@ -12,21 +12,18 @@ The underlying INIConfig object can be accessed as cfg.data
 """
 
 import re
-from .configparser import DuplicateSectionError, \
-                          NoSectionError, NoOptionError, \
-                          InterpolationMissingOptionError, \
-                          InterpolationDepthError, \
-                          InterpolationSyntaxError, \
-                          DEFAULTSECT, MAX_INTERPOLATION_DEPTH
-
-# These are imported only for compatiability.
-# The code below does not reference them directly.
-from .configparser import Error, InterpolationError, \
-                          MissingSectionHeaderError, ParsingError
 
 import six
 
 from . import ini
+# These are imported only for compatiability.
+# The code below does not reference them directly.
+from .configparser import (DEFAULTSECT, MAX_INTERPOLATION_DEPTH,
+                           DuplicateSectionError, Error,
+                           InterpolationDepthError, InterpolationError,
+                           InterpolationMissingOptionError,
+                           InterpolationSyntaxError, MissingSectionHeaderError,
+                           NoOptionError, NoSectionError, ParsingError)
 
 
 class RawConfigParser(object):

@@ -1,6 +1,6 @@
 try:
-    from ConfigParser import *
     # not all objects get imported with __all__
+    from ConfigParser import *
     from ConfigParser import Error, InterpolationMissingOptionError
 except ImportError:
     from configparser import *

@@ -42,11 +42,11 @@ Example:
 # Backward-compatiable with ConfigParser
 
 import re
-from .configparser import DEFAULTSECT, ParsingError, MissingSectionHeaderError
 
 import six
 
 from . import config
+from .configparser import DEFAULTSECT, MissingSectionHeaderError, ParsingError
 
 
 class LineType(object):

@@ -1,5 +1,5 @@
 from . import compat
-from .ini import LineContainer, EmptyLine
+from .ini import EmptyLine, LineContainer
 
 
 def tidy(cfg):

pyproject.toml (new file, 10 lines)

@@ -0,0 +1,10 @@
+[tool.black]
+line-length = 140
+
+[tool.ruff]
+# Allow lines to be as long as 120.
+line-length = 140
+indent-width = 4
+
+[tool.ruff.lint]
+ignore = ["F401","F403","F405","E402"]

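For orientation, a small runnable module illustrating the four rule families that this `ignore` list silences (F401 unused import, F403 star import, F405 possibly-undefined name from a star import, E402 import not at the top of the file), a common relaxation for a codebase built around `from libtisbackup.common import *`; the module below is hypothetical, not taken from this repository:

```python
import sys  # F401: imported but unused; silenced by the config above
from os.path import *  # F403: star import; silenced

print("starting up")  # executable code before the next import ...

import logging  # E402: module-level import not at top of file; silenced

# F405: ruff cannot prove `join` is defined, since it only comes from the
# star import above; also silenced.
print(join("/tmp", "tasks.sqlite"))
```
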
requirements.txt (7 changes, mode changed: Normal file → Executable file)

@@ -1,3 +1,10 @@
 six
 requests
 paramiko
+pexpect
+flask
+simplejson
+huey
+iniparse
+redis
+peewee

@@ -12,4 +12,3 @@ echo $VERSION > __VERSION__
 
 rpmbuild -bb --buildroot $PWD/builddir -v --clean tis-tisbackup.spec
 cp RPMS/*/*.rpm .
-

@@ -14,5 +14,3 @@ else
     sleep 3
 fi
 echo $(date +%Y-%m-%d\ %H:%M:%S) : Fin Export TISBackup sur Disque USB : $target >> /var/log/tisbackup.log
-
-

@@ -95,4 +95,3 @@ maximum_backup_age=30
 ;type=xcp-dump-metadata
 ;server_name=srvxen1
 ;private_key=/root/.ssh/id_rsa
-

@@ -18,4 +18,3 @@ password_file=/home/homes/ssamson/tisbackup-pra/xen_passwd
 network_name=net-test
 #start_vm=no
 #max_copies=3
-

@@ -4,4 +4,3 @@
 # m h dom mon dow user command
 30 22 * * * root /opt/tisbackup/tisbackup.py -c /etc/tis/tisbackup-config.ini backup >> /var/log/tisbackup.log 2>&1
 30 12 * * * root /opt/tisbackup/tisbackup.py -c /etc/tis/tisbackup-config.ini cleanup >> /var/log/tisbackup.log 2>&1
-

@@ -95,4 +95,3 @@ case "$1" in
 esac
 
 exit 0
-

@@ -95,4 +95,3 @@ case "$1" in
 esac
 
 exit 0
-

static/js/jquery.dataTables.js (vendored, 1 change)

@@ -14948,4 +14948,3 @@
 }));
 
 }(window, document));
-

tasks.py (6 changes)

@@ -1,6 +1,8 @@
-from huey import RedisHuey
-import os
 import logging
+import os
+
+from huey import RedisHuey
+
 from tisbackup import tis_backup
 
 huey = RedisHuey('tisbackup', host='localhost')

@@ -98,4 +98,3 @@
 </script>
 
 </body></html>
-

tisbackup.py (237 changes)
@@ -18,37 +18,39 @@
 #
 # -----------------------------------------------------------------------
 import datetime
-import subprocess
-import os,sys
+import os
+import sys
 from os.path import isfile, join
 
 tisbackup_root_dir = os.path.dirname(os.path.realpath(__file__))
-sys.path.insert(0,os.path.join(tisbackup_root_dir,'lib'))
-sys.path.insert(0,os.path.join(tisbackup_root_dir,'libtisbackup'))
+sys.path.insert(0, os.path.join(tisbackup_root_dir, "lib"))
+sys.path.insert(0, os.path.join(tisbackup_root_dir, "libtisbackup"))
 
-from iniparse import ini,ConfigParser
-from optparse import OptionParser
-import re
-import getopt
-import os.path
-import logging
 import errno
-from libtisbackup.common import *
+import logging
+import os.path
+
+from optparse import OptionParser
+
+from iniparse import ConfigParser, ini
+
 from libtisbackup.backup_mysql import backup_mysql
-from libtisbackup.backup_rsync import backup_rsync
-from libtisbackup.backup_rsync import backup_rsync_ssh
-#from libtisbackup.backup_oracle import backup_oracle
-from libtisbackup.backup_rsync_btrfs import backup_rsync_btrfs
-from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh
-from libtisbackup.backup_pgsql import backup_pgsql
-from libtisbackup.backup_xva import backup_xva
 # from libtisbackup.backup_vmdk import backup_vmdk
 # from libtisbackup.backup_switch import backup_switch
 from libtisbackup.backup_null import backup_null
-from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
-from libtisbackup.copy_vm_xcp import copy_vm_xcp
+from libtisbackup.backup_pgsql import backup_pgsql
+from libtisbackup.backup_rsync import backup_rsync, backup_rsync_ssh
+
+# from libtisbackup.backup_oracle import backup_oracle
+from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh, backup_rsync_btrfs
+
 # from libtisbackup.backup_sqlserver import backup_sqlserver
 from libtisbackup.backup_samba4 import backup_samba4
+from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
+from libtisbackup.backup_xva import backup_xva
+from libtisbackup.common import *
+from libtisbackup.copy_vm_xcp import copy_vm_xcp
 
 __version__ = "2.0"
 
@@ -70,23 +72,45 @@ action is either :
 version = "VERSION"
 
 parser = OptionParser(usage=usage, version="%prog " + version)
-parser.add_option("-c","--config", dest="config", default='/etc/tis/tisbackup-config.ini', help="Config file full path (default: %default)")
-parser.add_option("-d","--dry-run", dest="dry_run", default=False, action='store_true', help="Dry run (default: %default)")
-parser.add_option("-v","--verbose", dest="verbose", default=False, action='store_true', help="More information (default: %default)")
-parser.add_option("-s","--sections", dest="sections", default='', help="Comma separated list of sections (backups) to process (default: All)")
-parser.add_option("-l","--loglevel", dest="loglevel", default='info', type='choice', choices=['debug','warning','info','error','critical'], metavar='LOGLEVEL',help="Loglevel (default: %default)")
-parser.add_option("-n","--len", dest="statscount", default=30, type='int', help="Number of lines to list for dumpstat (default: %default)")
-parser.add_option("-b","--backupdir", dest="backup_base_dir", default='', help="Base directory for all backups (default: [global] backup_base_dir in config file)")
-parser.add_option("-x","--exportdir", dest="exportdir", default='', help="Directory where to export latest backups with exportbackup (nodefault)")
+parser.add_option(
+    "-c", "--config", dest="config", default="/etc/tis/tisbackup-config.ini", help="Config file full path (default: %default)"
+)
+parser.add_option("-d", "--dry-run", dest="dry_run", default=False, action="store_true", help="Dry run (default: %default)")
+parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true", help="More information (default: %default)")
+parser.add_option(
+    "-s", "--sections", dest="sections", default="", help="Comma separated list of sections (backups) to process (default: All)"
+)
+parser.add_option(
+    "-l",
+    "--loglevel",
+    dest="loglevel",
+    default="info",
+    type="choice",
+    choices=["debug", "warning", "info", "error", "critical"],
+    metavar="LOGLEVEL",
+    help="Loglevel (default: %default)",
+)
+parser.add_option("-n", "--len", dest="statscount", default=30, type="int", help="Number of lines to list for dumpstat (default: %default)")
+parser.add_option(
+    "-b",
+    "--backupdir",
+    dest="backup_base_dir",
+    default="",
+    help="Base directory for all backups (default: [global] backup_base_dir in config file)",
+)
+parser.add_option(
+    "-x", "--exportdir", dest="exportdir", default="", help="Directory where to export latest backups with exportbackup (nodefault)"
+)
 
 
 class tis_backup:
-    logger = logging.getLogger('tisbackup')
+    logger = logging.getLogger("tisbackup")
 
-    def __init__(self,dry_run=False,verbose=False,backup_base_dir=''):
+    def __init__(self, dry_run=False, verbose=False, backup_base_dir=""):
         self.dry_run = dry_run
         self.verbose = verbose
         self.backup_base_dir = backup_base_dir
-        self.backup_base_dir = ''
+        self.backup_base_dir = ""
         self.backup_list = []
         self.dry_run = dry_run
         self.verbose = False
@@ -97,22 +121,23 @@ class tis_backup:
         cp.read(filename)
 
         if not self.backup_base_dir:
-            self.backup_base_dir = cp.get('global','backup_base_dir')
+            self.backup_base_dir = cp.get("global", "backup_base_dir")
         if not os.path.isdir(self.backup_base_dir):
-            self.logger.info('Creating backup directory %s' % self.backup_base_dir)
+            self.logger.info("Creating backup directory %s" % self.backup_base_dir)
             os.makedirs(self.backup_base_dir)
 
         self.logger.debug("backup directory : " + self.backup_base_dir)
-        self.dbstat = BackupStat(os.path.join(self.backup_base_dir,'log','tisbackup.sqlite'))
+        self.dbstat = BackupStat(os.path.join(self.backup_base_dir, "log", "tisbackup.sqlite"))
 
         for section in cp.sections():
-            if (section != 'global'):
+            if section != "global":
                 self.logger.debug("reading backup config " + section)
                 backup_item = None
-                type = cp.get(section,'type')
+                type = cp.get(section, "type")
 
-                backup_item = backup_drivers[type](backup_name=section,
-                    backup_dir=os.path.join(self.backup_base_dir,section),dbstat=self.dbstat,dry_run=self.dry_run)
+                backup_item = backup_drivers[type](
+                    backup_name=section, backup_dir=os.path.join(self.backup_base_dir, section), dbstat=self.dbstat, dry_run=self.dry_run
+                )
                 backup_item.read_config(cp)
                 backup_item.verbose = self.verbose
 
@@ -122,20 +147,19 @@ class tis_backup:
     # TODO socket.gethostbyaddr('64.236.16.20')
     # TODO limit backup to one backup on the command line
 
-
     def checknagios(self, sections=[]):
         try:
             if not sections:
                 sections = [backup_item.backup_name for backup_item in self.backup_list]
 
-            self.logger.debug('Start of check nagios for %s' % (','.join(sections),))
+            self.logger.debug("Start of check nagios for %s" % (",".join(sections),))
             try:
                 worst_nagiosstatus = None
                 ok = []
                 warning = []
                 critical = []
                 unknown = []
-                nagiosoutput = ''
+                nagiosoutput = ""
                 for backup_item in self.backup_list:
                     if not sections or backup_item.backup_name in sections:
                         (nagiosstatus, log) = backup_item.checknagios()
@@ -150,7 +174,7 @@ class tis_backup:
                         self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)
 
                 if not ok and not critical and not unknown and not warning:
-                    self.logger.debug('Nothing processed')
+                    self.logger.debug("Nothing processed")
                     worst_nagiosstatus = nagiosStateUnknown
                     nagiosoutput = 'UNKNOWN : Unknown backup sections "%s"' % sections
 
@@ -159,40 +183,39 @@ class tis_backup:
                 if unknown:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateUnknown
-                        nagiosoutput = 'UNKNOWN status backups %s' % (','.join([b[0] for b in unknown]))
+                        nagiosoutput = "UNKNOWN status backups %s" % (",".join([b[0] for b in unknown]))
                     globallog.extend(unknown)
 
                 if critical:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateCritical
-                        nagiosoutput = 'CRITICAL backups %s' % (','.join([b[0] for b in critical]))
+                        nagiosoutput = "CRITICAL backups %s" % (",".join([b[0] for b in critical]))
                     globallog.extend(critical)
 
                 if warning:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateWarning
-                        nagiosoutput = 'WARNING backups %s' % (','.join([b[0] for b in warning]))
+                        nagiosoutput = "WARNING backups %s" % (",".join([b[0] for b in warning]))
                     globallog.extend(warning)
 
                 if ok:
                     if not worst_nagiosstatus:
                         worst_nagiosstatus = nagiosStateOk
-                        nagiosoutput = 'OK backups %s' % (','.join([b[0] for b in ok]))
+                        nagiosoutput = "OK backups %s" % (",".join([b[0] for b in ok]))
                     globallog.extend(ok)
 
                 if worst_nagiosstatus == nagiosStateOk:
-                    nagiosoutput = 'ALL backups OK %s' % (','.join(sections))
+                    nagiosoutput = "ALL backups OK %s" % (",".join(sections))
 
 
             except BaseException as e:
                 worst_nagiosstatus = nagiosStateCritical
-                nagiosoutput = 'EXCEPTION',"Critical : %s" % str(e)
+                nagiosoutput = "EXCEPTION", "Critical : %s" % str(e)
                 raise
 
             finally:
                 self.logger.debug('worst nagios status :"%i"', worst_nagiosstatus)
-                print('%s (tisbackup V%s)' %(nagiosoutput,version))
-                print('\n'.join(["[%s]:%s" % (l[0],l[1]) for l in globallog]))
+                print("%s (tisbackup V%s)" % (nagiosoutput, version))
+                print("\n".join(["[%s]:%s" % (log_elem[0], log_elem[1]) for log_elem in globallog]))
                 sys.exit(worst_nagiosstatus)
 
     def process_backup(self, sections=[]):
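The hunk above is mostly a quote-style cleanup of `checknagios()`; the aggregation it implements is "worst status wins". A standalone sketch, assuming the conventional Nagios exit codes (0=OK, 1=WARNING, 2=CRITICAL, 3=UNKNOWN; the actual `nagiosState*` constants live in `libtisbackup.common`, outside this excerpt):

```python
# Conventional Nagios plugin exit codes (assumed values, not shown in this diff).
nagiosStateOk, nagiosStateWarning, nagiosStateCritical, nagiosStateUnknown = 0, 1, 2, 3

# checknagios() buckets per-backup results and reports the first non-empty
# bucket in the order unknown, critical, warning, ok; ranking UNKNOWN highest
# reproduces that priority.
SEVERITY = {nagiosStateOk: 0, nagiosStateWarning: 1, nagiosStateCritical: 2, nagiosStateUnknown: 3}


def worst_status(statuses):
    """Return the worst Nagios state from a list of per-backup states."""
    return max(statuses, key=SEVERITY.get)


print(worst_status([nagiosStateOk, nagiosStateWarning]))  # 1, i.e. WARNING
```
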
@@ -201,50 +224,50 @@ class tis_backup:
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]
 
-        self.logger.info('Processing backup for %s' % (','.join(sections)) )
+        self.logger.info("Processing backup for %s" % (",".join(sections)))
         for backup_item in self.backup_list:
             if not sections or backup_item.backup_name in sections:
                 try:
-                    assert(isinstance(backup_item,backup_generic))
-                    self.logger.info('Processing [%s]',(backup_item.backup_name))
+                    assert isinstance(backup_item, backup_generic)
+                    self.logger.info("Processing [%s]", (backup_item.backup_name))
                     stats = backup_item.process_backup()
                     processed.append((backup_item.backup_name, stats))
                 except BaseException as e:
-                    self.logger.critical('Backup [%s] processed with error : %s',backup_item.backup_name,e)
+                    self.logger.critical("Backup [%s] processed with error : %s", backup_item.backup_name, e)
                     errors.append((backup_item.backup_name, str(e)))
         if not processed and not errors:
-            self.logger.critical('No backup properly finished or processed')
+            self.logger.critical("No backup properly finished or processed")
         else:
             if processed:
-                self.logger.info('Backup processed : %s' , ",".join([b[0] for b in processed]))
+                self.logger.info("Backup processed : %s", ",".join([b[0] for b in processed]))
             if errors:
-                self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
+                self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))
 
-    def export_backups(self,sections=[],exportdir=''):
+    def export_backups(self, sections=[], exportdir=""):
         processed = []
         errors = []
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]
 
-        self.logger.info('Exporting OK backups for %s to %s' % (','.join(sections),exportdir) )
+        self.logger.info("Exporting OK backups for %s to %s" % (",".join(sections), exportdir))
 
         for backup_item in self.backup_list:
             if backup_item.backup_name in sections:
                 try:
-                    assert(isinstance(backup_item,backup_generic))
-                    self.logger.info('Processing [%s]',(backup_item.backup_name))
+                    assert isinstance(backup_item, backup_generic)
+                    self.logger.info("Processing [%s]", (backup_item.backup_name))
                     stats = backup_item.export_latestbackup(destdir=exportdir)
                     processed.append((backup_item.backup_name, stats))
                 except BaseException as e:
-                    self.logger.critical('Export Backup [%s] processed with error : %s',backup_item.backup_name,e)
+                    self.logger.critical("Export Backup [%s] processed with error : %s", backup_item.backup_name, e)
                     errors.append((backup_item.backup_name, str(e)))
         if not processed and not errors:
-            self.logger.critical('No export backup properly finished or processed')
+            self.logger.critical("No export backup properly finished or processed")
         else:
             if processed:
-                self.logger.info('Export Backups processed : %s' , ",".join([b[0] for b in processed]))
+                self.logger.info("Export Backups processed : %s", ",".join([b[0] for b in processed]))
             if errors:
-                self.logger.error('Export Backups processed with errors: %s' , ",".join([b[0] for b in errors]))
+                self.logger.error("Export Backups processed with errors: %s", ",".join([b[0] for b in errors]))
 
     def retry_failed_backups(self, maxage_hours=30):
         processed = []
@@ -252,63 +275,62 @@ class tis_backup:
 
         # before mindate, backup is too old
         mindate = datetime2isodate((datetime.datetime.now() - datetime.timedelta(hours=maxage_hours)))
-        failed_backups = self.dbstat.query("""\
+        failed_backups = self.dbstat.query(
+            """\
 select distinct backup_name as bname
 from stats
-where status="OK" and backup_start>=?""",(mindate,))
+where status="OK" and backup_start>=?""",
+            (mindate,),
+        )
 
         defined_backups = list(map(lambda f: f.backup_name, [x for x in self.backup_list if not isinstance(x, backup_null)]))
-        failed_backups_names = set(defined_backups) - set([b['bname'] for b in failed_backups if b['bname'] in defined_backups])
+        failed_backups_names = set(defined_backups) - set([b["bname"] for b in failed_backups if b["bname"] in defined_backups])
 
 
         if failed_backups_names:
-            self.logger.info('Processing backup for %s',','.join(failed_backups_names))
+            self.logger.info("Processing backup for %s", ",".join(failed_backups_names))
             for backup_item in self.backup_list:
                 if backup_item.backup_name in failed_backups_names:
                     try:
-                        assert(isinstance(backup_item,backup_generic))
-                        self.logger.info('Processing [%s]',(backup_item.backup_name))
+                        assert isinstance(backup_item, backup_generic)
+                        self.logger.info("Processing [%s]", (backup_item.backup_name))
                         stats = backup_item.process_backup()
                         processed.append((backup_item.backup_name, stats))
                     except BaseException as e:
-                        self.logger.critical('Backup [%s] not processed, error : %s',backup_item.backup_name,e)
+                        self.logger.critical("Backup [%s] not processed, error : %s", backup_item.backup_name, e)
                         errors.append((backup_item.backup_name, str(e)))
             if not processed and not errors:
-                self.logger.critical('No backup properly finished or processed')
+                self.logger.critical("No backup properly finished or processed")
             else:
                 if processed:
-                    self.logger.info('Backup processed : %s' , ",".join([b[0] for b in errors]))
+                    self.logger.info("Backup processed : %s", ",".join([b[0] for b in errors]))
                 if errors:
-                    self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
+                    self.logger.error("Backup processed with errors: %s", ",".join([b[0] for b in errors]))
         else:
-            self.logger.info('No recent failed backups found in database')
+            self.logger.info("No recent failed backups found in database")
 
 
     def cleanup_backup_section(self, sections=[]):
-        log = ''
         processed = False
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]
 
-        self.logger.info('Processing cleanup for %s' % (','.join(sections)) )
+        self.logger.info("Processing cleanup for %s" % (",".join(sections)))
         for backup_item in self.backup_list:
             if backup_item.backup_name in sections:
                 try:
-                    assert(isinstance(backup_item,backup_generic))
-                    self.logger.info('Processing cleanup of [%s]',(backup_item.backup_name))
+                    assert isinstance(backup_item, backup_generic)
+                    self.logger.info("Processing cleanup of [%s]", (backup_item.backup_name))
                     backup_item.cleanup_backup()
                     processed = True
                 except BaseException as e:
-                    self.logger.critical('Cleanup of [%s] not processed, error : %s',backup_item.backup_name,e)
+                    self.logger.critical("Cleanup of [%s] not processed, error : %s", backup_item.backup_name, e)
         if not processed:
-            self.logger.critical('No cleanup properly finished or processed')
+            self.logger.critical("No cleanup properly finished or processed")
 
     def register_existingbackups(self, sections=[]):
         if not sections:
             sections = [backup_item.backup_name for backup_item in self.backup_list]
 
-        self.logger.info('Append existing backups to database...')
+        self.logger.info("Append existing backups to database...")
         for backup_item in self.backup_list:
             if backup_item.backup_name in sections:
                 backup_item.register_existingbackups()
@@ -316,15 +338,15 @@ class tis_backup:
     def html_report(self):
        for backup_item in self.backup_list:
             if not section or section == backup_item.backup_name:
-                assert(isinstance(backup_item,backup_generic))
+                assert isinstance(backup_item, backup_generic)
                 if not maxage_hours:
                     maxage_hours = backup_item.maximum_backup_age
                 (nagiosstatus, log) = backup_item.checknagios(maxage_hours=maxage_hours)
-                globallog.append('[%s] %s' % (backup_item.backup_name,log))
+                globallog.append("[%s] %s" % (backup_item.backup_name, log))
                 self.logger.debug('[%s] nagios:"%i" log: %s', backup_item.backup_name, nagiosstatus, log)
-                processed = True
-                if nagiosstatus >= worst_nagiosstatus:
-                    worst_nagiosstatus = nagiosstatus
+                # processed = True
+                # if nagiosstatus >= worst_nagiosstatus:
+                #     worst_nagiosstatus = nagiosstatus
 
 
 def main():
@@ -335,7 +357,7 @@ def main():
         parser.print_usage()
         sys.exit(2)
 
-    backup_start_date = datetime.datetime.now().strftime('%Y%m%d-%Hh%Mm%S')
+    backup_start_date = datetime.datetime.now().strftime("%Y%m%d-%Hh%Mm%S")
 
     # options
     action = args[0]
@@ -351,16 +373,16 @@ def main():
     loglevel = options.loglevel
 
     # setup Logger
-    logger = logging.getLogger('tisbackup')
+    logger = logging.getLogger("tisbackup")
     hdlr = logging.StreamHandler()
-    hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
+    hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
     logger.addHandler(hdlr)
 
     # set loglevel
-    if loglevel in ('debug','warning','info','error','critical'):
+    if loglevel in ("debug", "warning", "info", "error", "critical"):
         numeric_level = getattr(logging, loglevel.upper(), None)
         if not isinstance(numeric_level, int):
-            raise ValueError('Invalid log level: %s' % loglevel)
+            raise ValueError("Invalid log level: %s" % loglevel)
         logger.setLevel(numeric_level)
 
     # Config file
@@ -371,19 +393,19 @@ def main():
     cp = ConfigParser()
     cp.read(config_file)
 
-    backup_base_dir = options.backup_base_dir or cp.get('global','backup_base_dir')
-    log_dir = os.path.join(backup_base_dir,'log')
+    backup_base_dir = options.backup_base_dir or cp.get("global", "backup_base_dir")
+    log_dir = os.path.join(backup_base_dir, "log")
     if not os.path.exists(log_dir):
         os.makedirs(log_dir)
 
     # if we run the nagios check, we don't create log file, everything is piped to stdout
-    if action!='checknagios':
+    if action != "checknagios":
         try:
-            hdlr = logging.FileHandler(os.path.join(log_dir,'tisbackup_%s.log' % (backup_start_date)))
-            hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
+            hdlr = logging.FileHandler(os.path.join(log_dir, "tisbackup_%s.log" % (backup_start_date)))
+            hdlr.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
             logger.addHandler(hdlr)
         except IOError as e:
-            if action == 'cleanup' and e.errno == errno.ENOSPC:
+            if action == "cleanup" and e.errno == errno.ENOSPC:
                 logger.warning("No space left on device, disabling file logging.")
             else:
                 raise e
@@ -392,15 +414,15 @@ def main():
     backup = tis_backup(dry_run=dry_run, verbose=verbose, backup_base_dir=backup_base_dir)
     backup.read_ini_file(config_file)

-    backup_sections = options.sections.split(',') if options.sections else []
+    backup_sections = options.sections.split(",") if options.sections else []

     all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
     if not backup_sections:
         backup_sections = all_sections
     else:
         for b in backup_sections:
-            if not b in all_sections:
-                raise Exception('Section %s is not defined in config file' % b)
+            if b not in all_sections:
+                raise Exception("Section %s is not defined in config file" % b)

     if dry_run:
         logger.warning("WARNING : DRY RUN, nothing will be done, just printing on screen...")
@@ -409,7 +431,7 @@ def main():
         backup.process_backup(backup_sections)
     elif action == "exportbackup":
         if not options.exportdir:
-            raise Exception('No export directory supplied dor exportbackup action')
+            raise Exception("No export directory supplied dor exportbackup action")
         backup.export_backups(backup_sections, options.exportdir)
     elif action == "cleanup":
         backup.cleanup_backup_section(backup_sections)
@@ -423,7 +445,6 @@ def main():
     elif action == "register_existing":
         backup.register_existingbackups(backup_sections)

-
     else:
         logger.error('Unhandled action "%s", quitting...', action)
         sys.exit(1)
tisbackup_gui.py (282 changes)
@@ -17,52 +17,52 @@
 # along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
 #
 # -----------------------------------------------------------------------
-import os,sys
+import os
+import sys
 from os.path import isfile, join

 tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
-sys.path.append(os.path.join(tisbackup_root_dir,'lib'))
-sys.path.append(os.path.join(tisbackup_root_dir,'libtisbackup'))
+sys.path.append(os.path.join(tisbackup_root_dir, "lib"))
+sys.path.append(os.path.join(tisbackup_root_dir, "libtisbackup"))


-from shutil import *
-from iniparse import ConfigParser,RawConfigParser
-from libtisbackup.common import *
-import time
-from flask import request, Flask, session, g, appcontext_pushed, redirect, url_for, abort, render_template, flash, jsonify, Response
-from urllib.parse import urlparse
-import json
 import glob
-import time
+import json

-from config import huey
-from tasks import run_export_backup, get_task, set_task

-from tisbackup import tis_backup
 import logging
 import re
+import time
+from shutil import *
+from urllib.parse import urlparse

+from flask import Flask, Response, abort, appcontext_pushed, flash, g, jsonify, redirect, render_template, request, session, url_for
+from iniparse import ConfigParser, RawConfigParser

+from config import huey
+from libtisbackup.common import *
+from tasks import get_task, run_export_backup, set_task
+from tisbackup import tis_backup

 cp = ConfigParser()
 cp.read("/etc/tis/tisbackup_gui.ini")

-CONFIG = cp.get('general','config_tisbackup').split(",")
-SECTIONS = cp.get('general','sections')
-ADMIN_EMAIL = cp.get('general','ADMIN_EMAIL')
-BASE_DIR = cp.get('general','base_config_dir')
+CONFIG = cp.get("general", "config_tisbackup").split(",")
+SECTIONS = cp.get("general", "sections")
+ADMIN_EMAIL = cp.get("general", "ADMIN_EMAIL")
+BASE_DIR = cp.get("general", "base_config_dir")

 tisbackup_config_file = CONFIG[0]
 config_number = 0

 cp = ConfigParser()
 cp.read(tisbackup_config_file)
-backup_base_dir = cp.get('global','backup_base_dir')
-dbstat = BackupStat(os.path.join(backup_base_dir,'log','tisbackup.sqlite'))
+backup_base_dir = cp.get("global", "backup_base_dir")
+dbstat = BackupStat(os.path.join(backup_base_dir, "log", "tisbackup.sqlite"))
 mindate = None
 error = None
 info = None
 app = Flask(__name__)
-app.secret_key = 'fsiqefiuqsefARZ4Zfesfe34234dfzefzfe'
-app.config['PROPAGATE_EXCEPTIONS'] = True
+app.secret_key = "fsiqefiuqsefARZ4Zfesfe34234dfzefzfe"
+app.config["PROPAGATE_EXCEPTIONS"] = True

 tasks_db = os.path.join(tisbackup_root_dir, "tasks.sqlite")
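The import churn in this hunk is isort-style regrouping as applied by ruff: standard library first, then third-party packages (flask, iniparse), then first-party modules (config, libtisbackup, tasks, tisbackup), each group alphabetized and separated by a blank line. A stdlib-only sketch of the convention (the group comments are illustrative):

# 1) standard library, alphabetized
import glob
import json
import logging

# 2) third-party packages would follow here after one blank line

# 3) first-party/local modules come last, also alphabetized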
@@ -70,7 +70,7 @@ tasks_db = os.path.join(tisbackup_root_dir,"tasks.sqlite")
 def read_all_configs(base_dir):
     raw_configs = []
     list_config = []
-    config_base_dir = base_dir
+    # config_base_dir = base_dir

     for file in os.listdir(base_dir):
         if isfile(join(base_dir, file)):
@@ -78,19 +78,19 @@ def read_all_configs(base_dir):

     for elem in raw_configs:
         line = open(elem).readline()
-        if 'global' in line:
+        if "global" in line:
             list_config.append(elem)

     backup_dict = {}
-    backup_dict['rsync_ssh_list'] = []
-    backup_dict['rsync_btrfs_list'] = []
-    backup_dict['rsync_list'] = []
-    backup_dict['null_list'] = []
-    backup_dict['pgsql_list'] = []
-    backup_dict['mysql_list'] = []
+    backup_dict["rsync_ssh_list"] = []
+    backup_dict["rsync_btrfs_list"] = []
+    backup_dict["rsync_list"] = []
+    backup_dict["null_list"] = []
+    backup_dict["pgsql_list"] = []
+    backup_dict["mysql_list"] = []
     # backup_dict['sqlserver_list'] = []
-    backup_dict['xva_list'] = []
-    backup_dict['metadata_list'] = []
+    backup_dict["xva_list"] = []
+    backup_dict["metadata_list"] = []
     # backup_dict['switch_list'] = []
     # backup_dict['oracle_list'] = []
@@ -99,7 +99,7 @@ def read_all_configs(base_dir):
     for config_file in list_config:
         cp.read(config_file)

-        backup_base_dir = cp.get('global', 'backup_base_dir')
+        backup_base_dir = cp.get("global", "backup_base_dir")
         backup = tis_backup(backup_base_dir=backup_base_dir)
         backup.read_ini_file(config_file)
@@ -110,11 +110,12 @@ def read_all_configs(base_dir):
             backup_sections = all_sections
         else:
             for b in backup_sections:
-                if not b in all_sections:
-                    raise Exception('Section %s is not defined in config file' % b)
+                if b not in all_sections:
+                    raise Exception("Section %s is not defined in config file" % b)

-        if not backup_sections:
-            sections = [backup_item.backup_name for backup_item in backup.backup_list]
+        # never used..
+        # if not backup_sections:
+        #     sections = [backup_item.backup_name for backup_item in backup.backup_list]

         for backup_item in backup.backup_list:
             if backup_item.backup_name in backup_sections:
@@ -125,35 +126,28 @@ def read_all_configs(base_dir):
                 result.append(b)

     for row in result:
-        backup_name = row['backup_name']
-        server_name = row['server_name']
-        backup_type = row['type']
+        backup_name = row["backup_name"]
+        server_name = row["server_name"]
+        backup_type = row["type"]
         if backup_type == "xcp-dump-metadata":
-            backup_dict['metadata_list'].append(
-                [server_name, backup_name, backup_type, ""])
+            backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "rsync+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_ssh_list'].append(
-                [server_name, backup_name, backup_type, remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync+btrfs+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_btrfs_list'].append(
-                [server_name, backup_name, backup_type, remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_list'].append(
-                [server_name, backup_name, backup_type, remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "null":
-            backup_dict['null_list'].append(
-                [server_name, backup_name, backup_type, ""])
+            backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "pgsql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['pgsql_list'].append(
-                [server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "mysql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['mysql_list'].append(
-                [server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
         # if backup_type == "sqlserver+ssh":
         #     db_name = row['db_name']
         #     backup_dict['sqlserver_list'].append(
@@ -163,8 +157,7 @@ def read_all_configs(base_dir):
         #     backup_dict['oracle_list'].append(
         #         [server_name, backup_name, backup_type, db_name])
         if backup_type == "xen-xva":
-            backup_dict['xva_list'].append(
-                [server_name, backup_name, backup_type, ""])
+            backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
         # if backup_type == "switch":
         #     backup_dict['switch_list'].append(
         #         [server_name, backup_name, backup_type, ""])
@@ -177,7 +170,7 @@ def read_config():
     cp = ConfigParser()
     cp.read(config_file)

-    backup_base_dir = cp.get('global','backup_base_dir')
+    backup_base_dir = cp.get("global", "backup_base_dir")
     backup = tis_backup(backup_base_dir=backup_base_dir)
     backup.read_ini_file(config_file)
@@ -188,12 +181,14 @@ def read_config():
         backup_sections = all_sections
     else:
         for b in backup_sections:
-            if not b in all_sections:
-                raise Exception('Section %s is not defined in config file' % b)
+            if b not in all_sections:
+                raise Exception("Section %s is not defined in config file" % b)

     result = []
-    if not backup_sections:
-        sections = [backup_item.backup_name for backup_item in backup.backup_list]
+    # not used ...
+    # if not backup_sections:
+    #     sections = [backup_item.backup_name for backup_item in backup.backup_list]

     for backup_item in backup.backup_list:
         if backup_item.backup_name in backup_sections:
@@ -204,40 +199,40 @@ def read_config():
             result.append(b)

     backup_dict = {}
-    backup_dict['rsync_ssh_list'] = []
-    backup_dict['rsync_btrfs_list'] = []
-    backup_dict['rsync_list'] = []
-    backup_dict['null_list'] = []
-    backup_dict['pgsql_list'] = []
-    backup_dict['mysql_list'] = []
+    backup_dict["rsync_ssh_list"] = []
+    backup_dict["rsync_btrfs_list"] = []
+    backup_dict["rsync_list"] = []
+    backup_dict["null_list"] = []
+    backup_dict["pgsql_list"] = []
+    backup_dict["mysql_list"] = []
     # backup_dict['sqlserver_list'] = []
-    backup_dict['xva_list'] = []
-    backup_dict['metadata_list'] = []
+    backup_dict["xva_list"] = []
+    backup_dict["metadata_list"] = []
     # backup_dict['switch_list'] = []
     # backup_dict['oracle_list'] = []
     for row in result:
-        backup_name = row['backup_name']
-        server_name = row['server_name']
-        backup_type = row['type']
+        backup_name = row["backup_name"]
+        server_name = row["server_name"]
+        backup_type = row["type"]
         if backup_type == "xcp-dump-metadata":
-            backup_dict['metadata_list'].append([server_name, backup_name, backup_type, ""])
+            backup_dict["metadata_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "rsync+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_ssh_list'].append([server_name, backup_name, backup_type,remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_ssh_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync+btrfs+ssh":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_btrfs_list'].append([server_name, backup_name, backup_type,remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_btrfs_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "rsync":
-            remote_dir = row['remote_dir']
-            backup_dict['rsync_list'].append([server_name, backup_name, backup_type,remote_dir])
+            remote_dir = row["remote_dir"]
+            backup_dict["rsync_list"].append([server_name, backup_name, backup_type, remote_dir])
         if backup_type == "null":
-            backup_dict['null_list'].append([server_name, backup_name, backup_type, ""])
+            backup_dict["null_list"].append([server_name, backup_name, backup_type, ""])
         if backup_type == "pgsql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['pgsql_list'].append([server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["pgsql_list"].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "mysql+ssh":
-            db_name = row['db_name'] if len(row['db_name']) > 0 else '*'
-            backup_dict['mysql_list'].append([server_name, backup_name, backup_type, db_name])
+            db_name = row["db_name"] if len(row["db_name"]) > 0 else "*"
+            backup_dict["mysql_list"].append([server_name, backup_name, backup_type, db_name])
         # if backup_type == "sqlserver+ssh":
         #     db_name = row['db_name']
         #     backup_dict['sqlserver_list'].append([server_name, backup_name, backup_type, db_name])
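The read_config() dispatch above is the same if-chain as in read_all_configs(), reformatted but not deduplicated. If it were ever consolidated, a table-driven variant is one option; the sketch below is an illustration under that assumption, not code from this commit:

# Hypothetical refactor: map each backup type to its target list and the
# extractor for the fourth column.
DISPATCH = {
    "xcp-dump-metadata": ("metadata_list", lambda row: ""),
    "rsync+ssh": ("rsync_ssh_list", lambda row: row["remote_dir"]),
    "rsync": ("rsync_list", lambda row: row["remote_dir"]),
    "pgsql+ssh": ("pgsql_list", lambda row: row["db_name"] or "*"),
}

def dispatch_row(backup_dict, row):
    entry = DISPATCH.get(row["type"])
    if entry:
        list_name, extract = entry
        backup_dict[list_name].append([row["server_name"], row["backup_name"], row["type"], extract(row)])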
@@ -245,38 +240,57 @@ def read_config():
         #     db_name = row['db_name']
         #     backup_dict['oracle_list'].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "xen-xva":
-            backup_dict['xva_list'].append([server_name, backup_name, backup_type, ""])
+            backup_dict["xva_list"].append([server_name, backup_name, backup_type, ""])
         # if backup_type == "switch":
         #     backup_dict['switch_list'].append([server_name, backup_name, backup_type, ""])
     return backup_dict

-@app.route('/')
+
+@app.route("/")
 def backup_all():
     backup_dict = read_config()
-    return render_template('backups.html', backup_list = backup_dict)
+    return render_template("backups.html", backup_list=backup_dict)


-@app.route('/config_number/')
-@app.route('/config_number/<int:id>')
+@app.route("/config_number/")
+@app.route("/config_number/<int:id>")
 def set_config_number(id=None):
-    if id != None and len(CONFIG) > id:
+    if id is not None and len(CONFIG) > id:
         global config_number
         config_number = id
         read_config()
     return jsonify(configs=CONFIG, config_number=config_number)


-@app.route('/all_json')
+@app.route("/all_json")
 def backup_all_json():
     backup_dict = read_all_configs(BASE_DIR)
-    return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_btrfs_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list'])
+    return json.dumps(
+        backup_dict["rsync_list"]
+        + backup_dict["rsync_btrfs_list"]
+        + backup_dict["rsync_ssh_list"]
+        + backup_dict["pgsql_list"]
+        + backup_dict["mysql_list"]
+        + backup_dict["xva_list"]
+        + backup_dict["null_list"]
+        + backup_dict["metadata_list"]
+    )
     # + backup_dict['switch_list'])+backup_dict['sqlserver_list']


-@app.route('/json')
+@app.route("/json")
 def backup_json():
     backup_dict = read_config()
-    return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_btrfs_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list'])
+    return json.dumps(
+        backup_dict["rsync_list"]
+        + backup_dict["rsync_btrfs_list"]
+        + backup_dict["rsync_ssh_list"]
+        + backup_dict["pgsql_list"]
+        + backup_dict["mysql_list"]
+        + backup_dict["xva_list"]
+        + backup_dict["null_list"]
+        + backup_dict["metadata_list"]
+    )
     # + backup_dict['switch_list'])+backup_dict['sqlserver_list']
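Both JSON endpoints now spread the list concatenation one term per line, which is what the formatter produces once the expression exceeds the line limit. An equivalent that scales with the number of keys would be itertools.chain (a sketch, not what the commit uses):

import itertools
import json

def dump_lists(backup_dict, keys):
    # chain.from_iterable flattens the per-type lists without intermediate copies
    return json.dumps(list(itertools.chain.from_iterable(backup_dict[k] for k in keys)))

print(dump_lists({"rsync_list": [["srv", "b", "rsync", "/d"]], "null_list": []}, ["rsync_list", "null_list"]))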
@@ -284,7 +298,7 @@ def check_usb_disk():
    """This method returns the mounts point of FIRST external disk"""
     # disk_name = []
     usb_disk_list = []
-    for name in glob.glob('/dev/sd[a-z]'):
+    for name in glob.glob("/dev/sd[a-z]"):
         for line in os.popen("udevadm info -q env -n %s" % name):
             if re.match("ID_PATH=.*usb.*", line):
                 usb_disk_list += [name]
@@ -296,19 +310,22 @@ def check_usb_disk():

     usb_partition_list = []
     for usb_disk in usb_disk_list:
-        cmd = "udevadm info -q path -n %s" % usb_disk + '1'
+        cmd = "udevadm info -q path -n %s" % usb_disk + "1"
         output = os.popen(cmd).read()
         print("cmd : " + cmd)
         print("output : " + output)

-        if '/devices/pci' in output:
+        if "/devices/pci" in output:
             # flash("partition found: %s1" % usb_disk)
             usb_partition_list.append(usb_disk + "1")

     print(usb_partition_list)

     if len(usb_partition_list) == 0:
-        raise_error("The drive %s has no partition" % (usb_disk_list[0] ), "You should initialize the usb drive and format an ext4 partition with TISBACKUP label")
+        raise_error(
+            "The drive %s has no partition" % (usb_disk_list[0]),
+            "You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
+        )
         return ""

     tisbackup_partition_list = []
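check_usb_disk() shells out to udevadm twice: the loop above collects /dev/sdX devices whose udev environment marks them as USB, and then probes each for a first partition. The detection step, condensed into a standalone function (same commands as the diff; Linux-only, returns an empty list elsewhere):

import glob
import os
import re

def find_usb_disks():
    # Return /dev/sd[a-z] devices whose udev ID_PATH mentions usb.
    disks = []
    for name in glob.glob("/dev/sd[a-z]"):
        for line in os.popen("udevadm info -q env -n %s" % name):
            if re.match("ID_PATH=.*usb.*", line):
                disks.append(name)
    return disks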
@@ -320,44 +337,48 @@ def check_usb_disk():
     print(tisbackup_partition_list)

     if len(tisbackup_partition_list) == 0:
-        raise_error("No tisbackup partition exist on disk %s" % (usb_disk_list[0] ), "You should initialize the usb drive and format an ext4 partition with TISBACKUP label")
+        raise_error(
+            "No tisbackup partition exist on disk %s" % (usb_disk_list[0]),
+            "You should initialize the usb drive and format an ext4 partition with TISBACKUP label",
+        )
         return ""

     if len(tisbackup_partition_list) > 1:
         raise_error("There are many usb disk", "You should plug remove one of them")
         return ""

     return tisbackup_partition_list[0]


 def check_already_mount(partition_name, refresh):
-    with open('/proc/mounts') as f:
+    with open("/proc/mounts") as f:
         mount_point = ""
         for line in f.readlines():
             if line.startswith(partition_name):
-                mount_point = line.split(' ')[1]
+                mount_point = line.split(" ")[1]
                 if not refresh:
                     run_command("/bin/umount %s" % mount_point)
                     os.rmdir(mount_point)
     return mount_point


 def run_command(cmd, info=""):
     flash("Executing: %s" % cmd)
     from subprocess import CalledProcessError, check_output

     result = ""
     try:
         result = check_output(cmd, stderr=subprocess.STDOUT, shell=True)
-    except CalledProcessError as e:
+    except CalledProcessError:
         raise_error(result, info)
     return result


 def check_mount_disk(partition_name, refresh):

     mount_point = check_already_mount(partition_name, refresh)
     if not refresh:

         mount_point = "/mnt/TISBACKUP-" + str(time.time())
         os.mkdir(mount_point)
         flash("must mount " + partition_name)
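Two lint fixes land in the hunk above: `!= None` comparisons become `is not None` elsewhere in this file, and the unused `as e` binding is dropped from the CalledProcessError handler (ruff F841). Note that the handler still reports `result`, which stays empty when check_output raises; capturing the failed command's output from the exception itself would look like this (a sketch, not part of the commit):

from subprocess import STDOUT, CalledProcessError, check_output

def run_command_strict(cmd, info=""):
    try:
        return check_output(cmd, stderr=STDOUT, shell=True)
    except CalledProcessError as e:
        # e.output holds whatever the command printed before failing
        raise RuntimeError("%s failed: %r (%s)" % (cmd, e.output, info))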
@@ -369,41 +390,40 @@ def check_mount_disk(partition_name, refresh):

     return mount_point

-@app.route('/status.json')
+
+@app.route("/status.json")
 def export_backup_status():
     exports = dbstat.query('select * from stats where TYPE="EXPORT" and backup_start>="%s"' % mindate)
     error = ""
     finish = not runnings_backups()
-    if get_task() != None and finish:
+    if get_task() is not None and finish:
         status = get_task().get()
         if status != "ok":
             error = "Export failing with error: " + status

     return jsonify(data=exports, finish=finish, error=error)


 def runnings_backups():
     task = get_task()
-    is_runnig = (task != None)
-    finish = ( is_runnig and task.get() != None)
+    is_runnig = task is not None
+    finish = is_runnig and task.get() is not None
     return is_runnig and not finish


-@app.route('/backups.json')
+@app.route("/backups.json")
 def last_backup_json():
     exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC ')
-    return Response(response=json.dumps(exports),
-                    status=200,
-                    mimetype="application/json")
+    return Response(response=json.dumps(exports), status=200, mimetype="application/json")


-@app.route('/last_backups')
+@app.route("/last_backups")
 def last_backup():
     exports = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC LIMIT 20 ')
     return render_template("last_backups.html", backups=exports)


-@app.route('/export_backup')
+@app.route("/export_backup")
 def export_backup():

     raise_error("", "")
@@ -418,12 +438,11 @@ def export_backup():
         if len(section) > 0:
             sections.append(section[1])

-    noJobs = (not runnings_backups())
+    noJobs = not runnings_backups()
     if "start" in list(request.args.keys()) or not noJobs:
         start = True
         if "sections" in list(request.args.keys()):
-            backup_sections = request.args.getlist('sections')
+            backup_sections = request.args.getlist("sections")

-
     else:
         start = False
@@ -440,10 +459,14 @@ def export_backup():
         mindate = datetime2isodate(datetime.datetime.now())
         if not error and start:
             print(tisbackup_config_file)
-            task = run_export_backup(base=backup_base_dir, config_file=CONFIG[config_number], mount_point=mount_point, backup_sections=",".join([str(x) for x in backup_sections]))
+            task = run_export_backup(
+                base=backup_base_dir,
+                config_file=CONFIG[config_number],
+                mount_point=mount_point,
+                backup_sections=",".join([str(x) for x in backup_sections]),
+            )
             set_task(task)

     return render_template("export_backup.html", error=error, start=start, info=info, email=ADMIN_EMAIL, sections=sections)
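run_export_backup() is queued through huey (imported from config above), and set_task() stores the result handle that runnings_backups() later polls with task.get(). A minimal sketch of that producer/poller pattern, under the assumption that the task is registered with a configured Huey instance and a running consumer (shown as comments since it needs that runtime):

# from config import huey
#
# @huey.task()
# def run_export_backup(base, config_file, mount_point, backup_sections):
#     ...  # perform the export
#     return "ok"
#
# result = run_export_backup(base=..., config_file=..., mount_point=..., backup_sections="s1,s2")
# result.get()  # None while the consumer is still running it; "ok" once finished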
@@ -456,6 +479,7 @@ def raise_error(strError, strInfo):
 if __name__ == "__main__":
     read_config()
     from os import environ
-    if 'WINGDB_ACTIVE' in environ:
+
+    if "WINGDB_ACTIVE" in environ:
         app.debug = False
-    app.run(host= '0.0.0.0',port=8080)
+    app.run(host="0.0.0.0", port=8080)