Option to choose which backups to export to an external hard drive
parent 3439115500
commit f7056732f4
@@ -325,7 +325,9 @@ create table stats (
         create index idx_stats_backup_name on stats(backup_name);""")
         self.db.execute("""
         create index idx_stats_backup_location on stats(backup_location);""")
+        self.db.execute("""
+        CREATE INDEX idx_stats_backup_name_start on stats(backup_name,backup_start);""")
         self.db.commit()
 
     def start(self,backup_name,server_name,TYPE,description='',backup_location=None):
         """ Add in stat DB a record for the newly running backup"""
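Note: the new composite index lets SQLite answer "latest run of a given backup" queries (filter on backup_name, order by backup_start) without scanning the table. A minimal sqlite3 sketch of the schema as far as this hunk shows it (the real stats table has more columns):

    import sqlite3

    db = sqlite3.connect(':memory:')
    # Column set trimmed to the fields named in this hunk; the real table is wider.
    db.execute("create table stats (backup_name text, backup_location text, backup_start text)")
    db.execute("create index idx_stats_backup_name on stats(backup_name)")
    db.execute("create index idx_stats_backup_location on stats(backup_location)")
    # Added by this commit: name+start lookups can be served from the index alone.
    db.execute("CREATE INDEX idx_stats_backup_name_start on stats(backup_name,backup_start)")
    db.commit()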
@@ -743,6 +745,8 @@ class backup_generic:
             return (nagiosStateCritical,"CRITICAL Backup %s (%s), %s seems older than start of backup" % (self.backup_name,isodate2datetime(b['backup_end']),b['log']))
         elif os.path.isdir(b['backup_location']):
             return (nagiosStateOk,"OK Backup %s (%s), %s" % (self.backup_name,isodate2datetime(b['backup_end']),b['log']))
+        elif self.type == 'copy-vm-xcp':
+            return (nagiosStateOk,"OK Backup %s (%s), %s" % (self.backup_name,isodate2datetime(b['backup_end']),b['log']))
         else:
             return (nagiosStateCritical,"CRITICAL Backup %s (%s), %s has disapeared from backup location %s" % (self.backup_name,isodate2datetime(b['backup_end']),b['log'],b['backup_location']))
 
@@ -768,7 +772,18 @@ class backup_generic:
         if os.path.isdir(oldbackup_location) and self.backup_dir in oldbackup_location :
             self.logger.info('[%s] removing directory "%s"',self.backup_name,oldbackup_location)
             if not self.dry_run:
-                shutil.rmtree(oldbackup_location.encode('ascii'))
+                if self.type =="rsync+btrfs+ssh" or self.type == "rsync+btrfs":
+                    cmd = "/sbin/btrfs subvolume delete %s"%oldbackup_location.encode('ascii')
+                    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
+                    log = monitor_stdout(process,'',self)
+                    returncode = process.returncode
+                    if (returncode != 0):
+                        self.logger.error("[" + self.backup_name + "] shell program exited with error code: %s"%log)
+                        raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd)
+                    else:
+                        self.logger.info("[" + self.backup_name + "] deleting snapshot volume: %s"%oldbackup_location.encode('ascii'))
+                else:
+                    shutil.rmtree(oldbackup_location.encode('ascii'))
         if os.path.isfile(oldbackup_location) and self.backup_dir in oldbackup_location :
             self.logger.debug('[%s] removing file "%s"',self.backup_name,oldbackup_location)
             if not self.dry_run:
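Note: backups made by the rsync+btrfs types are btrfs snapshots (subvolumes), which shutil.rmtree() generally cannot remove; they have to be dropped with `btrfs subvolume delete`, hence the branch above. A standalone sketch of the same logic, with the project's monitor_stdout() logging helper replaced by a plain communicate() call:

    import shutil
    import subprocess

    def remove_old_backup(path, backup_type, dry_run=False):
        """Delete one expired backup: btrfs snapshots via the btrfs tool, plain trees via rmtree."""
        if dry_run:
            return
        if backup_type in ("rsync+btrfs+ssh", "rsync+btrfs"):
            cmd = ["/sbin/btrfs", "subvolume", "delete", path]
            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            log, _ = process.communicate()
            if process.returncode != 0:
                raise Exception("btrfs subvolume delete failed (%s): %s" % (process.returncode, log))
        else:
            shutil.rmtree(path)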
@@ -860,13 +875,13 @@ class backup_generic:
 
         for l in log.splitlines():
             if l.startswith('Number of files:'):
-                stats['total_files_count'] += int(l.split(':')[1])
+                stats['total_files_count'] += int(re.findall('[0-9]+', l.split(':')[1])[0])
             if l.startswith('Number of files transferred:'):
                 stats['written_files_count'] += int(l.split(':')[1])
             if l.startswith('Total file size:'):
-                stats['total_bytes'] += int(l.split(':')[1].split()[0])
+                stats['total_bytes'] += float(l.replace(',','').split(':')[1].split()[0])
             if l.startswith('Total transferred file size:'):
-                stats['written_bytes'] += int(l.split(':')[1].split()[0])
+                stats['written_bytes'] += float(l.replace(',','').split(':')[1].split()[0])
         returncode = process.returncode
         ## deal with exit code 24 (file vanished)
         if (returncode == 24):
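Note: newer rsync releases decorate --stats lines (thousands separators, "(reg: …, dir: …)" suffixes), which breaks the bare int() casts the old code used. The byte totals strip commas before converting; the file count takes the first digit run, which tolerates the suffix but still truncates a separator-grouped count ("1,234" parses as 1), worth knowing if exact counts matter. A sketch of the parsing against a sample --stats extract (sample lines are illustrative):

    import re

    stats = {'total_files_count': 0, 'total_bytes': 0.0}
    log = ("Number of files: 107 (reg: 85, dir: 22)\n"
           "Total file size: 1,234,567 bytes\n")
    for l in log.splitlines():
        if l.startswith('Number of files:'):
            # first digit run after the colon: '107' here, but '1,234' would yield 1
            stats['total_files_count'] += int(re.findall('[0-9]+', l.split(':')[1])[0])
        if l.startswith('Total file size:'):
            # drop thousands separators, then take the number before 'bytes'
            stats['total_bytes'] += float(l.replace(',', '').split(':')[1].split()[0])

    assert stats['total_files_count'] == 107
    assert stats['total_bytes'] == 1234567.0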
@@ -884,7 +899,6 @@ class backup_generic:
 
         endtime = time.time()
         duration = (endtime-starttime)/3600.0
-
         if not self.dry_run and self.dbstat:
             self.dbstat.finish(stat_rowid,
                 backup_end=datetime2isodate(datetime.datetime.now()),
@@ -895,7 +909,7 @@ class backup_generic:
                 written_bytes=stats['written_bytes'],
                 status=stats['status'],
                 log=stats['log'],
                 backup_location=backup_dest)
         return stats
 
 
@@ -22,6 +22,25 @@
 </table>
 {% endif %}
 
+{% if backup_list['rsync_btrfs_list']|count != 0 %}
+<h2 class="title">rsync+btrfs+ssh</h2>
+<table id="table-design">
+  <thead>
+    <th>Server</th>
+    <th>Backup</th>
+    <th>Directory</th>
+  </thead>
+  </tbody>
+  {% for entry in backup_list['rsync_btrfs_list'] %}
+  <tr>
+    <td>{{ entry[0] }}</td>
+    <td>{{ entry[1] }}</td>
+    <td>{{ entry[3] }}</td>
+  </tr>
+  {% endfor %}
+  </tbody>
+</table>
+{% endif %}
 
 {% if backup_list['rsync_list']|count != 0 %}
 <h2 class="title">Rsync</h2>
@@ -66,7 +85,7 @@
 
 
 {% if backup_list['mysql_list']|count != 0 %}
-<h2 class="title">MySQL</h2>
+<h2 class="title">SQL Server</h2>
 <table id="table-design">
 <thead>
 <th>Server</th>
@@ -85,6 +104,25 @@
 </table>
 {% endif %}
 
+{% if backup_list['sqlserver_list']|count != 0 %}
+<h2 class="title">SQL Server</h2>
+<table id="table-design">
+  <thead>
+    <th>Server</th>
+    <th>Backup</th>
+    <th>Database</th>
+  </thead>
+  <tbody>
+  {% for entry in backup_list['sqlserver_list'] %}
+  <tr>
+    <td>{{ entry[0] }}</td>
+    <td>{{ entry[1] }}</td>
+    <td>{{ entry[3] }}</td>
+  </tr>
+  {% endfor %}
+  </tbody>
+</table>
+{% endif %}
 
 {% if backup_list['xva_list']|count != 0 %}
 <h2 class="title">XVA</h2>
@@ -28,9 +28,15 @@
 });
 </script>
 <form id="backup" action='/export_backup'>
+<p> Select backups to save : <br />
+{% for entry in sections %}
+<input type="checkbox" name="sections" value="{{entry}}" checked="true">{{entry}} <br />
+{% endfor %}
+<p>
 <input type="hidden" name="start" value="true" />
 <input type="button" id="confirm_button" value="Launch Backup" style="margin-left: 400px;" />
 </form>
+
 {% else %}
 <h2 class="title">Backups is running: </h2>
 <table id="table-design">
@@ -7,7 +7,7 @@
 "sAjaxDataProp": "data",
 "sAjaxSource": "/backups.json",
 "iDisplayLength": 25,
-"aLengthMenu": [[25, 50, 100, 200, 500, -1], [25, 50, 100, 200, 500, "All"]],
+// "aLengthMenu": [[25, 50, 100, 200, 500, -1], [25, 50, 100, 200, 500, "All"]],
 "aaSorting": [[ 0, "desc" ]],
 "aoColumnDefs": [
 {
@@ -32,6 +32,13 @@
 var d = new Date(data);
 return d.getFullYear()+"/"+(d.getMonth()+1)+"/"+d.getDate()+" "+d.toLocaleTimeString();
 }
+},
+{
+"aTargets": [ 6 ],
+"mData": "written_bytes",
+"mRender": function ( data, type, full ) {
+return humanFileSize(data, false);
+}
 }
 ],
 "aoColumns": [
@@ -41,7 +48,7 @@
 { "mData":"backup_name" },
 { "mData":"backup_duration"},
 { "mData":"status" },
-{ "mData":"written_bytes" , "bVisible": false},
+{ "mData":"written_bytes"},
 { "mData":"written_files_count" , "bVisible": false},
 { "mData":"total_files_count" , "bVisible": false},
 { "mData":"total_bytes" , "bVisible": false },
@@ -49,9 +56,40 @@
 { "mData":"description" , "bVisible": false },
 { "mData":"log" , "bVisible": false },
 { "mData":"TYPE" , "bVisible": false }
-]
+],
+"fnFooterCallback": function ( nRow, aaData, iStart, iEnd, aiDisplay ) {
+    //humanFileSize(aaData[1]['written_bytes'], true)
+    var total_bytes = 0;
+    var total_time = 0;
+    for (var i = iStart; i < iEnd; i++) {
+        total_bytes += aaData[i]['written_bytes'];
+        total_time += aaData[i]['backup_duration'];
+    }
+
+    var cells_data = nRow.getElementsByTagName('th');
+    cells_data[1].innerHTML = humanFileSize(total_bytes, true);
+
+
+    var cells_time = nRow.nextElementSibling.cells;
+    cells_time[1].innerHTML = secondsToHms(total_time * 3600);
+}
 } );
+$('#inputDatabaseName').keyup(function () { delay(function(){ oTable.fnLengthChange($('#inputDatabaseName').val() ); }, 300 )});
+$(".dataTables_length").remove()
+var nb_row = GetURLParameter('row');
+if (nb_row ){
+    oTable.fnLengthChange( nb_row) ;
+    $('#inputDatabaseName').val(nb_row);
+}
+
 } );
+var delay = (function(){
+    var timer = 0;
+    return function(callback, ms){
+        clearTimeout (timer);
+        timer = setTimeout(callback, ms);
+    };
+})();
 function fnShowHide( iCol )
 {
 /* Get the DataTables object again - this is not a recreation, just a get of the object */
@@ -67,6 +105,57 @@
 var s = Math.floor(d % 3600 % 60);
 return ((h > 0 ? h + ":" : "0:") + (m > 0 ? (m < 10 ? "0" : "") + m + ":" : "00:") + (s < 10 ? "0" : "") + s);
 }
+function GetURLParameter(sParam)
+{
+    var sPageURL = window.location.search.substring(1);
+    var sURLVariables = sPageURL.split('&');
+    for (var i = 0; i < sURLVariables.length; i++)
+    {
+        var sParameterName = sURLVariables[i].split('=');
+        if (sParameterName[0] == sParam)
+        {
+            return sParameterName[1];
+        }
+    }
+}
+function humanFileSize(bytes, si) {
+    var thresh = si ? 1000 : 1024;
+    if(bytes < thresh) return bytes + ' B';
+    var units = si ? ['kB','MB','GB','TB','PB','EB','ZB','YB'] : ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB'];
+    var u = -1;
+    do {
+        bytes /= thresh;
+        ++u;
+    } while(bytes >= thresh);
+    return bytes.toFixed(1)+' '+units[u];
+};
+$.fn.dataTableExt.oApi.fnLengthChange = function ( oSettings, iDisplay )
+{
+    oSettings._iDisplayLength = iDisplay;
+    oSettings.oApi._fnCalculateEnd( oSettings );
+
+    /* If we have space to show extra rows (backing up from the end point - then do so */
+    if ( oSettings._iDisplayEnd == oSettings.aiDisplay.length )
+    {
+        oSettings._iDisplayStart = oSettings._iDisplayEnd - oSettings._iDisplayLength;
+        if ( oSettings._iDisplayStart < 0 )
+        {
+            oSettings._iDisplayStart = 0;
+        }
+    }
+
+    if ( oSettings._iDisplayLength == -1 )
+    {
+        oSettings._iDisplayStart = 0;
+    }
+
+    oSettings.oApi._fnDraw( oSettings );
+
+    if ( oSettings.aanFeatures.l )
+    {
+        $('select', oSettings.aanFeatures.l).val( iDisplay );
+    }
+};
 </script>
 <table style='text-align: center;' cellpadding="0" cellspacing="0" border="0" class="display" id="table-design">
 <thead style='text-align: center;'>
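Note: for reference, the thresholding logic of the humanFileSize() helper added above, ported to Python (same SI/binary unit split; a sketch for illustration, not part of the commit):

    def human_file_size(num_bytes, si=False):
        """Render a byte count like the JS helper above: SI (1000) or binary (1024) units."""
        thresh = 1000 if si else 1024
        if num_bytes < thresh:
            return "%d B" % num_bytes
        units = (['kB','MB','GB','TB','PB','EB','ZB','YB'] if si
                 else ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB'])
        u = -1
        while num_bytes >= thresh:
            num_bytes /= float(thresh)
            u += 1
        return "%.1f %s" % (num_bytes, units[u])

    assert human_file_size(1536) == '1.5 KiB'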
@@ -90,8 +179,19 @@
 <tbody>
 
 </tbody>
+<tfoot>
+  <tr>
+    <th style="text-align:right" colspan="4" rowspan="1">Total Written Bytes</th>
+    <th rowspan="1" colspan="1"></th>
+  </tr>
+  <tr>
+    <th style="text-align:right" colspan="4" rowspan="1">Total Duration time</th>
+    <th rowspan="1" colspan="1"></th>
+  </tr>
+</tfoot>
 </table>
-<br />
+<br / >
+<p>Nomber of rows per page : <input id="inputDatabaseName" value=25 size=4 style="text-align:center" ></p>
 <p>
 Backup start<input type="checkbox" onclick="fnShowHide( 0 );"/>
 Backup end<input type="checkbox" onclick="fnShowHide( 1 );"/>
@@ -117,5 +217,6 @@ $('input:checkbox:eq(2)').attr('checked', true);
 $('input:checkbox:eq(3)').attr('checked', true);
 $('input:checkbox:eq(4)').attr('checked', true);
 $('input:checkbox:eq(5)').attr('checked', true);
+$('input:checkbox:eq(6)').attr('checked', true);
 </script>
 {% endblock %}
@@ -32,12 +32,15 @@ from libtisbackup.common import *
 from libtisbackup.backup_mysql import backup_mysql
 from libtisbackup.backup_rsync import backup_rsync
 from libtisbackup.backup_rsync import backup_rsync_ssh
+from libtisbackup.backup_rsync_btrfs import backup_rsync_btrfs
+from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh
 from libtisbackup.backup_pgsql import backup_pgsql
 from libtisbackup.backup_xva import backup_xva
 #from libtisbackup.backup_switch import backup_switch
 from libtisbackup.backup_null import backup_null
 from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
 from libtisbackup.copy_vm_xcp import copy_vm_xcp
+from libtisbackup.backup_sqlserver import backup_sqlserver
 
 usage="""\
 %prog -c configfile action
@@ -18,6 +18,10 @@
 #
 # -----------------------------------------------------------------------
 import os,sys
+tisbackup_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
+sys.path.append(os.path.join(tisbackup_root_dir,'lib'))
+
+
 from shutil import *
 from iniparse import ConfigParser
 from libtisbackup.common import *
@@ -32,11 +36,11 @@ import logging
 import re
 
 
-CONFIG = uwsgi.opt['config']
+CONFIG = uwsgi.opt['config_tisbackup']
 SECTIONS = uwsgi.opt['sections']
 ADMIN_EMAIL = uwsgi.opt.get('ADMIN_EMAIL',uwsgi.opt.get('admin_email'))
 spooler = uwsgi.opt['spooler']
-tisbackup_config_file= uwsgi.opt['config']
+tisbackup_config_file= uwsgi.opt['config_tisbackup']
 
 cp = ConfigParser()
 cp.read(tisbackup_config_file)
@@ -82,10 +86,12 @@ def read_config():
 
     backup_dict = {}
     backup_dict['rsync_ssh_list'] = []
+    backup_dict['rsync_btrfs_list'] = []
    backup_dict['rsync_list'] = []
     backup_dict['null_list'] = []
     backup_dict['pgsql_list'] = []
     backup_dict['mysql_list'] = []
+    backup_dict['sqlserver_list'] = []
     backup_dict['xva_list'] = []
     backup_dict['metadata_list'] = []
     backup_dict['switch_list'] = []
@@ -98,6 +104,9 @@ def read_config():
         if backup_type == "rsync+ssh":
             remote_dir = row['remote_dir']
             backup_dict['rsync_ssh_list'].append([server_name, backup_name, backup_type,remote_dir])
+        if backup_type == "rsync+btrfs+ssh":
+            remote_dir = row['remote_dir']
+            backup_dict['rsync_btrfs_list'].append([server_name, backup_name, backup_type,remote_dir])
         if backup_type == "rsync":
             remote_dir = row['remote_dir']
             backup_dict['rsync_list'].append([server_name, backup_name, backup_type,remote_dir])
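Note: a hedged sketch of the ini section shape read_config() consumes for the new backup type; the key names mirror the row[...] accesses above, while the section name and values are illustrative (the project itself reads the file with iniparse):

    from configparser import ConfigParser
    import io

    sample = """
    [srv1-home]
    type = rsync+btrfs+ssh
    server_name = srv1.example.org
    remote_dir = /home
    """
    cp = ConfigParser()
    cp.read_string(sample)
    row = cp['srv1-home']
    assert row['type'] == 'rsync+btrfs+ssh'
    assert row['remote_dir'] == '/home'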
@@ -109,6 +118,9 @@ def read_config():
         if backup_type == "mysql+ssh":
             db_name = row['db_name']
             backup_dict['mysql_list'].append([server_name, backup_name, backup_type, db_name])
+        if backup_type == "sqlserver+ssh":
+            db_name = row['db_name']
+            backup_dict['sqlserver_list'].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "xen-xva":
             backup_dict['xva_list'].append([server_name, backup_name, backup_type, ""])
         if backup_type == "switch":
@@ -123,37 +135,7 @@ def backup_all():
 @app.route('/json')
 def backup_json():
     backup_dict = read_config()
-    return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list']+ backup_dict['switch_list'])
+    return json.dumps(backup_dict['rsync_list']+backup_dict['rsync_btrfs_list']+backup_dict['rsync_ssh_list']+backup_dict['pgsql_list']+backup_dict['mysql_list']+backup_dict['xva_list']+backup_dict['null_list']+backup_dict['metadata_list']+ backup_dict['switch_list'])
 
-#def check_usb_disk():
-#    """This method returns the mounts point of FIRST external disk"""
-#    disk_name = []
-#    for name in glob.glob('/dev/sd[a-z]'):
-#        for line in os.popen("udevinfo --query=env --name %s" % name):
-#            if "ID_BUS=usb" in line:
-#                disk_name += [ name ]
-#    if len(disk_name) == 0:
-#        raise_error("cannot find external usb disk", "You should plug the usb hard drive into the server")
-#        return ""
-#    elif len(disk_name) > 1:
-#        raise_error("There are many usb disk", "You should plug remove one of them")
-#        return ""
-#    else:
-#        disk_name = disk_name[0]
-#    flash("The first usb media is: %s" % disk_name)
-#    if os.path.exists(disk_name+"1"):
-#        flash("partition found: %s1" % disk_name)
-#        partition_name = disk_name+"1"
-#    else:
-#        raise_error("No partition exist", "You should initialize the usb drive")
-#        return ""
-#    if not "tisbackup" in os.popen("/sbin/dumpe2fs -h %s 2>&1 |/bin/grep 'volume name'" % partition_name).read():
-#        raise_error("the label is not vaid", "You should use 'TISBACKUP' label")
-#        return ""
-#    if not "ext4" in os.popen("/sbin/fsck -N %s 2>&1" % partition_name).read():
-#        raise_error("bad file system", "You should format usb drive into ext4")
-#        return ""
-#    return partition_name
-
 
 
 def check_usb_disk():
@@ -256,10 +238,19 @@ def last_backup():
 @app.route('/export_backup')
 def export_backup():
     raise_error("", "")
+    backup_dict = read_config()
+    sections = []
+    for backup_types in backup_dict:
+        for section in backup_dict[backup_types]:
+            if section.count > 0:
+                sections.append(section[1])
 
     noJobs = ( len(os.listdir(spooler)) == 0 )
     if "start" in request.args.keys() or not noJobs:
         start=True
+        if "sections" in request.args.keys():
+            backup_sections = request.args.getlist('sections')
+
     else:
         start=False
     cp.read(tisbackup_config_file)
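Note: two details in this hunk are worth flagging. request.args.getlist('sections') collects every checked checkbox of the same name from the form added in export_backup.html. And `if section.count > 0:` tests the bound list method rather than calling it, so it is always truthy and every section passes the filter; `len(section) > 0` was probably intended. A minimal Flask sketch of the multi-value read (route and names taken from the hunk):

    from flask import Flask, request

    app = Flask(__name__)

    @app.route('/export_backup')
    def export_backup():
        # ?sections=srv1-home&sections=srv2-mysql -> ['srv1-home', 'srv2-mysql']
        backup_sections = request.args.getlist('sections')
        return ",".join(backup_sections)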
@@ -274,9 +265,9 @@ def export_backup():
     global mindate
     mindate = datetime2isodate(datetime.datetime.now())
     if not error and start:
-        run_export_backup.spool(base=backup_base_dir, config_file=tisbackup_config_file, mount_point=mount_point)
+        run_export_backup.spool(base=backup_base_dir, config_file=tisbackup_config_file, mount_point=mount_point, backup_sections=",".join([str(x) for x in backup_sections]))
 
-    return render_template("export_backup.html", error=error, start=start, info=info, email=ADMIN_EMAIL)
+    return render_template("export_backup.html", error=error, start=start, info=info, email=ADMIN_EMAIL, sections=sections)
 
 
 def raise_error(strError, strInfo):
@@ -303,8 +294,11 @@ def run_export_backup(args):
     # Main
     logger.info("Running export....")
 
-    backup_sections = []
+    if args['backup_sections']:
+        backup_sections = args['backup_sections'].split(",")
+    else:
+        backup_sections = []
 
     backup = tis_backup(dry_run=False,verbose=True,backup_base_dir=args['base'])
     backup.read_ini_file(args['config_file'])
     mount_point = args['mount_point']
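Note: arguments handed to the uwsgi spooler travel as a flat dict of string values, which is why the view joins the selected sections with commas and run_export_backup() splits them back apart. A sketch of the round trip (helper names are hypothetical):

    def pack_sections(backup_sections):
        # what the /export_backup view spools: one comma-joined string value
        return {"backup_sections": ",".join(str(x) for x in backup_sections)}

    def unpack_sections(args):
        # what run_export_backup() does on the spooler side
        return args["backup_sections"].split(",") if args["backup_sections"] else []

    assert unpack_sections(pack_sections(["srv1-home", "srv2-mysql"])) == ["srv1-home", "srv2-mysql"]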