ajout paramètres tmp_dir = '/tmp' et encoding = 'UTF8' pour le driver backup pgsql

This commit is contained in:
htouvet 2018-02-06 15:21:55 +01:00
parent 1190eb4d9d
commit c29ad67075

View File

@ -21,7 +21,7 @@ import sys
try: try:
sys.stderr = open('/dev/null') # Silence silly warnings from paramiko sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
import paramiko import paramiko
except ImportError,e: except ImportError,e:
print "Error : can not load paramiko library %s" % e print "Error : can not load paramiko library %s" % e
raise raise
@ -31,15 +31,17 @@ from common import *
class backup_pgsql(backup_generic):
    """Backup of a PostgreSQL database, fetched over ssh as a gzipped SQL dump."""

    # Driver identifier used in backup job configuration files.
    type = 'pgsql+ssh'

    # Configuration keys: ssh private key is mandatory; the database name,
    # the remote staging directory and the dump encoding are optional.
    required_params = backup_generic.required_params + ['private_key']
    optional_params = backup_generic.optional_params + ['db_name', 'tmp_dir', 'encoding']

    # Defaults: an empty db_name means "dump every non-template database";
    # tmp_dir is where the dump is staged on the remote host; encoding is
    # passed to pg_dump's -E option.
    db_name = ''
    tmp_dir = '/tmp'
    encoding = 'UTF8'
def do_backup(self,stats): def do_backup(self,stats):
self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date) self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date)
if not os.path.isdir(self.dest_dir): if not os.path.isdir(self.dest_dir):
if not self.dry_run: if not self.dry_run:
os.makedirs(self.dest_dir) os.makedirs(self.dest_dir)
@ -60,32 +62,36 @@ class backup_pgsql(backup_generic):
if self.db_name: if self.db_name:
stats['log']= "Successfully backup processed to the following database :" stats['log']= "Successfully backup processed to the following database :"
self.do_pgsqldump(stats) self.do_pgsqldump(stats)
else: else:
stats['log']= "Successfully backuping processed to the following databases :" stats['log']= "Successfully backuping processed to the following databases :"
stats['status']='List' stats['status']='List'
cmd = """su - postgres -c 'psql -t -c "SELECT datname FROM pg_database WHERE datistemplate = false;"' 2> /dev/null""" cmd = """su - postgres -c 'psql -t -c "SELECT datname FROM pg_database WHERE datistemplate = false;"' 2> /dev/null"""
self.logger.debug('[%s] List databases: %s',self.backup_name,cmd) self.logger.debug('[%s] List databases: %s',self.backup_name,cmd)
(error_code,output) = ssh_exec(cmd,ssh=self.ssh) (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output) self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
if error_code: if error_code:
raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd)) raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
databases = output.split('\n') databases = output.split('\n')
for database in databases: for database in databases:
if database.strip() not in ("", "template0", "template1"): if database.strip() not in ("", "template0", "template1"):
self.db_name = database.strip() self.db_name = database.strip()
self.do_pgsqldump(stats) self.do_pgsqldump(stats)
stats['status']='OK' stats['status']='OK'
def do_pgsqldump(self,stats): def do_pgsqldump(self,stats):
t = datetime.datetime.now() t = datetime.datetime.now()
backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S') backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
# dump db # dump db
cmd = " su - postgres -c 'pg_dump "+ self.db_name + ' > /tmp/' + self.db_name + '-' + backup_start_date + ".sql '" cmd = " su - postgres -c 'pg_dump -E %(encoding)s %(db_name)s > %(tmp_dir)s/%(db_name)s-%(backup_start_date)s.sql'" % {
'encoding':self.encoding,
'db_name':self.db_name,
'tmp_dir':self.tmp_dir,
'backup_start_date':backup_start_date}
self.logger.debug('[%s] %s ',self.backup_name,cmd) self.logger.debug('[%s] %s ',self.backup_name,cmd)
if not self.dry_run: if not self.dry_run:
(error_code,output) = ssh_exec(cmd,ssh=self.ssh) (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
@ -116,9 +122,9 @@ class backup_pgsql(backup_generic):
stats['total_files_count']=1 + stats.get('total_files_count', 0) stats['total_files_count']=1 + stats.get('total_files_count', 0)
stats['written_files_count']=1 + stats.get('written_files_count', 0) stats['written_files_count']=1 + stats.get('written_files_count', 0)
stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0) stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0)
stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0) stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0)
stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name) stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name)
stats['backup_location'] = self.dest_dir stats['backup_location'] = self.dest_dir
cmd = 'rm -f /tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz' cmd = 'rm -f /tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
self.logger.debug('[%s] %s ',self.backup_name,cmd) self.logger.debug('[%s] %s ',self.backup_name,cmd)
@ -137,7 +143,7 @@ class backup_pgsql(backup_generic):
filelist = os.listdir(self.backup_dir) filelist = os.listdir(self.backup_dir)
filelist.sort() filelist.sort()
p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$') p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
for item in filelist: for item in filelist:
if p.match(item): if p.match(item):
dir_name = os.path.join(self.backup_dir,item) dir_name = os.path.join(self.backup_dir,item)