Postgres database name is optional: when db_name is not set, all databases except template0 and template1 are backed up
This commit is contained in: parent b248df8194, commit 169602758e
@@ -21,7 +21,7 @@ import sys
 try:
     sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
     import paramiko
 except ImportError,e:
     print "Error : can not load paramiko library %s" % e
     raise
 
@@ -37,28 +37,63 @@ from common import *
 class backup_pgsql(backup_generic):
     """Backup a postgresql database as gzipped sql file through ssh"""
     type = 'pgsql+ssh'
-    required_params = backup_generic.required_params + ['db_name','private_key']
+    required_params = backup_generic.required_params + ['private_key']
+    optional_params = backup_generic.optional_params + ['db_name']
 
     db_name=''
 
     def do_backup(self,stats):
+        self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date)
+
+        if not os.path.isdir(self.dest_dir):
+            if not self.dry_run:
+                os.makedirs(self.dest_dir)
+            else:
+                print 'mkdir "%s"' % self.dest_dir
+        else:
+            raise Exception('backup destination directory already exists : %s' % self.dest_dir)
 
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
             mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
 
         self.logger.debug('[%s] Trying to connect to "%s" with username root and key "%s"',self.backup_name,self.server_name,self.private_key)
-        ssh = paramiko.SSHClient()
-        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        ssh.connect(self.server_name,username='root',pkey = mykey,port=self.ssh_port)
+        self.ssh = paramiko.SSHClient()
+        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        self.ssh.connect(self.server_name,username='root',pkey = mykey,port=self.ssh_port)
 
+        if self.db_name:
+            stats['log']= "Successfully backup processed to the following database :"
+            self.do_pgsqldump(stats)
+        else:
+            stats['log']= "Successfully backuping processed to the following databases :"
+            stats['status']='List'
+            cmd = "su - postgres -c 'psql -l -t' | cut -d'|' -f1 | sed -e 's/ //g' -e '/^$/d'"
+            self.logger.debug('[%s] List databases: %s',self.backup_name,cmd)
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            if error_code:
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+            databases = output.split('\n')
+            for database in databases:
+                if database.rstrip() not in ("", "template0", "template1"):
+                    self.db_name = database.rstrip()
+                    self.do_pgsqldump(stats)
+
+        stats['status']='OK'
+
+    def do_pgsqldump(self,stats):
         t = datetime.datetime.now()
         backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
 
         # dump db
-        cmd = 'sudo -u postgres pg_dump ' + self.db_name + ' > /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
+        cmd = " su - postgres -c 'pg_dump "+ self.db_name + ' > /tmp/' + self.db_name + '-' + backup_start_date + ".sql '"
         self.logger.debug('[%s] %s ',self.backup_name,cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=ssh)
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
                 raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
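The database discovery added above runs `su - postgres -c 'psql -l -t' | cut -d'|' -f1 | sed -e 's/ //g' -e '/^$/d'` on the remote host and keeps every non-empty name except template0 and template1. Below is a minimal standalone sketch of that filtering step, not part of the commit; the sample output is invented, not taken from a real server.

    # Sketch only: mimics the listing loop in do_backup() on made-up psql output.
    sample_output = "template0\ntemplate1\nmydb\nanotherdb\n"
    for database in sample_output.split('\n'):
        if database.rstrip() not in ("", "template0", "template1"):
            print(database.rstrip())    # each surviving name is then dumped via do_pgsqldump()
    # keeps: mydb, anotherdb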
@@ -67,39 +102,38 @@ class backup_pgsql(backup_generic):
         cmd = 'gzip /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
         self.logger.debug('[%s] %s ',self.backup_name,cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=ssh)
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
                 raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
 
         # get the file
         filepath = '/tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
-        localpath = self.backup_dir + '/' + self.db_name + '-' + backup_start_date + '.sql.gz'
+        localpath = self.dest_dir + '/' + self.db_name + '-' + backup_start_date + '.sql.gz'
         self.logger.debug('[%s] get the file using sftp from "%s" to "%s" ',self.backup_name,filepath,localpath)
         if not self.dry_run:
-            transport = ssh.get_transport()
+            transport = self.ssh.get_transport()
             sftp = paramiko.SFTPClient.from_transport(transport)
             sftp.get(filepath, localpath)
             sftp.close()
 
         if not self.dry_run:
-            stats['total_files_count']=1
-            stats['written_files_count']=1
-            stats['total_bytes']=os.stat(localpath).st_size
-            stats['written_bytes']=os.stat(localpath).st_size
-            stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,self.db_name, stats['written_bytes'], localpath)
-            stats['backup_location'] = localpath
+            stats['total_files_count']=1 + stats.get('total_files_count', 0)
+            stats['written_files_count']=1 + stats.get('written_files_count', 0)
+            stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0)
+            stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0)
+            stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name)
+            stats['backup_location'] = self.dest_dir
 
         cmd = 'rm -f /tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
         self.logger.debug('[%s] %s ',self.backup_name,cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=ssh)
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
                 raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
 
-        stats['status']='OK'
 
     def register_existingbackups(self):
         """scan backup dir and insert stats in database"""
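Because do_pgsqldump() can now run once per database within a single backup, the counters above switch from plain assignment to accumulation with dict.get(). A small sketch, not part of the commit and with invented sizes, of what that change buys:

    # Sketch only: two successive dumps add up instead of overwriting each other.
    stats = {}
    for size in (1024, 2048):    # pretend sizes of two dumped databases
        stats['total_files_count'] = 1 + stats.get('total_files_count', 0)
        stats['total_bytes'] = size + stats.get('total_bytes', 0)
    print(stats)    # totals: total_files_count == 2, total_bytes == 3072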
@@ -108,20 +142,27 @@ class backup_pgsql(backup_generic):
 
         filelist = os.listdir(self.backup_dir)
         filelist.sort()
-        p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).sql.gz$' % self.db_name)
+        p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
         for item in filelist:
-            sr = p.match(item)
-            if sr:
-                file_name = os.path.join(self.backup_dir,item)
-                start = datetime.datetime.strptime(sr.groups()[0],'%Y%m%d-%Hh%Mm%S').isoformat()
-                if not file_name in registered:
-                    self.logger.info('Registering %s from %s',file_name,fileisodate(file_name))
-                    size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0])
+            if p.match(item):
+                dir_name = os.path.join(self.backup_dir,item)
+                if not dir_name in registered:
+                    start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat()
+                    if fileisodate(dir_name)>start:
+                        stop = fileisodate(dir_name)
+                    else:
+                        stop = start
+                    self.logger.info('Registering %s started on %s',dir_name,start)
+                    self.logger.debug(' Disk usage %s','du -sb "%s"' % dir_name)
+                    if not self.dry_run:
+                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0])
+                    else:
+                        size_bytes = 0
                     self.logger.debug(' Size in bytes : %i',size_bytes)
                     if not self.dry_run:
                         self.dbstat.add(self.backup_name,self.server_name,'',\
-                            backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name)
+                            backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name)
                 else:
-                    self.logger.info('Skipping %s from %s, already registered',file_name,fileisodate(file_name))
+                    self.logger.info('Skipping %s, already registered',dir_name)
 
 register_driver(backup_pgsql)
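register_existingbackups() now matches the dated destination directories that each run creates (YYYYMMDD-HHhMMmSS) instead of individual per-database .sql.gz files. A quick sketch of the new pattern, not part of the commit; the example names below are invented:

    import re, datetime

    # Sketch only: same regex and date format as the commit; only the bare
    # directory name matches, the old per-database gzip name no longer does.
    p = re.compile(r'^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
    for item in ('20130412-15h04m32', 'mydb-20130412-15h04m32.sql.gz'):
        if p.match(item):
            start = datetime.datetime.strptime(item, '%Y%m%d-%Hh%Mm%S').isoformat()
            print(item + ' -> ' + start)    # 20130412-15h04m32 -> 2013-04-12T15:04:32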