comment DSSKey and add ed25519
Parent: 82ca9dfa35
Commit: d079b542be
@@ -34,13 +34,13 @@ from common import *

 class backup_mysql(backup_generic):
     """Backup a mysql database as gzipped sql file through ssh"""
     type = 'mysql+ssh'
     required_params = backup_generic.required_params + ['db_user','db_passwd','private_key']
     optional_params = backup_generic.optional_params + ['db_name']

     db_name=''
     db_user=''
     db_passwd=''

     dest_dir = ""

@@ -60,7 +60,8 @@ class backup_mysql(backup_generic):
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

         self.ssh = paramiko.SSHClient()
         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
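The same change is applied to every backup class below: the DSA fallback is commented out and an Ed25519 fallback takes its place, so an RSA key is tried first and an Ed25519 key second. A minimal sketch of a more general loader that tries each supported key type in turn — the helper name load_private_key is illustrative and not part of this commit, and ECDSA/DSS availability depends on the installed paramiko version:

import paramiko

def load_private_key(path, password=None):
    # Try each key class in turn; return the first one that parses the file.
    key_classes = (paramiko.RSAKey, paramiko.Ed25519Key, paramiko.ECDSAKey, paramiko.DSSKey)
    last_error = None
    for key_class in key_classes:
        try:
            return key_class.from_private_key_file(path, password=password)
        except paramiko.SSHException as exc:
            last_error = exc
    raise last_error

# usage (hypothetical): mykey = load_private_key(self.private_key)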
@@ -75,14 +76,14 @@ class backup_mysql(backup_generic):
             (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
                 raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
             databases = output.split('\n')
             for database in databases:
                 if database != "":
                     self.db_name = database.rstrip()
                     self.do_mysqldump(stats)

         else:
             stats['log']= "Successfully backup processed to the following database :"
             self.do_mysqldump(stats)

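The hunk above keeps the existing per-database loop: ssh_exec returns the output of the remote database listing command, and each non-empty line is stripped and dumped in turn by overwriting self.db_name. A compact equivalent of the filtering step, shown only as an illustration with a made-up output value:

output = "information_schema\nmydb\n\n"   # hypothetical ssh_exec output
databases = [line.rstrip() for line in output.split('\n') if line != ""]
# databases == ['information_schema', 'mydb']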
@@ -130,9 +131,9 @@ class backup_mysql(backup_generic):
         stats['total_files_count']=1 + stats.get('total_files_count', 0)
         stats['written_files_count']=1 + stats.get('written_files_count', 0)
         stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0)
         stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0)
         stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name)
         stats['backup_location'] = self.dest_dir

         stats['status']='RMTemp'
         cmd = 'rm -f /tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
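The counters above are accumulated with dict.get rather than plain assignment because do_mysqldump runs once per database in the loop shown earlier, and each run must add to the totals instead of overwriting them. A standalone illustration of the pattern:

stats = {}
for _ in range(3):                                    # e.g. three databases dumped in one run
    stats['total_files_count'] = 1 + stats.get('total_files_count', 0)
# stats['total_files_count'] == 3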
@@ -151,7 +152,7 @@ class backup_mysql(backup_generic):

         filelist = os.listdir(self.backup_dir)
         filelist.sort()
         p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
         for item in filelist:
             if p.match(item):
                 dir_name = os.path.join(self.backup_dir,item)
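register_existingbackups scans the backup directory for entries named after the dump timestamp, the same shape that backup_start_date gets from strftime('%Y%m%d-%Hh%Mm%S') later in this diff. A quick check of the pattern against a made-up name, written as a raw string here to avoid the invalid-escape warnings newer Python versions emit for '\d' in plain strings:

import re

p = re.compile(r'^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
print(bool(p.match('20240131-09h30m00')))   # True
print(bool(p.match('not-a-backup-dir')))    # False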
@@ -49,7 +49,8 @@ class backup_oracle(backup_generic):
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

         self.ssh = paramiko.SSHClient()
         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
@@ -53,7 +53,8 @@ class backup_pgsql(backup_generic):
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

         self.logger.debug('[%s] Trying to connect to "%s" with username root and key "%s"',self.backup_name,self.server_name,self.private_key)
         self.ssh = paramiko.SSHClient()
@@ -55,7 +55,8 @@ class backup_samba4(backup_generic):
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

         self.ssh = paramiko.SSHClient()
         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
@@ -117,9 +118,9 @@ class backup_samba4(backup_generic):
         stats['total_files_count']=1 + stats.get('total_files_count', 0)
         stats['written_files_count']=1 + stats.get('written_files_count', 0)
         stats['total_bytes']=os.stat(localpath).st_size + stats.get('total_bytes', 0)
         stats['written_bytes']=os.stat(localpath).st_size + stats.get('written_bytes', 0)
         stats['log'] = '%s "%s"' % (stats['log'] ,self.db_name)
         stats['backup_location'] = self.dest_dir

         stats['status']='RMTemp'
         cmd = 'rm -f "%s"' % filepath
@@ -138,7 +139,7 @@ class backup_samba4(backup_generic):

         filelist = os.listdir(self.backup_dir)
         filelist.sort()
         p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
         for item in filelist:
             if p.match(item):
                 dir_name = os.path.join(self.backup_dir,item)
@@ -37,10 +37,10 @@ from common import *

 class backup_sqlserver(backup_generic):
     """Backup a SQLSERVER database as gzipped sql file through ssh"""
     type = 'sqlserver+ssh'
     required_params = backup_generic.required_params + ['db_name','private_key']
     optional_params = ['username', 'remote_backup_dir', 'sqlserver_before_2005', 'db_server_name', 'db_user', 'db_password']
     db_name=''
     db_user=''
     db_password=''
     userdb = "-E"
@@ -48,15 +48,16 @@ class backup_sqlserver(backup_generic):
     remote_backup_dir = r'c:/WINDOWS/Temp/'
     sqlserver_before_2005 = False
     db_server_name = "localhost"


     def do_backup(self,stats):

         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
-            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            #mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
+            mykey = paramiko.Ed25519Key.from_private_key_file(self.private_key)

         self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key)
         ssh = paramiko.SSHClient()
         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
@@ -64,7 +65,7 @@ class backup_sqlserver(backup_generic):

         t = datetime.datetime.now()
         backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')

         backup_file = self.remote_backup_dir + '/' + self.db_name + '-' + backup_start_date + '.bak'
         if not self.db_user == '':
             self.userdb = '-U %s -P %s' % ( self.db_user, self.db_password )
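For illustration only, with the default remote_backup_dir and a made-up database name and timestamp, the concatenation above yields a path like the one below (the doubled slash comes from joining a trailing-slash directory with another '/', and is generally tolerated by Windows tools):

remote_backup_dir = r'c:/WINDOWS/Temp/'
db_name = 'mydb'                          # hypothetical
backup_start_date = '20240131-09h30m00'   # hypothetical
backup_file = remote_backup_dir + '/' + db_name + '-' + backup_start_date + '.bak'
# 'c:/WINDOWS/Temp//mydb-20240131-09h30m00.bak'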
@@ -72,14 +73,14 @@ class backup_sqlserver(backup_generic):
         # dump db
         stats['status']='Dumping'
         if self.sqlserver_before_2005:
             cmd = """osql -E -Q "BACKUP DATABASE [%s]
                        TO DISK='%s'
                        WITH FORMAT" """ % ( self.db_name, backup_file )

         else:
             cmd = """sqlcmd %s -S "%s" -d master -Q "BACKUP DATABASE [%s]
                        TO DISK = N'%s'
                        WITH INIT, NOUNLOAD ,
                        NAME = N'Backup %s', NOSKIP ,STATS = 10, NOFORMAT" """ % (self.userdb, self.db_server_name, self.db_name, backup_file ,self.db_name )
         self.logger.debug('[%s] Dump DB : %s',self.backup_name,cmd)
         try:
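With the same hypothetical values (integrated authentication, so userdb stays '-E', db_server_name left at 'localhost'), the non-legacy branch above formats cmd to roughly the following; this is an illustration of the string substitution, not output captured from the tool:

sqlcmd -E -S "localhost" -d master -Q "BACKUP DATABASE [mydb]
                       TO DISK = N'c:/WINDOWS/Temp//mydb-20240131-09h30m00.bak'
                       WITH INIT, NOUNLOAD ,
                       NAME = N'Backup mydb', NOSKIP ,STATS = 10, NOFORMAT"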
@@ -88,7 +89,7 @@ class backup_sqlserver(backup_generic):
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
                 raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))

             # zip the file
             stats['status']='Zipping'
             cmd = 'gzip "%s"' % backup_file
@@ -98,7 +99,7 @@ class backup_sqlserver(backup_generic):
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
                 raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))

             # get the file
             stats['status']='SFTP'
             filepath = backup_file + '.gz'
@@ -109,15 +110,15 @@ class backup_sqlserver(backup_generic):
             sftp = paramiko.SFTPClient.from_transport(transport)
             sftp.get(filepath, localpath)
             sftp.close()

             if not self.dry_run:
                 stats['total_files_count']=1
                 stats['written_files_count']=1
                 stats['total_bytes']=os.stat(localpath).st_size
                 stats['written_bytes']=os.stat(localpath).st_size
                 stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,self.db_name, stats['written_bytes'], localpath)
                 stats['backup_location'] = localpath

         finally:
             stats['status']='RMTemp'
             cmd = 'rm -f "%s" "%s"' % ( backup_file + '.gz', backup_file )
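The transfer above goes through a paramiko Transport and SFTPClient opened earlier in the method. As a side note, an SFTP session can also be obtained straight from an already-connected SSHClient; a minimal sketch under that assumption (ssh being the client opened at the top of do_backup):

sftp = ssh.open_sftp()          # SSHClient.open_sftp() returns an SFTPClient
sftp.get(filepath, localpath)   # download the gzipped .bak to the local backup dir
sftp.close()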
@@ -126,10 +127,10 @@ class backup_sqlserver(backup_generic):
             (error_code,output) = ssh_exec(cmd,ssh=ssh)
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
                 raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))



         stats['status']='OK'

     def register_existingbackups(self):
@@ -139,7 +140,7 @@ class backup_sqlserver(backup_generic):

         filelist = os.listdir(self.backup_dir)
         filelist.sort()
         p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).bak.gz$' % self.db_name)
         for item in filelist:
             sr = p.match(item)
             if sr:
@@ -149,7 +150,7 @@ class backup_sqlserver(backup_generic):
                 self.logger.info('Registering %s from %s',file_name,fileisodate(file_name))
                 size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0])
                 self.logger.debug(' Size in bytes : %i',size_bytes)
                 if not self.dry_run:
                     self.dbstat.add(self.backup_name,self.server_name,'',\
                         backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name)
                 else:
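The named date group captured above carries the same '%Y%m%d-%Hh%Mm%S' timestamp, and du -sb reports the file size in bytes. A small illustration of recovering both values in pure Python — the file name is made up, and deriving start from the group is an assumption about the surrounding code:

import os
import re
from datetime import datetime

sample = 'mydb-20240131-09h30m00.bak.gz'          # made-up file name
m = re.match(r'^mydb-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2})\.bak\.gz$', sample)
start = datetime.strptime(m.group('date'), '%Y%m%d-%Hh%Mm%S').isoformat()
# start == '2024-01-31T09:30:00'
# os.path.getsize(file_name) would give the same byte count as `du -sb` for a regular file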
@@ -476,7 +476,8 @@ def ssh_exec(command,ssh=None,server_name='',remote_user='',private_key='',ssh_p
         try:
             mykey = paramiko.RSAKey.from_private_key_file(private_key)
         except paramiko.SSHException:
-            mykey = paramiko.DSSKey.from_private_key_file(private_key)
+            #mykey = paramiko.DSSKey.from_private_key_file(private_key)
+            mykey = paramiko.Ed25519Key.from_private_key_file(private_key)

         ssh = paramiko.SSHClient()
         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
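ssh_exec is the helper every class above calls once it holds a connected client, as in (error_code,output) = ssh_exec(cmd,ssh=self.ssh). A minimal usage sketch, assuming ssh_exec is imported as the backup modules do (from common import *); the host name and key path are illustrative, while root plus a loaded key mirrors what the surrounding code does:

import paramiko

mykey = paramiko.Ed25519Key.from_private_key_file('/root/.ssh/id_ed25519')   # illustrative path
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect('backup-target.example.org', username='root', pkey=mykey)        # illustrative host
(error_code, output) = ssh_exec('uname -a', ssh=ssh)
if error_code:
    raise Exception('Aborting, Not null exit code (%i) for "uname -a"' % error_code)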