Improve oracle backups
This commit is contained in:
parent efa6d44d0d
commit aaa06bf4b9
@@ -17,9 +17,6 @@
 #    along with TISBackup.  If not, see <http://www.gnu.org/licenses/>.
 #
 # -----------------------------------------------------------------------
-
-
-
 import sys
 try:
     sys.stderr = open('/dev/null')       # Silence silly warnings from paramiko
@@ -34,58 +31,77 @@ import datetime
 import base64
 import os
 from common import *
+import re
 
 class backup_oracle(backup_generic):
     """Backup a oracle database as zipped file through ssh"""
     type = 'oracle+ssh'
-    required_params = backup_generic.required_params + ['db_name','private_key']
-    optional_params = ['username', 'remote_backup_dir']
+    required_params = backup_generic.required_params + ['db_name','private_key', 'userid']
+    optional_params = ['username', 'remote_backup_dir', 'ignore_error_oracle_code']
     db_name=''
     username='oracle'
     remote_backup_dir =  r'/home/oracle/backup'
+    ignore_error_oracle_code = [ ]
 
     def do_backup(self,stats):
 
-        self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key)
+        self.logger.debug('[%s] Connecting to %s with user %s and key %s',self.backup_name,self.server_name,self.username,self.private_key)
         try:
             mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
         except paramiko.SSHException:
             mykey = paramiko.DSSKey.from_private_key_file(self.private_key)
 
-        ssh = paramiko.SSHClient()
-        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        ssh.connect(self.server_name,username=self.username,pkey = mykey,port=self.ssh_port)
+        self.ssh = paramiko.SSHClient()
+        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        self.ssh.connect(self.server_name,username=self.username,pkey = mykey,port=self.ssh_port)
 
         t = datetime.datetime.now()
-        backup_start_date =  t.strftime('%Y%m%d-%Hh%Mm%S')
+        self.backup_start_date =  t.strftime('%Y%m%d-%Hh%Mm%S')
+        dumpfile= self.remote_backup_dir + '/' + self.db_name + '_' + self.backup_start_date+'.dmp'
+        dumplog = self.remote_backup_dir + '/' + self.db_name + '_' + self.backup_start_date+'.log'
+
+        self.dest_dir = os.path.join(self.backup_dir,self.backup_start_date)
 
         # dump db
         stats['status']='Dumping'
-        cmd = 'expdp / full=Y compression=all directory=' + self.remote_backup_dir +' dumpfile='  + self.db_name + '_' + backup_start_date+'.dmp logfile=expd_' + self.db_name + '_' + backup_start_date +'.log'
+        cmd = "exp '%s'  file='%s' grants=y log='%s'"% (self.userid,dumpfile, dumplog)
         self.logger.debug('[%s] Dump DB : %s',self.backup_name,cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=ssh)
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
             if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+                localpath = os.path.join(self.dest_dir , self.db_name + '.log')
+                self.logger.debug('[%s] Get log file with sftp on %s from %s to %s',self.backup_name,self.server_name,dumplog,localpath)
+                transport =  self.ssh.get_transport()
+                sftp = paramiko.SFTPClient.from_transport(transport)
+                sftp.get(dumplog, localpath)
+                sftp.close()
+
+                file = open(localpath)
+                for line in file:
+                    if re.search('EXP-[0-9]+:', line) and not re.match('EXP-[0-9]+:', line).group(0).replace(':','') in self.ignore_error_oracle_code:
+                        stats['status']='RMTemp'
+                        self.clean_dumpfiles(dumpfile,dumplog)
+                        raise Exception('Aborting, Not null exit code (%s) for "%s"' % (re.match('EXP-[0-9]+:', line).group(0).replace(':',''),cmd))
+                file.close()
 
         # zip the file
         stats['status']='Zipping'
-        cmd = '/usr/bin/pigz  --best --verbose --rsyncable ' + self.remote_backup_dir  + '/'+ self.db_name + '_' + backup_start_date+'.dmp'
+        cmd = 'gzip  %s' % dumpfile
         self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd)
         if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=ssh)
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
             self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
-            if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+        if error_code:
+            raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
 
         # get the file
         stats['status']='SFTP'
-        filepath = self.remote_backup_dir  + '/'+ self.db_name + '_' + backup_start_date+'.dmp.gz'
-        localpath = os.path.join(self.backup_dir , self.db_name + '-' + backup_start_date + '.dmp.gz')
+        filepath = dumpfile + '.gz'
+        localpath = os.path.join(self.dest_dir , self.db_name + '.dmp.gz')
         self.logger.debug('[%s] Get gz backup with sftp on %s from %s to %s',self.backup_name,self.server_name,filepath,localpath)
         if not self.dry_run:
-            transport =  ssh.get_transport()
+            transport =  self.ssh.get_transport()
             sftp = paramiko.SFTPClient.from_transport(transport)
             sftp.get(filepath, localpath)
             sftp.close()
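The dump step now runs exp with the new required userid parameter instead of expdp, and a non-zero exit code no longer aborts blindly: the remote log is fetched over SFTP and scanned for EXP-xxxxx codes, aborting only when a code is not listed in ignore_error_oracle_code. Below is a standalone sketch of those two ideas, with hypothetical values for userid, the paths and the sample log lines; only the command format string and the regex come from the commit.

import re

# Hypothetical values; in the driver they come from the job configuration
# and from self.backup_start_date.
userid   = 'system/manager@ORCL'
dumpfile = '/home/oracle/backup/ORCL_20130917-14h30m00.dmp'
dumplog  = '/home/oracle/backup/ORCL_20130917-14h30m00.log'

# Same format string the commit uses to build the remote dump command.
cmd = "exp '%s'  file='%s' grants=y log='%s'" % (userid, dumpfile, dumplog)

# Ignore list as it could be supplied through 'ignore_error_oracle_code'.
ignore_error_oracle_code = ['EXP-00091']

def check_exp_log(lines, ignored):
    """Abort only on EXP-xxxxx codes that are not in the ignore list."""
    for line in lines:
        found = re.search('EXP-[0-9]+:', line)
        if found and found.group(0).replace(':', '') not in ignored:
            raise Exception('Aborting, Not null exit code (%s) for "%s"'
                            % (found.group(0).replace(':', ''), cmd))

# Illustrative log excerpt: EXP-00091 is in the ignore list, so no exception.
check_exp_log(['EXP-00091: Exporting questionable statistics.',
               'Export terminated successfully with warnings.'],
              ignore_error_oracle_code)

The sketch uses re.search for both the test and the extraction, so a code found mid-line is handled consistently.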
@@ -96,23 +112,9 @@ class backup_oracle(backup_generic):
             stats['total_bytes']=os.stat(localpath).st_size
             stats['written_bytes']=os.stat(localpath).st_size
         stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,self.db_name, stats['written_bytes'], localpath)
-        stats['backup_location'] = localpath
-
+        stats['backup_location'] = self.dest_dir
         stats['status']='RMTemp'
-        cmd = 'rm -f '+self.remote_backup_dir  + '/' +self.db_name + '_' + backup_start_date+'.dmp.gz'
-        self.logger.debug('[%s] Remove temp gzip : %s',self.backup_name,cmd)
-        if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
-            if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
-	cmd = 'rm -f '+self.remote_backup_dir  + '/' + self.db_name + '_' + backup_start_date+'.dmp'
-        self.logger.debug('[%s] Remove temp dump : %s',self.backup_name,cmd)
-        if not self.dry_run:
-            (error_code,output) = ssh_exec(cmd,ssh=ssh)
-            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
-            if error_code:
-                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+        self.clean_dumpfiles(dumpfile,dumplog)
         stats['status']='OK'
 
     def register_existingbackups(self):
@@ -122,20 +124,44 @@ class backup_oracle(backup_generic):
 
         filelist = os.listdir(self.backup_dir)
         filelist.sort()
-        p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).sql.gz$' % self.db_name)
+        p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
         for item in filelist:
-            sr = p.match(item)
-            if sr:
-                file_name = os.path.join(self.backup_dir,item)
-                start = datetime.datetime.strptime(sr.groups()[0],'%Y%m%d-%Hh%Mm%S').isoformat()
-                if not file_name in registered:
-                    self.logger.info('Registering %s from %s',file_name,fileisodate(file_name))
-                    size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0])
+            if p.match(item):
+                dir_name = os.path.join(self.backup_dir,item)
+                if not dir_name in registered:
+                    start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat()
+                    if fileisodate(dir_name)>start:
+                        stop = fileisodate(dir_name)
+                    else:
+                        stop = start
+                    self.logger.info('Registering %s started on %s',dir_name,start)
+                    self.logger.debug('  Disk usage %s','du -sb "%s"' % dir_name)
+                    if not self.dry_run:
+                        size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0])
+                    else:
+                        size_bytes = 0
                     self.logger.debug('  Size in bytes : %i',size_bytes)
                     if not self.dry_run:
                         self.dbstat.add(self.backup_name,self.server_name,'',\
-                                        backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name)
+                                        backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name)
                 else:
-                    self.logger.info('Skipping %s from %s, already registered',file_name,fileisodate(file_name))
+                    self.logger.info('Skipping %s, already registered',dir_name)
+
+
+    def clean_dumpfiles(self,dumpfile,dumplog):
+        cmd = 'rm -f "%s.gz" "%s"' %( dumpfile , dumplog)
+        self.logger.debug('[%s] Remove temp gzip : %s',self.backup_name,cmd)
+        if not self.dry_run:
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            if error_code:
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
+        cmd = 'rm -f '+self.remote_backup_dir  + '/' + self.db_name + '_' + self.backup_start_date+'.dmp'
+        self.logger.debug('[%s] Remove temp dump : %s',self.backup_name,cmd)
+        if not self.dry_run:
+            (error_code,output) = ssh_exec(cmd,ssh=self.ssh)
+            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
+            if error_code:
+                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
 
 register_driver(backup_oracle)
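register_existingbackups now registers per-run directories named after the backup start date instead of single .sql.gz files. A small sketch of how a directory name matching the new pattern maps back to an ISO start date; the sample name is illustrative, only the regex and the date format come from the commit.

import re
import datetime

# Pattern and strptime format used by the new register_existingbackups code.
p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')

item = '20130917-14h30m00'   # hypothetical directory name under backup_dir
if p.match(item):
    start = datetime.datetime.strptime(item, '%Y%m%d-%Hh%Mm%S').isoformat()
    print(start)             # -> 2013-09-17T14:30:00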
@@ -256,6 +256,35 @@
 </div>
 {% endif %}
 
+
+{% if backup_list['oracle_list']|count != 0 %}
+<div class="accordion-group">
+    <div class="accordion-heading">
+	<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion2" href="#collapse10"><h2>Type: Oracle serveurs</h2></a>
+    </div>
+    <div id="collapse10" class="accordion-body collapse ">
+      <div class="accordion-inner">
+    <table class="table table-bordered">
+	    <thead>
+	        <th>Server</th>
+	        <th>Backup</th>
+	        <th>Database</th>
+        </thead>
+        <tbody>
+	        {% for entry in backup_list['oracle_list'] %}
+	        <tr>
+	            <td>{{ entry[0] }}</td>
+	            <td>{{ entry[1] }}</td>
+	            <td>{{ entry[3] }}</td>
+	        </tr>
+	        {% endfor %}
+        </tbody>
+    </table>
+         </div>
+  </div>
+</div>
+{% endif %}
+
 </div>
 
 
@@ -32,6 +32,7 @@ from libtisbackup.common import *
 from libtisbackup.backup_mysql import backup_mysql
 from libtisbackup.backup_rsync import backup_rsync
 from libtisbackup.backup_rsync import backup_rsync_ssh
+from libtisbackup.backup_oracle import backup_oracle
 from libtisbackup.backup_rsync_btrfs import backup_rsync_btrfs
 from libtisbackup.backup_rsync_btrfs import backup_rsync__btrfs_ssh
 from libtisbackup.backup_pgsql import backup_pgsql
@@ -108,6 +108,7 @@ def read_config():
     backup_dict['xva_list'] = []
     backup_dict['metadata_list'] = []
     backup_dict['switch_list'] = []
+    backup_dict['oracle_list'] = []
     for row in result:
         backup_name = row['backup_name']
         server_name = row['server_name']
@@ -134,6 +135,9 @@ def read_config():
         if backup_type == "sqlserver+ssh":
             db_name = row['db_name']
             backup_dict['sqlserver_list'].append([server_name, backup_name, backup_type, db_name])
+        if backup_type == "oracle+ssh":
+	    db_name = row['db_name']
+	    backup_dict['oracle_list'].append([server_name, backup_name, backup_type, db_name])
         if backup_type == "xen-xva":
             backup_dict['xva_list'].append([server_name, backup_name, backup_type, ""])
         if backup_type == "switch":
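Each oracle+ssh job ends up in backup_dict['oracle_list'] as a four-element row, which is why the new template columns read entry[0], entry[1] and entry[3]. A minimal sketch with hypothetical values; the real rows are built by read_config() from the job database.

# Row layout appended by read_config() for an oracle+ssh job:
#   [server_name, backup_name, backup_type, db_name]
entry = ['srvora01', 'backup_finance_db', 'oracle+ssh', 'FINANCE']   # hypothetical values

print(entry[0])   # server   -> srvora01
print(entry[1])   # backup   -> backup_finance_db
print(entry[3])   # database -> FINANCE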