First commit

This commit is contained in:
ssamson-tis 2013-05-23 10:19:43 +02:00
parent cd150a78f2
commit bd05ae8f25
60 changed files with 18864 additions and 3 deletions

View File

@ -1,4 +1,29 @@
tisbackup # -----------------------------------------------------------------------
========= # This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
backup server side executed python scripts for managing linux and windows system and application data backups, developed by adminsys for adminsys
Le script tisbackup se base sur un fichier de configuration .ini. Cf le fichier d'exemple pour le format
Pour lancer le backup, lancer la commande
./tisbackup.py -c fichierconf.ini
Pour lancer une section particulière du fichier .ini
./tisbackup.py -c fichierconf.ini -s section_choisie
Pour mettre le mode debug
./tisbackup.py -c fichierconf.ini -l debug

25
iniparse/__init__.py Executable file
View File

@ -0,0 +1,25 @@
# Copyright (c) 2001, 2002, 2003 Python Software Foundation
# Copyright (c) 2004-2008 Paramjit Oberoi <param.cs.wisc.edu>
# Copyright (c) 2007 Tim Lauridsen <tla@rasmil.dk>
# All Rights Reserved. See LICENSE-PSF & LICENSE for details.
from ini import INIConfig, change_comment_syntax
from config import BasicConfig, ConfigNamespace
from compat import RawConfigParser, ConfigParser, SafeConfigParser
from utils import tidy
from ConfigParser import DuplicateSectionError, \
NoSectionError, NoOptionError, \
InterpolationMissingOptionError, \
InterpolationDepthError, \
InterpolationSyntaxError, \
DEFAULTSECT, MAX_INTERPOLATION_DEPTH
# Names re-exported as the public API of the iniparse package.
__all__ = [
    'BasicConfig', 'ConfigNamespace',
    'INIConfig', 'tidy', 'change_comment_syntax',
    'RawConfigParser', 'ConfigParser', 'SafeConfigParser',
    'DuplicateSectionError', 'NoSectionError', 'NoOptionError',
    'InterpolationMissingOptionError', 'InterpolationDepthError',
    'InterpolationSyntaxError', 'DEFAULTSECT', 'MAX_INTERPOLATION_DEPTH',
]

343
iniparse/compat.py Executable file
View File

@ -0,0 +1,343 @@
# Copyright (c) 2001, 2002, 2003 Python Software Foundation
# Copyright (c) 2004-2008 Paramjit Oberoi <param.cs.wisc.edu>
# All Rights Reserved. See LICENSE-PSF & LICENSE for details.
"""Compatibility interfaces for ConfigParser
Interfaces of ConfigParser, RawConfigParser and SafeConfigParser
should be completely identical to the Python standard library
versions. Tested with the unit tests included with Python-2.3.4
The underlying INIConfig object can be accessed as cfg.data
"""
import re
from ConfigParser import DuplicateSectionError, \
NoSectionError, NoOptionError, \
InterpolationMissingOptionError, \
InterpolationDepthError, \
InterpolationSyntaxError, \
DEFAULTSECT, MAX_INTERPOLATION_DEPTH
# These are imported only for compatibility.
# The code below does not reference them directly.
from ConfigParser import Error, InterpolationError, \
MissingSectionHeaderError, ParsingError
import ini
class RawConfigParser(object):
    """ConfigParser-compatible facade over an ini.INIConfig object.

    Mirrors the interface of the standard-library RawConfigParser;
    the underlying INIConfig object can be accessed as ``self.data``.
    """

    def __init__(self, defaults=None, dict_type=dict):
        # INIConfig only supports plain-dict storage.
        if dict_type != dict:
            raise ValueError('Custom dict types not supported')
        self.data = ini.INIConfig(defaults=defaults, optionxformsource=self)

    def optionxform(self, optionstr):
        """Normalize an option name; case-insensitive like the stdlib."""
        return optionstr.lower()

    def defaults(self):
        """Return a dict of the values in the [DEFAULT] section."""
        d = {}
        secobj = self.data._defaults
        for name in secobj._options:
            d[name] = secobj._compat_get(name)
        return d

    def sections(self):
        """Return a list of section names, excluding [DEFAULT]"""
        return list(self.data)

    def add_section(self, section):
        """Create a new section in the configuration.

        Raise DuplicateSectionError if a section by the specified name
        already exists. Raise ValueError if name is DEFAULT or any of
        its case-insensitive variants.
        """
        # The default section is the only one that gets the case-insensitive
        # treatment - so it is special-cased here.
        if section.lower() == "default":
            # Parenthesized form: identical behavior, also valid Python 3.
            raise ValueError('Invalid section name: %s' % section)
        if self.has_section(section):
            raise DuplicateSectionError(section)
        else:
            self.data._new_namespace(section)

    def has_section(self, section):
        """Indicate whether the named section is present in the configuration.

        The DEFAULT section is not acknowledged.
        """
        return (section in self.data)

    def options(self, section):
        """Return a list of option names for the given section name."""
        if section in self.data:
            return list(self.data[section])
        else:
            raise NoSectionError(section)

    def read(self, filenames):
        """Read and parse a filename or a list of filenames.

        Files that cannot be opened are silently ignored; this is
        designed so that you can specify a list of potential
        configuration file locations (e.g. current directory, user's
        home directory, systemwide directory), and all existing
        configuration files in the list will be read. A single
        filename may also be given.

        Returns the list of files successfully read.
        """
        files_read = []
        if isinstance(filenames, basestring):
            filenames = [filenames]
        for filename in filenames:
            try:
                fp = open(filename)
            except IOError:
                # Unreadable files are skipped by design (see docstring).
                continue
            files_read.append(filename)
            self.data._readfp(fp)
            fp.close()
        return files_read

    def readfp(self, fp, filename=None):
        """Like read() but the argument must be a file-like object.

        The `fp' argument must have a `readline' method.  The optional
        `filename' argument is accepted for stdlib compatibility but is
        not used here - error reporting uses fp.name when available.
        """
        self.data._readfp(fp)

    def get(self, section, option, vars=None):
        """Return the value of *option* in *section*.

        Values supplied in the *vars* dict take precedence over stored
        values, matching stdlib ConfigParser semantics.
        """
        if not self.has_section(section):
            raise NoSectionError(section)
        # BUGFIX: previously the vars lookup assigned to a local that was
        # never returned, so `vars` was silently ignored.
        if vars is not None and option in vars:
            return vars[option]

        sec = self.data[section]
        if option in sec:
            return sec._compat_get(option)
        else:
            raise NoOptionError(option, section)

    def items(self, section):
        """Return a list of (name, value) pairs for *section*."""
        if section in self.data:
            ans = []
            for opt in self.data[section]:
                ans.append((opt, self.get(section, opt)))
            return ans
        else:
            raise NoSectionError(section)

    def getint(self, section, option):
        """Return the option value coerced to int."""
        return int(self.get(section, option))

    def getfloat(self, section, option):
        """Return the option value coerced to float."""
        return float(self.get(section, option))

    # Accepted spellings for boolean option values, as in the stdlib.
    _boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
                       '0': False, 'no': False, 'false': False, 'off': False}

    def getboolean(self, section, option):
        """Return the option value coerced to bool via _boolean_states."""
        v = self.get(section, option)
        if v.lower() not in self._boolean_states:
            # Parenthesized form: identical behavior, also valid Python 3.
            raise ValueError('Not a boolean: %s' % v)
        return self._boolean_states[v.lower()]

    def has_option(self, section, option):
        """Check for the existence of a given option in a given section."""
        if section in self.data:
            sec = self.data[section]
        else:
            raise NoSectionError(section)
        return (option in sec)

    def set(self, section, option, value):
        """Set an option."""
        if section in self.data:
            self.data[section][option] = value
        else:
            raise NoSectionError(section)

    def write(self, fp):
        """Write an .ini-format representation of the configuration state."""
        fp.write(str(self.data))

    def remove_option(self, section, option):
        """Remove an option; return 1 if it existed, 0 otherwise."""
        if section in self.data:
            sec = self.data[section]
        else:
            raise NoSectionError(section)
        if option in sec:
            del sec[option]
            return 1
        else:
            return 0

    def remove_section(self, section):
        """Remove a file section; return True if it existed."""
        if not self.has_section(section):
            return False
        del self.data[section]
        return True
class ConfigDict(object):
    """Adapt an INI section (plus override vars) to a read-only mapping.

    Used by the interpolation machinery, which expects dict-style
    lookups and a KeyError for missing names.
    """

    def __init__(self, cfg, section, vars):
        self.cfg = cfg
        self.section = section
        self.vars = vars

    def __getitem__(self, key):
        lookup = RawConfigParser.get
        try:
            return lookup(self.cfg, self.section, key, self.vars)
        except (NoSectionError, NoOptionError):
            # Present a plain mapping interface to callers.
            raise KeyError(key)
class ConfigParser(RawConfigParser):
    """RawConfigParser plus classic %(name)s value interpolation."""

    def get(self, section, option, raw=False, vars=None):
        """Get an option value for a given section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        if section != DEFAULTSECT and not self.has_section(section):
            raise NoSectionError(section)
        option = self.optionxform(option)
        value = RawConfigParser.get(self, section, option, vars)
        if raw:
            return value
        else:
            d = ConfigDict(self, section, vars)
            return self._interpolate(section, option, value, d)

    def _interpolate(self, section, option, rawval, vars):
        # do the string interpolation
        value = rawval
        depth = MAX_INTERPOLATION_DEPTH
        while depth:                    # Loop through this until it's done
            depth -= 1
            if "%(" in value:
                try:
                    value = value % vars
                # 'except E as e' works on Python 2.6+ and Python 3,
                # unlike the old 'except E, e' form.
                except KeyError as e:
                    raise InterpolationMissingOptionError(
                        option, section, rawval, e.args[0])
            else:
                break
        # References still present after MAX_INTERPOLATION_DEPTH passes
        # indicate a reference cycle or excessive nesting.
        if "%(" in value:
            raise InterpolationDepthError(option, section, rawval)
        return value

    def items(self, section, raw=False, vars=None):
        """Return a list of tuples with (name, value) for each option
        in the section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        if section != DEFAULTSECT and not self.has_section(section):
            raise NoSectionError(section)
        if vars is None:
            options = list(self.data[section])
        else:
            # vars entries shadow section options of the same name.
            options = []
            for x in self.data[section]:
                if x not in vars:
                    options.append(x)
            options.extend(vars.keys())
        # '__name__' is ConfigParser's magic pseudo-option; hide it.
        if "__name__" in options:
            options.remove("__name__")
        d = ConfigDict(self, section, vars)
        if raw:
            return [(option, d[option])
                    for option in options]
        else:
            return [(option, self._interpolate(section, option, d[option], d))
                    for option in options]
class SafeConfigParser(ConfigParser):
    """ConfigParser variant with stricter, stdlib-compatible interpolation.

    Values are validated on set() and interpolation is performed by a
    recursive scanner rather than the '%' operator.
    """

    # Matches a well-formed "%(name)s" reference.
    _interpvar_re = re.compile(r"%\(([^)]+)\)s")
    # Matches a '%' that is not doubled - i.e. bad interpolation syntax.
    _badpercent_re = re.compile(r"%[^%]|%$")

    def set(self, section, option, value):
        """Set an option, rejecting non-string values and bad '%' syntax."""
        if not isinstance(value, basestring):
            raise TypeError("option values must be strings")
        # check for bad percent signs:
        # first, replace all "good" interpolations
        tmp_value = self._interpvar_re.sub('', value)
        # then, check if there's a lone percent sign left
        m = self._badpercent_re.search(tmp_value)
        if m:
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, m.start()))
        ConfigParser.set(self, section, option, value)

    def _interpolate(self, section, option, rawval, vars):
        # do the string interpolation via the recursive scanner below
        L = []
        self._interpolate_some(option, L, rawval, section, vars, 1)
        return ''.join(L)

    _interpvar_match = re.compile(r"%\(([^)]+)\)s").match

    def _interpolate_some(self, option, accum, rest, section, map, depth):
        """Scan *rest*, appending expanded pieces to *accum* (recursive)."""
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("%")
            if p < 0:
                # no more references - emit the remainder verbatim
                accum.append(rest)
                return
            if p > 0:
                # emit the literal text before the next '%'
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "%":
                # '%%' is an escaped literal percent
                accum.append("%")
                rest = rest[2:]
            elif c == "(":
                m = self._interpvar_match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                var = m.group(1)
                rest = rest[m.end():]
                try:
                    v = map[var]
                except KeyError:
                    raise InterpolationMissingOptionError(
                        option, section, rest, var)
                if "%" in v:
                    # substituted value itself contains references - recurse
                    self._interpolate_some(option, accum, v,
                                           section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%' must be followed by '%' or '(', found: " + repr(rest))

294
iniparse/config.py Executable file
View File

@ -0,0 +1,294 @@
class ConfigNamespace(object):
    """Abstract class representing the interface of Config objects.

    A ConfigNamespace is a collection of names mapped to values, where
    the values may be nested namespaces.  Values can be accessed via
    container notation - obj[key] - or via dotted notation - obj.key.
    Both these access methods are equivalent.

    To minimize name conflicts between namespace keys and class members,
    the number of class members should be minimized, and the names of
    all class members should start with an underscore.

    Subclasses must implement the methods for container-like access,
    and this class will automatically provide dotted access.
    """

    # Methods that must be implemented by subclasses

    def _getitem(self, key):
        # BUGFIX: this previously *returned* a NotImplementedError
        # instance, which made __contains__ succeed (and return True)
        # on the abstract base instead of signalling "not implemented".
        raise NotImplementedError(key)

    def __setitem__(self, key, value):
        raise NotImplementedError(key, value)

    def __delitem__(self, key):
        raise NotImplementedError(key)

    def __iter__(self):
        # BUGFIX: previously returned the exception instead of raising,
        # which would surface as a confusing TypeError from iter().
        raise NotImplementedError()

    def _new_namespace(self, name):
        raise NotImplementedError(name)

    def __contains__(self, key):
        try:
            self._getitem(key)
        except KeyError:
            return False
        return True

    # Machinery for converting dotted access into container access,
    # and automatically creating new sections/namespaces.
    #
    # To distinguish between accesses of class members and namespace
    # keys, we first call object.__getattribute__().  If that succeeds,
    # the name is assumed to be a class member.  Otherwise it is
    # treated as a namespace key.
    #
    # Therefore, member variables should be defined in the class,
    # not just in the __init__() function.  See BasicNamespace for
    # an example.

    def __getitem__(self, key):
        try:
            return self._getitem(key)
        except KeyError:
            # Defer creation until an assignment is actually made.
            return Undefined(key, self)

    def __getattr__(self, name):
        try:
            return self._getitem(name)
        except KeyError:
            if name.startswith('__') and name.endswith('__'):
                # Never fabricate dunder names - protocols probe for them.
                raise AttributeError
            return Undefined(name, self)

    def __setattr__(self, name, value):
        try:
            object.__getattribute__(self, name)
            object.__setattr__(self, name, value)
        except AttributeError:
            self.__setitem__(name, value)

    def __delattr__(self, name):
        try:
            object.__getattribute__(self, name)
            object.__delattr__(self, name)
        except AttributeError:
            self.__delitem__(name)

    # During unpickling, Python checks if the class has a __setstate__
    # method.  But, the data dicts have not been initialised yet, which
    # leads to _getitem and hence __getattr__ raising an exception.  So
    # we explicitly implement default __setstate__ behavior.
    def __setstate__(self, state):
        self.__dict__.update(state)
class Undefined(object):
    """Placeholder for a namespace key that does not exist yet.

    Assigning to an attribute or item of this object creates the
    missing sub-namespace in the parent and stores the value there;
    e.g. "cfg.a.b.c = 42" works even when "cfg.a.b" is not defined.
    """

    def __init__(self, name, namespace):
        # Bypass our own __setattr__, which would try to create
        # namespaces instead of storing plain attributes.
        object.__setattr__(self, 'name', name)
        object.__setattr__(self, 'namespace', namespace)

    def __setattr__(self, name, value):
        self.namespace._new_namespace(self.name)[name] = value

    # Item assignment behaves exactly like attribute assignment.
    __setitem__ = __setattr__
# ---- Basic implementation of a ConfigNamespace
class BasicConfig(ConfigNamespace):
    """Represents a hierarchical collection of named values.

    Values are added using dotted notation:

    >>> n = BasicConfig()
    >>> n.x = 7
    >>> n.name.first = 'paramjit'
    >>> n.name.last = 'oberoi'

    ...and accessed the same way, or with [...]:

    >>> n.x
    7
    >>> n.name.first
    'paramjit'
    >>> n.name.last
    'oberoi'
    >>> n['x']
    7
    >>> n['name']['first']
    'paramjit'

    Iterating over the namespace object returns the keys:

    >>> l = list(n)
    >>> l.sort()
    >>> l
    ['name', 'x']

    Values can be deleted using 'del' and printed using 'print'.

    >>> n.aaa = 42
    >>> del n.x
    >>> print n
    aaa = 42
    name.first = paramjit
    name.last = oberoi

    Nested namespaces are also namespaces:

    >>> isinstance(n.name, ConfigNamespace)
    True
    >>> print n.name
    first = paramjit
    last = oberoi
    >>> sorted(list(n.name))
    ['first', 'last']

    Finally, values can be read from a file as follows:

    >>> from StringIO import StringIO
    >>> sio = StringIO('''
    ... # comment
    ... ui.height = 100
    ... ui.width = 150
    ... complexity = medium
    ... have_python
    ... data.secret.password = goodness=gracious me
    ... ''')
    >>> n = BasicConfig()
    >>> n._readfp(sio)
    >>> print n
    complexity = medium
    data.secret.password = goodness=gracious me
    have_python
    ui.height = 100
    ui.width = 150
    """

    # This makes sure that __setattr__ knows this is not a namespace key.
    _data = None

    def __init__(self):
        self._data = {}

    def _getitem(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self):
        return iter(self._data)

    def __str__(self, prefix=''):
        """Render as sorted 'dotted.name = value' lines."""
        lines = []
        # sorted() replaces the old keys()/sort() pair - same ordering,
        # idiomatic, and also valid on Python 3 dict views.
        for name in sorted(self._data):
            value = self._data[name]
            if isinstance(value, ConfigNamespace):
                lines.append(value.__str__(prefix='%s%s.' % (prefix, name)))
            else:
                if value is None:
                    # bare option with no '=' in the input
                    lines.append('%s%s' % (prefix, name))
                else:
                    lines.append('%s%s = %s' % (prefix, name, value))
        return '\n'.join(lines)

    def _new_namespace(self, name):
        obj = BasicConfig()
        self._data[name] = obj
        return obj

    def _readfp(self, fp):
        """Populate from a file of 'dotted.name = value' lines."""
        while True:
            line = fp.readline()
            if not line:
                break

            line = line.strip()
            if not line: continue
            # only '#' marks a comment in this simple format
            if line[0] == '#': continue
            data = line.split('=', 1)
            if len(data) == 1:
                # bare name with no value
                name = line
                value = None
            else:
                name = data[0].strip()
                value = data[1].strip()
            name_components = name.split('.')
            ns = self
            for n in name_components[:-1]:
                if n in ns:
                    ns = ns[n]
                    if not isinstance(ns, ConfigNamespace):
                        raise TypeError('value-namespace conflict', n)
                else:
                    ns = ns._new_namespace(n)
            ns[name_components[-1]] = value
# ---- Utility functions
def update_config(target, source):
    """Imports values from source into target.

    Recursively walks the <source> ConfigNamespace and inserts values
    into the <target> ConfigNamespace.  For example:

    >>> n = BasicConfig()
    >>> n.playlist.expand_playlist = True
    >>> n.ui.display_clock = True
    >>> n.ui.display_qlength = True
    >>> n.ui.width = 150
    >>> print n
    playlist.expand_playlist = True
    ui.display_clock = True
    ui.display_qlength = True
    ui.width = 150

    >>> from iniparse import ini
    >>> i = ini.INIConfig()
    >>> update_config(i, n)
    >>> print i
    [playlist]
    expand_playlist = True
    <BLANKLINE>
    [ui]
    display_clock = True
    display_qlength = True
    width = 150
    """
    for name in source:
        value = source[name]
        # Plain values are copied directly.
        if not isinstance(value, ConfigNamespace):
            target[name] = value
            continue
        # Nested namespaces are merged recursively, creating the
        # corresponding namespace in the target when needed.
        if name in target:
            dest = target[name]
            if not isinstance(dest, ConfigNamespace):
                raise TypeError('value-namespace conflict')
        else:
            dest = target._new_namespace(name)
        update_config(dest, value)

643
iniparse/ini.py Executable file
View File

@ -0,0 +1,643 @@
"""Access and/or modify INI files
* Compatible with ConfigParser
* Preserves order of sections & options
* Preserves comments/blank lines/etc
* More convenient access to data
Example:
>>> from StringIO import StringIO
>>> sio = StringIO('''# configure foo-application
... [foo]
... bar1 = qualia
... bar2 = 1977
... [foo-ext]
... special = 1''')
>>> cfg = INIConfig(sio)
>>> print cfg.foo.bar1
qualia
>>> print cfg['foo-ext'].special
1
>>> cfg.foo.newopt = 'hi!'
>>> cfg.baz.enabled = 0
>>> print cfg
# configure foo-application
[foo]
bar1 = qualia
bar2 = 1977
newopt = hi!
[foo-ext]
special = 1
<BLANKLINE>
[baz]
enabled = 0
"""
# An ini parser that supports ordered sections/options
# Also supports updates, while preserving structure
# Backward-compatible with ConfigParser
import re
from ConfigParser import DEFAULTSECT, ParsingError, MissingSectionHeaderError
import config
class LineType(object):
    """Base class for one physical line of an INI file.

    The original text is cached in ``line`` so that untouched lines
    round-trip byte-for-byte; any attribute change discards the cache
    and the line is rebuilt from its parts by to_string().
    """

    line = None

    def __init__(self, line=None):
        if line is not None:
            self.line = line.strip('\n')

    def __str__(self):
        # Unmodified objects print their original text; modified ones
        # are reconstructed from the current attribute values.
        return self.line if self.line is not None else self.to_string()

    def __setattr__(self, name, value):
        # Overwriting an existing attribute invalidates the cached
        # original line, since it may no longer be accurate.
        if hasattr(self, name):
            self.__dict__['line'] = None
        self.__dict__[name] = value

    def to_string(self):
        raise Exception('This method must be overridden in derived classes')
class SectionLine(LineType):
    """A section header line: "[name]" with an optional trailing comment."""

    regex = re.compile(r'^\['
                       r'(?P<name>[^]]+)'
                       r'\]\s*'
                       r'((?P<csep>;|#)(?P<comment>.*))?$')

    def __init__(self, name, comment=None, comment_separator=None,
                 comment_offset=-1, line=None):
        super(SectionLine, self).__init__(line)
        self.name = name
        self.comment = comment
        self.comment_separator = comment_separator
        self.comment_offset = comment_offset

    def to_string(self):
        text = '[' + self.name + ']'
        if self.comment is not None:
            # keep the comment at (roughly) its original column
            text = (text + ' ').ljust(self.comment_offset)
            text += self.comment_separator + self.comment
        return text

    @classmethod
    def parse(cls, line):
        match = cls.regex.match(line.rstrip())
        if match is None:
            return None
        return cls(match.group('name'), match.group('comment'),
                   match.group('csep'), match.start('csep'),
                   line)
class OptionLine(LineType):
    """An option line: "name = value", optionally with a ';' comment."""

    def __init__(self, name, value, separator=' = ', comment=None,
                 comment_separator=None, comment_offset=-1, line=None):
        super(OptionLine, self).__init__(line)
        self.name = name
        self.value = value
        self.separator = separator
        self.comment = comment
        self.comment_separator = comment_separator
        self.comment_offset = comment_offset

    def to_string(self):
        out = '%s%s%s' % (self.name, self.separator, self.value)
        if self.comment is not None:
            # try to preserve indentation of comments
            out = (out+' ').ljust(self.comment_offset)
            out = out + self.comment_separator + self.comment
        return out

    regex = re.compile(r'^(?P<name>[^:=\s[][^:=]*)'
                       r'(?P<sep>[:=]\s*)'
                       r'(?P<value>.*)$')

    def parse(cls, line):
        """Return an OptionLine for *line*, or None if it doesn't match."""
        m = cls.regex.match(line.rstrip())
        if m is None:
            return None

        name = m.group('name').rstrip()
        value = m.group('value')
        # whitespace between the name and the ':'/'=' is folded into the
        # separator so the line round-trips exactly
        sep = m.group('name')[len(name):] + m.group('sep')

        # comments are not detected in the regex because
        # ensuring total compatibility with ConfigParser
        # requires that:
        #     option = value ;comment     // value=='value'
        #     option = value;1 ;comment   // value=='value;1 ;comment'
        #
        # Doing this in a regex would be complicated.  I
        # think this is a bug.  The whole issue of how to
        # include ';' in the value needs to be addressed.
        # Also, '#' doesn't mark comments in options...
        coff = value.find(';')
        # NOTE(review): when coff == 0, value[coff-1] inspects the LAST
        # character of the value - looks unintended but is long-standing
        # behaviour; confirm before changing.
        if coff != -1 and value[coff-1].isspace():
            comment = value[coff+1:]
            csep = value[coff]
            value = value[:coff].rstrip()
            coff = m.start('value') + coff
        else:
            comment = None
            csep = None
            coff = -1

        return cls(name, value, sep, comment, csep, coff, line)
    parse = classmethod(parse)
def change_comment_syntax(comment_chars='%;#', allow_rem=False):
    """Globally change the characters that begin a comment line.

    Rebinds CommentLine.regex, so this affects all subsequent parsing.
    With allow_rem=True, a case-insensitive 'REM' prefix also starts
    a comment.
    """
    # Escape characters that are special inside a regex character class.
    escaped = re.sub(r'([\]\-\^])', r'\\\1', comment_chars)
    pattern = r'^(?P<csep>[%s]' % escaped
    if allow_rem:
        pattern += '|[rR][eE][mM]'
    pattern += r')(?P<comment>.*)$'
    CommentLine.regex = re.compile(pattern)
class CommentLine(LineType):
    """A whole-line comment, e.g. "; text", "# text" or "REM text"."""

    regex = re.compile(r'^(?P<csep>[;#]|[rR][eE][mM] +)'
                       r'(?P<comment>.*)$')

    def __init__(self, comment='', separator='#', line=None):
        super(CommentLine, self).__init__(line)
        self.comment = comment
        self.separator = separator

    def to_string(self):
        return self.separator + self.comment

    @classmethod
    def parse(cls, line):
        match = cls.regex.match(line.rstrip())
        if match is None:
            return None
        return cls(match.group('comment'), match.group('csep'), line)
class EmptyLine(LineType):
    """A blank (whitespace-only) line; its value is always ''."""

    # could make this a singleton
    def to_string(self):
        return ''

    value = property(lambda self: '')

    @classmethod
    def parse(cls, line):
        # only whitespace-only lines qualify
        if line.strip():
            return None
        return cls(line)
class ContinuationLine(LineType):
    """An indented line continuing the previous option's value."""

    regex = re.compile(r'^\s+(?P<value>.*)$')

    def __init__(self, value, value_offset=None, line=None):
        super(ContinuationLine, self).__init__(line)
        self.value = value
        # default to an 8-column indent when none was recorded
        self.value_offset = 8 if value_offset is None else value_offset

    def to_string(self):
        return ' ' * self.value_offset + self.value

    @classmethod
    def parse(cls, line):
        match = cls.regex.match(line.rstrip())
        if match is None:
            return None
        return cls(match.group('value'), match.start('value'), line)
class LineContainer(object):
def __init__(self, d=None):
self.contents = []
self.orgvalue = None
if d:
if isinstance(d, list): self.extend(d)
else: self.add(d)
def add(self, x):
self.contents.append(x)
def extend(self, x):
for i in x: self.add(i)
def get_name(self):
return self.contents[0].name
def set_name(self, data):
self.contents[0].name = data
def get_value(self):
if self.orgvalue is not None:
return self.orgvalue
elif len(self.contents) == 1:
return self.contents[0].value
else:
return '\n'.join([('%s' % x.value) for x in self.contents
if not isinstance(x, CommentLine)])
def set_value(self, data):
self.orgvalue = data
lines = ('%s' % data).split('\n')
# If there is an existing ContinuationLine, use its offset
value_offset = None
for v in self.contents:
if isinstance(v, ContinuationLine):
value_offset = v.value_offset
break
# Rebuild contents list, preserving initial OptionLine
self.contents = self.contents[0:1]
self.contents[0].value = lines[0]
del lines[0]
for line in lines:
if line.strip():
self.add(ContinuationLine(line, value_offset))
else:
self.add(EmptyLine())
name = property(get_name, set_name)
value = property(get_value, set_value)
def __str__(self):
s = [x.__str__() for x in self.contents]
return '\n'.join(s)
def finditer(self, key):
for x in self.contents[::-1]:
if hasattr(x, 'name') and x.name==key:
yield x
def find(self, key):
for x in self.finditer(key):
return x
raise KeyError(key)
def _make_xform_property(myattrname, srcattrname=None):
private_attrname = myattrname + 'value'
private_srcname = myattrname + 'source'
if srcattrname is None:
srcattrname = myattrname
def getfn(self):
srcobj = getattr(self, private_srcname)
if srcobj is not None:
return getattr(srcobj, srcattrname)
else:
return getattr(self, private_attrname)
def setfn(self, value):
srcobj = getattr(self, private_srcname)
if srcobj is not None:
setattr(srcobj, srcattrname, value)
else:
setattr(self, private_attrname, value)
return property(getfn, setfn)
class INISection(config.ConfigNamespace):
    """One section of an INI file, mapping option names to values.

    Several LineContainer groups may back a single section when the
    same [name] header appears more than once in the file.
    """

    _lines = None               # list of LineContainer section groups
    _options = None             # transformed option name -> LineContainer
    _defaults = None            # INISection holding [DEFAULT] values
    _optionxformvalue = None
    _optionxformsource = None
    # NOTE(review): this set is a class attribute shared by all
    # INISection instances (it is mutated, never rebound per instance) -
    # presumably intentional for ConfigParser compatibility; confirm
    # before relying on per-instance behaviour.
    _compat_skip_empty_lines = set()

    def __init__(self, lineobj, defaults=None,
                 optionxformvalue=None, optionxformsource=None):
        self._lines = [lineobj]
        self._defaults = defaults
        self._optionxformvalue = optionxformvalue
        self._optionxformsource = optionxformsource
        self._options = {}

    _optionxform = _make_xform_property('_optionxform')

    def _compat_get(self, key):
        # identical to __getitem__ except that _compat_XXX
        # is checked for backward-compatible handling
        if key == '__name__':
            return self._lines[-1].name
        if self._optionxform: key = self._optionxform(key)
        try:
            value = self._options[key].value
            del_empty = key in self._compat_skip_empty_lines
        except KeyError:
            if self._defaults and key in self._defaults._options:
                value = self._defaults._options[key].value
                del_empty = key in self._defaults._compat_skip_empty_lines
            else:
                raise
        if del_empty:
            # collapse blank continuation lines, as the stdlib parser
            # would have dropped them
            value = re.sub('\n+', '\n', value)
        return value

    def _getitem(self, key):
        # '__name__' mirrors ConfigParser's magic pseudo-option.
        if key == '__name__':
            return self._lines[-1].name
        if self._optionxform: key = self._optionxform(key)
        try:
            return self._options[key].value
        except KeyError:
            # fall back to the [DEFAULT] section
            if self._defaults and key in self._defaults._options:
                return self._defaults._options[key].value
            else:
                raise

    def __setitem__(self, key, value):
        if self._optionxform: xkey = self._optionxform(key)
        else: xkey = key
        if xkey in self._compat_skip_empty_lines:
            self._compat_skip_empty_lines.remove(xkey)
        if xkey not in self._options:
            # create a dummy object - value may have multiple lines
            obj = LineContainer(OptionLine(key, ''))
            self._lines[-1].add(obj)
            self._options[xkey] = obj
        # the set_value() function in LineContainer
        # automatically handles multi-line values
        self._options[xkey].value = value

    def __delitem__(self, key):
        if self._optionxform: key = self._optionxform(key)
        if key in self._compat_skip_empty_lines:
            self._compat_skip_empty_lines.remove(key)
        # Remove every matching option group from every duplicate
        # section block.
        for l in self._lines:
            remaining = []
            for o in l.contents:
                if isinstance(o, LineContainer):
                    n = o.name
                    if self._optionxform: n = self._optionxform(n)
                    if key != n: remaining.append(o)
                else:
                    remaining.append(o)
            l.contents = remaining
        del self._options[key]

    def __iter__(self):
        """Yield option names in file order, then unshadowed defaults."""
        d = set()
        for l in self._lines:
            for x in l.contents:
                if isinstance(x, LineContainer):
                    if self._optionxform:
                        ans = self._optionxform(x.name)
                    else:
                        ans = x.name
                    if ans not in d:
                        yield ans
                        d.add(ans)
        if self._defaults:
            for x in self._defaults:
                if x not in d:
                    yield x
                    d.add(x)

    def _new_namespace(self, name):
        # INI files are flat: sections cannot nest.
        raise Exception('No sub-sections allowed', name)
def make_comment(line):
    """Wrap an unparsable input line in a CommentLine, dropping the newline."""
    return CommentLine(line.rstrip('\n'))
def readline_iterator(f):
    """Iterate over a file by only using the file object's readline method.

    If the final line ends with a newline, one extra empty string is
    yielded afterwards, so callers can tell whether the file ended with
    a newline.
    """
    saw_newline = False
    while True:
        line = f.readline()
        if not line:
            if saw_newline:
                yield ""
            return
        saw_newline = line.endswith('\n')
        yield line
def lower(x):
    """Module-level default for optionxform: simple case-folding."""
    return x.lower()
class INIConfig(config.ConfigNamespace):
_data = None
_sections = None
_defaults = None
_optionxformvalue = None
_optionxformsource = None
_sectionxformvalue = None
_sectionxformsource = None
_parse_exc = None
_bom = False
    def __init__(self, fp=None, defaults=None, parse_exc=True,
                 optionxformvalue=lower, optionxformsource=None,
                 sectionxformvalue=None, sectionxformsource=None):
        """Build an empty config, then optionally parse file object *fp*.

        *defaults* populates the [DEFAULT] section; *parse_exc* controls
        whether malformed input raises or is kept as comment lines.
        """
        self._data = LineContainer()
        self._parse_exc = parse_exc
        self._optionxformvalue = optionxformvalue
        self._optionxformsource = optionxformsource
        self._sectionxformvalue = sectionxformvalue
        self._sectionxformsource = sectionxformsource
        self._sections = {}
        if defaults is None: defaults = {}
        # Defaults live in their own INISection, keyed through this
        # object's option transform.
        self._defaults = INISection(LineContainer(), optionxformsource=self)
        for name, value in defaults.iteritems():
            self._defaults[name] = value
        if fp is not None:
            self._readfp(fp)

    # Transforms delegate to 'optionxform' on a source object (e.g. a
    # compat.RawConfigParser) when one is set.  NOTE(review): sections
    # also route through 'optionxform' - presumably intentional for
    # ConfigParser compatibility; confirm.
    _optionxform = _make_xform_property('_optionxform', 'optionxform')
    _sectionxform = _make_xform_property('_sectionxform', 'optionxform')
    def _getitem(self, key):
        # The DEFAULT section is virtual: it is not stored in _sections.
        if key == DEFAULTSECT:
            return self._defaults
        if self._sectionxform: key = self._sectionxform(key)
        return self._sections[key]
    def __setitem__(self, key, value):
        # Top-level values are meaningless in an INI file.
        raise Exception('Values must be inside sections', key, value)
    def __delitem__(self, key):
        """Remove a section and all of its physical lines."""
        if self._sectionxform: key = self._sectionxform(key)
        for line in self._sections[key]._lines:
            self._data.contents.remove(line)
        del self._sections[key]
    def __iter__(self):
        """Yield section names in file order, excluding [DEFAULT]."""
        d = set()
        # Seeding with DEFAULTSECT ensures it is never yielded, even if
        # a literal [DEFAULT] header appears in the data.
        d.add(DEFAULTSECT)
        for x in self._data.contents:
            if isinstance(x, LineContainer):
                if x.name not in d:
                    yield x.name
                    d.add(x.name)
    def _new_namespace(self, name):
        """Create (or extend) the section *name* and return its namespace."""
        if self._data.contents:
            # separate sections with a blank line
            self._data.add(EmptyLine())
        obj = LineContainer(SectionLine(name))
        self._data.add(obj)
        if self._sectionxform: name = self._sectionxform(name)
        if name in self._sections:
            # duplicate section header: the existing namespace simply
            # gains another group of lines
            ns = self._sections[name]
            ns._lines.append(obj)
        else:
            ns = INISection(obj, defaults=self._defaults,
                            optionxformsource=self)
            self._sections[name] = ns
        return ns
    def __str__(self):
        """Render the INI text, restoring a BOM if one was parsed."""
        if self._bom:
            fmt = u'\ufeff%s'
        else:
            fmt = '%s'
        return fmt % self._data.__str__()

    __unicode__ = __str__

    # Order matters: _parse() tries these in sequence.
    _line_types = [EmptyLine, CommentLine,
                   SectionLine, OptionLine,
                   ContinuationLine]
    def _parse(self, line):
        """Return a LineType object for *line*, or None if unparsable."""
        for linetype in self._line_types:
            lineobj = linetype.parse(line)
            if lineobj:
                return lineobj
        else:
            # can't parse line
            return None
def _readfp(self, fp):
    """Parse an INI stream *fp* into the internal line-object model.

    Comment and blank lines are buffered in ``pending_lines`` and flushed
    into whichever container (current option, current section, or the file
    itself) the next structural line turns out to belong to.  When
    ``self._parse_exc`` is false, unparseable lines are silently converted
    to comments instead of raising ParsingError/MissingSectionHeaderError.
    """
    cur_section = None         # LineContainer of the section being filled
    cur_option = None          # LineContainer of the option being filled
    cur_section_name = None
    cur_option_name = None
    pending_lines = []         # comments/blanks awaiting an owner
    pending_empty_lines = False
    try:
        fname = fp.name
    except AttributeError:
        # file-like objects without a name (e.g. StringIO)
        fname = '<???>'
    linecount = 0
    exc = None
    line = None
    for line in readline_iterator(fp):
        # Check for BOM on first line
        if linecount == 0 and isinstance(line, unicode):
            if line[0] == u'\ufeff':
                line = line[1:]
                self._bom = True
        lineobj = self._parse(line)
        linecount += 1
        # Content before any [section] header is an error (or a comment
        # when parse errors are suppressed).
        if not cur_section and not isinstance(lineobj,
                            (CommentLine, EmptyLine, SectionLine)):
            if self._parse_exc:
                raise MissingSectionHeaderError(fname, linecount, line)
            else:
                lineobj = make_comment(line)
        if lineobj is None:
            # can't parse line: collect the error, keep the text as comment
            if self._parse_exc:
                if exc is None: exc = ParsingError(fname)
                exc.append(linecount, line)
            lineobj = make_comment(line)
        if isinstance(lineobj, ContinuationLine):
            if cur_option:
                # Buffered comments/blanks belong inside the option value.
                if pending_lines:
                    cur_option.extend(pending_lines)
                    pending_lines = []
                if pending_empty_lines:
                    # NOTE(review): optobj is only bound once an OptionLine
                    # has been seen; a truthy cur_option implies that here.
                    optobj._compat_skip_empty_lines.add(cur_option_name)
                    pending_empty_lines = False
                cur_option.add(lineobj)
            else:
                # illegal continuation line - convert to comment
                if self._parse_exc:
                    if exc is None: exc = ParsingError(fname)
                    exc.append(linecount, line)
                lineobj = make_comment(line)
        if isinstance(lineobj, OptionLine):
            # Buffered lines before an option belong to the section.
            if pending_lines:
                cur_section.extend(pending_lines)
                pending_lines = []
            pending_empty_lines = False
            cur_option = LineContainer(lineobj)
            cur_section.add(cur_option)
            if self._optionxform:
                cur_option_name = self._optionxform(cur_option.name)
            else:
                cur_option_name = cur_option.name
            if cur_section_name == DEFAULTSECT:
                optobj = self._defaults
            else:
                optobj = self._sections[cur_section_name]
            optobj._options[cur_option_name] = cur_option
        if isinstance(lineobj, SectionLine):
            # Buffered lines before a section header stay at file level.
            self._data.extend(pending_lines)
            pending_lines = []
            pending_empty_lines = False
            cur_section = LineContainer(lineobj)
            self._data.add(cur_section)
            cur_option = None
            cur_option_name = None
            if cur_section.name == DEFAULTSECT:
                self._defaults._lines.append(cur_section)
                cur_section_name = DEFAULTSECT
            else:
                if self._sectionxform:
                    cur_section_name = self._sectionxform(cur_section.name)
                else:
                    cur_section_name = cur_section.name
                if cur_section_name not in self._sections:
                    self._sections[cur_section_name] = \
                            INISection(cur_section, defaults=self._defaults,
                                       optionxformsource=self)
                else:
                    # duplicate section header: extend the existing namespace
                    self._sections[cur_section_name]._lines.append(cur_section)
        if isinstance(lineobj, (CommentLine, EmptyLine)):
            pending_lines.append(lineobj)
            if isinstance(lineobj, EmptyLine):
                pending_empty_lines = True
    # Trailing comments/blanks belong to the file itself.
    self._data.extend(pending_lines)
    if line and line[-1]=='\n':
        self._data.add(EmptyLine())
    if exc:
        # raise the accumulated parse errors at the very end
        raise exc

47
iniparse/utils.py Executable file
View File

@ -0,0 +1,47 @@
import compat
from ini import LineContainer, EmptyLine
def tidy(cfg):
    """Clean up blank lines.

    Makes the configuration look clean and handwritten: consecutive
    empty lines and empty lines at the start of the file are removed,
    and exactly one is guaranteed at the end of the file.
    """
    if isinstance(cfg, compat.RawConfigParser):
        cfg = cfg.data
    cont = cfg._data.contents
    i = 1
    while i < len(cont):
        current = cont[i]
        if isinstance(current, LineContainer):
            tidy_section(current)
            i += 1
            continue
        if isinstance(cont[i-1], EmptyLine) and isinstance(current, EmptyLine):
            del cont[i]
            continue
        i += 1
    # Remove empty first line
    if cont and isinstance(cont[0], EmptyLine):
        del cont[0]
    # Ensure a last line
    if cont and not isinstance(cont[-1], EmptyLine):
        cont.append(EmptyLine())
def tidy_section(lc):
    """Collapse consecutive empty lines inside one section container."""
    cont = lc.contents
    i = 1
    while i < len(cont):
        if isinstance(cont[i], EmptyLine) and isinstance(cont[i-1], EmptyLine):
            del cont[i]
        else:
            i += 1
    # Remove empty first line (index 0 is the section header itself)
    if len(cont) > 1 and isinstance(cont[1], EmptyLine):
        del cont[1]

242
libtisbackup/XenAPI.py Normal file
View File

@ -0,0 +1,242 @@
#============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2006-2007 XenSource Inc.
#============================================================================
#
# Parts of this file are based upon xmlrpclib.py, the XML-RPC client
# interface included in the Python distribution.
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
import gettext
import xmlrpclib
import httplib
import socket
# Message catalogue for the xen-xm domain; falls back to untranslated text.
translation = gettext.translation('xen-xm', fallback = True)

# Known Xen-API protocol versions.
API_VERSION_1_1 = '1.1'
API_VERSION_1_2 = '1.2'

#
# Methods that have different parameters between API versions 1.1 and 1.2, and
# the number of parameters in 1.1.  _Dispatcher.__call__ truncates the
# argument list to that count when talking to a 1.1 server.
#
COMPATIBILITY_METHODS_1_1 = [
    ('SR.create', 8),
    ('SR.introduce', 6),
    ('SR.make', 7),
    ('VDI.snapshot', 1),
    ('VDI.clone', 1),
    ]
class Failure(Exception):
def __init__(self, details):
self.details = details
def __str__(self):
try:
return str(self.details)
except Exception, exn:
import sys
print >>sys.stderr, exn
return "Xen-API failure: %s" % str(self.details)
def _details_map(self):
return dict([(str(i), self.details[i])
for i in range(len(self.details))])
# Unique sentinel returned by _parse_result when the server reports
# SESSION_INVALID; callers compare against it to trigger a re-login.
_RECONNECT_AND_RETRY = (lambda _ : ())
class UDSHTTPConnection(httplib.HTTPConnection):
    """HTTPConnection subclass to allow HTTP over Unix domain sockets."""

    def connect(self):
        # The 'host' carries the socket path with '/' encoded as '_'.
        sockpath = self.host.replace("_", "/")
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(sockpath)
        self.sock = sock
class UDSHTTP(httplib.HTTP):
    """Legacy httplib.HTTP wrapper bound to the Unix-socket connection."""
    _connection_class = UDSHTTPConnection
class UDSTransport(xmlrpclib.Transport):
    """xmlrpclib transport that speaks HTTP over a Unix domain socket."""

    def make_connection(self, host):
        # 'host' encodes the socket path (see UDSHTTPConnection.connect)
        return UDSHTTP(host)
class Session(xmlrpclib.ServerProxy):
    """A server proxy and session manager for communicating with xapi using
    the Xen-API.

    Example:

        session = Session('http://localhost/')
        session.login_with_password('me', 'mypassword')
        session.xenapi.VM.start(vm_uuid)
        session.xenapi.session.logout()
    """
    def __init__(self, uri, transport=None, encoding=None, verbose=0,
                 allow_none=1):
        xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding,
                                       verbose, allow_none)
        # Opaque session handle; None until a login method succeeds.
        self._session = None
        # Remembered so an expired session can be transparently renewed.
        self.last_login_method = None
        self.last_login_params = None
        self.API_version = API_VERSION_1_1

    def xenapi_request(self, methodname, params):
        """Dispatch *methodname* with the session handle prepended.

        Retries up to 3 times after re-logging-in when the server reports
        SESSION_INVALID (signalled by the _RECONNECT_AND_RETRY sentinel).
        """
        if methodname.startswith('login'):
            self._login(methodname, params)
            return None
        elif methodname == 'logout':
            self._logout()
            return None
        else:
            retry_count = 0
            while retry_count < 3:
                full_params = (self._session,) + params
                result = _parse_result(getattr(self, methodname)(*full_params))
                if result == _RECONNECT_AND_RETRY:
                    retry_count += 1
                    if self.last_login_method:
                        self._login(self.last_login_method,
                                    self.last_login_params)
                    else:
                        raise xmlrpclib.Fault(401, 'You must log in')
                else:
                    return result
            raise xmlrpclib.Fault(
                500, 'Tried 3 times to get a valid session, but failed')

    def _login(self, method, params):
        # Perform session.<method>(*params) and remember the credentials
        # so xenapi_request can re-login after SESSION_INVALID.
        result = _parse_result(getattr(self, 'session.%s' % method)(*params))
        if result == _RECONNECT_AND_RETRY:
            raise xmlrpclib.Fault(
                500, 'Received SESSION_INVALID when logging in')
        self._session = result
        self.last_login_method = method
        self.last_login_params = params
        if method.startswith("slave_local"):
            # local (emergency-mode) sessions are always 1.2
            self.API_version = API_VERSION_1_2
        else:
            self.API_version = self._get_api_version()

    def logout(self):
        """End the session and reset all cached login state."""
        try:
            if self.last_login_method.startswith("slave_local"):
                return _parse_result(self.session.local_logout(self._session))
            else:
                return _parse_result(self.session.logout(self._session))
        finally:
            self._session = None
            self.last_login_method = None
            self.last_login_params = None
            self.API_version = API_VERSION_1_1

    def _get_api_version(self):
        # Ask the pool master for its advertised major.minor API version.
        pool = self.xenapi.pool.get_all()[0]
        host = self.xenapi.pool.get_master(pool)
        if (self.xenapi.host.get_API_version_major(host) == "1" and
            self.xenapi.host.get_API_version_minor(host) == "2"):
            return API_VERSION_1_2
        else:
            return API_VERSION_1_1

    def __getattr__(self, name):
        # 'handle' -> raw session id; 'xenapi' -> dotted-name dispatcher;
        # login*/slave_local* -> bound login helpers; everything else is a
        # plain XML-RPC method on the underlying ServerProxy.
        if name == 'handle':
            return self._session
        elif name == 'xenapi':
            return _Dispatcher(self.API_version, self.xenapi_request, None)
        elif name.startswith('login') or name.startswith('slave_local'):
            return lambda *params: self._login(name, params)
        else:
            return xmlrpclib.ServerProxy.__getattr__(self, name)
def xapi_local():
    """Return a Session talking to the local xapi over its Unix socket."""
    return Session("http://_var_xapi_xapi/", transport=UDSTransport())
def _parse_result(result):
if type(result) != dict or 'Status' not in result:
raise xmlrpclib.Fault(500, 'Missing Status in response from server' + result)
if result['Status'] == 'Success':
if 'Value' in result:
return result['Value']
else:
raise xmlrpclib.Fault(500,
'Missing Value in response from server')
else:
if 'ErrorDescription' in result:
if result['ErrorDescription'][0] == 'SESSION_INVALID':
return _RECONNECT_AND_RETRY
else:
raise Failure(result['ErrorDescription'])
else:
raise xmlrpclib.Fault(
500, 'Missing ErrorDescription in response from server')
# Based upon _Method from xmlrpclib.
class _Dispatcher:
def __init__(self, API_version, send, name):
self.__API_version = API_version
self.__send = send
self.__name = name
def __repr__(self):
if self.__name:
return '<XenAPI._Dispatcher for %s>' % self.__name
else:
return '<XenAPI._Dispatcher>'
def __getattr__(self, name):
if self.__name is None:
return _Dispatcher(self.__API_version, self.__send, name)
else:
return _Dispatcher(self.__API_version, self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
if self.__API_version == API_VERSION_1_1:
for m in COMPATIBILITY_METHODS_1_1:
if self.__name == m[0]:
return self.__send(self.__name, args[0:m[1]])
return self.__send(self.__name, args)

18
libtisbackup/__init__.py Normal file
View File

@ -0,0 +1,18 @@
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------

View File

@ -0,0 +1,133 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import sys
# Temporarily redirect stderr while importing paramiko to hide its
# deprecation noise; restored right after the import either way.
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
    import paramiko
except ImportError,e:
    print "Error : can not load paramiko library %s" % e
    raise

sys.stderr = sys.__stderr__

import datetime
import base64
import os
from common import *
class backup_mysql(backup_generic):
    """Backup a mysql database as gzipped sql file through ssh"""
    type = 'mysql+ssh'

    required_params = backup_generic.required_params + ['db_name','db_user','db_passwd','private_key']

    # Per-section configuration values, filled in from the .ini file.
    db_name=''
    db_user=''
    db_passwd=''

    def do_backup(self,stats):
        """Dump, gzip and fetch the database over SSH.

        Pipeline: mysqldump on the remote host -> gzip -> sftp download
        into self.backup_dir -> remove the remote temp file.  Progress is
        reported through stats['status']; raises on any non-zero remote
        exit code.
        """
        self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key)
        try:
            mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
        except paramiko.SSHException:
            # not an RSA key; fall back to DSA
            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)

        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(self.server_name,username='root',pkey = mykey, port=self.ssh_port)

        t = datetime.datetime.now()
        backup_start_date =  t.strftime('%Y%m%d-%Hh%Mm%S')

        # dump db
        # NOTE(review): the password appears on the remote command line
        # (visible in 'ps'); consider a defaults-file instead.
        stats['status']='Dumping'
        cmd = 'mysqldump -u' + self.db_user +' -p' + self.db_passwd + ' ' + self.db_name + ' > /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
        self.logger.debug('[%s] Dump DB : %s',self.backup_name,cmd)
        if not self.dry_run:
            (error_code,output) = ssh_exec(cmd,ssh=ssh)
            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
            if error_code:
                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))

        # zip the file
        stats['status']='Zipping'
        cmd = 'gzip /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
        self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd)
        if not self.dry_run:
            (error_code,output) = ssh_exec(cmd,ssh=ssh)
            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
            if error_code:
                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))

        # get the file
        stats['status']='SFTP'
        filepath = '/tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
        localpath = os.path.join(self.backup_dir , self.db_name + '-' + backup_start_date + '.sql.gz')
        self.logger.debug('[%s] Get gz backup with sftp on %s from %s to %s',self.backup_name,self.server_name,filepath,localpath)
        if not self.dry_run:
            transport =  ssh.get_transport()
            sftp = paramiko.SFTPClient.from_transport(transport)
            sftp.get(filepath, localpath)
            sftp.close()

        if not self.dry_run:
            # single-archive backup: one file, fully (re)written each run
            stats['total_files_count']=1
            stats['written_files_count']=1
            stats['total_bytes']=os.stat(localpath).st_size
            stats['written_bytes']=os.stat(localpath).st_size
        stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,self.db_name, stats['written_bytes'], localpath)
        stats['backup_location'] = localpath

        stats['status']='RMTemp'
        cmd = 'rm -f /tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
        self.logger.debug('[%s] Remove temp gzip : %s',self.backup_name,cmd)
        if not self.dry_run:
            (error_code,output) = ssh_exec(cmd,ssh=ssh)
            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
            if error_code:
                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
        stats['status']='OK'

    def register_existingbackups(self):
        """scan backup dir and insert stats in database

        Matches <db_name>-YYYYMMDD-HHhMMmSS.sql.gz files and registers
        any not already present in the stats database.
        """
        registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))]

        filelist = os.listdir(self.backup_dir)
        filelist.sort()
        # NOTE(review): 're' is not imported in this module — presumably
        # provided by 'from common import *'; verify.
        p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).sql.gz$' % self.db_name)
        for item in filelist:
            sr = p.match(item)
            if sr:
                file_name = os.path.join(self.backup_dir,item)
                start = datetime.datetime.strptime(sr.groups()[0],'%Y%m%d-%Hh%Mm%S').isoformat()
                if not file_name in registered:
                    self.logger.info('Registering %s from %s',file_name,fileisodate(file_name))
                    size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0])
                    self.logger.debug('  Size in bytes : %i',size_bytes)
                    if not self.dry_run:
                        self.dbstat.add(self.backup_name,self.server_name,'',\
                                        backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name)
                else:
                    self.logger.info('Skipping %s from %s, already registered',file_name,fileisodate(file_name))

# Make this driver discoverable by its 'type' string.
register_driver(backup_mysql)

49
libtisbackup/backup_null.py Executable file
View File

@ -0,0 +1,49 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import datetime
from common import *
class backup_null(backup_generic):
    """Null backup to register servers which don't need any backups
    but we still want to know they are taken in account"""
    type = 'null'

    required_params = ['type','server_name','backup_name']
    optional_params = []

    def do_backup(self,stats):
        """Nothing to back up."""
        pass

    def process_backup(self):
        """No-op: there is no backup pipeline to run."""
        pass

    def cleanup_backup(self):
        """No-op: nothing was ever written, nothing to clean."""
        pass

    def export_latestbackup(self,destdir):
        # No data to export: return an empty stats dict.
        return {}

    def checknagios(self,maxage_hours=30):
        # Always healthy: a null backup can never be out of date.
        return (nagiosStateOk,"No backups needs to be performed")
# Make this driver discoverable by its 'type' string ('null').
register_driver(backup_null)

if __name__=='__main__':
    pass

View File

@ -0,0 +1,127 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import sys
# Temporarily redirect stderr while importing paramiko to hide its
# deprecation noise; restored right after the import either way.
try:
    sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
    import paramiko
except ImportError,e:
    print "Error : can not load paramiko library %s" % e
    raise

sys.stderr = sys.__stderr__

import datetime
import base64
import os
import logging
import re
from common import *
class backup_pgsql(backup_generic):
    """Backup a postgresql database as gzipped sql file through ssh"""
    type = 'pgsql+ssh'

    required_params = backup_generic.required_params + ['db_name','private_key']

    # Per-section configuration value, filled in from the .ini file.
    db_name=''

    def do_backup(self,stats):
        """Dump, gzip and fetch the database over SSH.

        Pipeline: pg_dump (as user postgres via sudo) on the remote host
        -> gzip -> sftp download into self.backup_dir -> remove the remote
        temp file.  Raises on any non-zero remote exit code.
        """
        try:
            mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
        except paramiko.SSHException:
            # not an RSA key; fall back to DSA
            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)

        self.logger.debug('[%s] Trying to connect to "%s" with username root and key "%s"',self.backup_name,self.server_name,self.private_key)
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(self.server_name,username='root',pkey = mykey,port=self.ssh_port)

        t = datetime.datetime.now()
        backup_start_date =  t.strftime('%Y%m%d-%Hh%Mm%S')

        # dump db
        cmd = 'sudo -u postgres pg_dump ' + self.db_name  + ' > /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
        self.logger.debug('[%s] %s ',self.backup_name,cmd)
        if not self.dry_run:
            (error_code,output) = ssh_exec(cmd,ssh=ssh)
            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
            if error_code:
                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))

        # zip the file
        cmd = 'gzip /tmp/' + self.db_name + '-' + backup_start_date + '.sql'
        self.logger.debug('[%s] %s ',self.backup_name,cmd)
        if not self.dry_run:
            (error_code,output) = ssh_exec(cmd,ssh=ssh)
            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
            if error_code:
                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))

        # get the file
        filepath = '/tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
        localpath = self.backup_dir + '/' + self.db_name + '-' + backup_start_date + '.sql.gz'
        self.logger.debug('[%s] get the file using sftp from "%s" to "%s" ',self.backup_name,filepath,localpath)
        if not self.dry_run:
            transport =  ssh.get_transport()
            sftp = paramiko.SFTPClient.from_transport(transport)
            sftp.get(filepath, localpath)
            sftp.close()

        if not self.dry_run:
            # single-archive backup: one file, fully (re)written each run
            stats['total_files_count']=1
            stats['written_files_count']=1
            stats['total_bytes']=os.stat(localpath).st_size
            stats['written_bytes']=os.stat(localpath).st_size
        stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,self.db_name, stats['written_bytes'], localpath)
        stats['backup_location'] = localpath

        # remove the remote temp archive
        cmd = 'rm -f /tmp/' + self.db_name + '-' + backup_start_date + '.sql.gz'
        self.logger.debug('[%s] %s ',self.backup_name,cmd)
        if not self.dry_run:
            (error_code,output) = ssh_exec(cmd,ssh=ssh)
            self.logger.debug("[%s] Output of %s :\n%s",self.backup_name,cmd,output)
            if error_code:
                raise Exception('Aborting, Not null exit code (%i) for "%s"' % (error_code,cmd))
        stats['status']='OK'

    def register_existingbackups(self):
        """scan backup dir and insert stats in database

        Matches <db_name>-YYYYMMDD-HHhMMmSS.sql.gz files and registers
        any not already present in the stats database.
        """
        registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))]

        filelist = os.listdir(self.backup_dir)
        filelist.sort()
        p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).sql.gz$' % self.db_name)
        for item in filelist:
            sr = p.match(item)
            if sr:
                file_name = os.path.join(self.backup_dir,item)
                start = datetime.datetime.strptime(sr.groups()[0],'%Y%m%d-%Hh%Mm%S').isoformat()
                if not file_name in registered:
                    self.logger.info('Registering %s from %s',file_name,fileisodate(file_name))
                    size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0])
                    self.logger.debug('  Size in bytes : %i',size_bytes)
                    if not self.dry_run:
                        self.dbstat.add(self.backup_name,self.server_name,'',\
                                        backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name)
                else:
                    self.logger.info('Skipping %s from %s, already registered',file_name,fileisodate(file_name))

# Make this driver discoverable by its 'type' string.
register_driver(backup_pgsql)

View File

@ -0,0 +1,127 @@
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import datetime
from common import *
import time
class backup_rdiff:
backup_dir=''
backup_start_date=None
backup_name=''
server_name=''
exclude_list=''
ssh_port='22'
remote_user='root'
remote_dir=''
dest_dir=''
verbose = False
dry_run=False
def __init__(self, backup_name, backup_base_dir):
self.backup_dir = backup_base_dir + '/' + backup_name
if os.path.isdir(self.backup_dir )==False:
os.makedirs(self.backup_dir)
self.backup_name = backup_name
t = datetime.datetime.now()
self.backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')
def get_latest_backup(self):
filelist = os.listdir(self.backup_dir)
if len(filelist) == 0:
return ''
filelist.sort()
return filelist[-1]
def cleanup_backup(self):
filelist = os.listdir(self.backup_dir)
if len(filelist) == 0:
return ''
filelist.sort()
for backup_date in filelist:
today = time.time()
print backup_date
datestring = backup_date[0:8]
c = time.strptime(datestring,"%Y%m%d")
# TODO: improve
if today - c < 60 * 60 * 24* 30:
print time.strftime("%Y%m%d",c) + " is to be deleted"
def copy_latest_to_new(self):
# TODO check that latest exist
# TODO check that new does not exist
last_backup = self.get_latest_backup()
if last_backup=='':
print "*********************************"
print "*first backup for " + self.backup_name
else:
latest_backup_path = self.backup_dir + '/' + last_backup
new_backup_path = self.backup_dir + '/' + self.backup_start_date
print "#cp -al starting"
cmd = 'cp -al ' + latest_backup_path + ' ' + new_backup_path
print cmd
if self.dry_run==False:
call_external_process(cmd)
print "#cp -al finished"
def rsync_to_new(self):
self.dest_dir = self.backup_dir + '/' + self.backup_start_date + '/'
src_server = self.remote_user + '@' + self.server_name + ':"' + self.remote_dir.strip() + '/"'
print "#starting rsync"
verbose_arg=""
if self.verbose==True:
verbose_arg = "-P "
cmd = "rdiff-backup " + verbose_arg + ' --compress-level=9 --numeric-ids -az --partial -e "ssh -o StrictHostKeyChecking=no -c Blowfish -p ' + self.ssh_port + ' -i ' + self.private_key + '" --stats --delete-after ' + self.exclude_list + ' ' + src_server + ' ' + self.dest_dir
print cmd
## deal with exit code 24 (file vanished)
if self.dry_run==False:
p = subprocess.call(cmd, shell=True)
if (p ==24):
print "Note: some files vanished before transfer"
if (p != 0 and p != 24 ):
raise Exception('shell program exited with error code ' + str(p), cmd)
print "#finished rsync"
def process_backup(self):
print ""
print "#========Starting backup item ========="
self.copy_latest_to_new()
self.rsync_to_new()
print "#========Backup item finished=========="

View File

@ -0,0 +1,334 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import datetime
from common import *
import time
import logging
import re
import os.path
import datetime
class backup_rsync(backup_generic):
"""Backup a directory on remote server with rsync and rsync protocol (requires running remote rsync daemon)"""
type = 'rsync'
required_params = backup_generic.required_params + ['remote_user','remote_dir','rsync_module','password_file']
optional_params = backup_generic.optional_params + ['compressionlevel','compression','bwlimit','exclude_list','protect_args','overload_args']
remote_user='root'
remote_dir=''
exclude_list=''
rsync_module=''
password_file = ''
compression = ''
bwlimit = 0
protect_args = '1'
overload_args = None
compressionlevel = 0
def read_config(self,iniconf):
assert(isinstance(iniconf,ConfigParser))
backup_generic.read_config(self,iniconf)
if not self.bwlimit and iniconf.has_option('global','bw_limit'):
self.bwlimit = iniconf.getint('global','bw_limit')
if not self.compressionlevel and iniconf.has_option('global','compression_level'):
self.compressionlevel = iniconf.getint('global','compression_level')
def do_backup(self,stats):
if not self.set_lock():
self.logger.error("[%s] a lock file is set, a backup maybe already running!!",self.backup_name)
return False
try:
try:
backup_source = 'undefined'
dest_dir = os.path.join(self.backup_dir,self.backup_start_date+'.rsync/')
if not os.path.isdir(dest_dir):
if not self.dry_run:
os.makedirs(dest_dir)
else:
print 'mkdir "%s"' % dest_dir
else:
raise Exception('backup destination directory already exists : %s' % dest_dir)
options = ['-rt','--stats','--delete-excluded','--numeric-ids','--delete-after']
if self.logger.level:
options.append('-P')
if self.dry_run:
options.append('-d')
if self.overload_args <> None:
options.append(self.overload_args)
elif not "cygdrive" in self.remote_dir:
# we don't preserve owner, group, links, hardlinks, perms for windows/cygwin as it is not reliable nor useful
options.append('-lpgoD')
# the protect-args option is not available in all rsync version
if not self.protect_args.lower() in ('false','no','0'):
options.append('--protect-args')
if self.compression.lower() in ('true','yes','1'):
options.append('-z')
if self.compressionlevel:
options.append('--compress-level=%s' % self.compressionlevel)
if self.bwlimit:
options.append('--bwlimit %s' % self.bwlimit)
latest = self.get_latest_backup(self.backup_start_date)
if latest:
options.extend(['--link-dest="%s"' % os.path.join('..',b,'') for b in latest])
def strip_quotes(s):
if s[0] == '"':
s = s[1:]
if s[-1] == '"':
s = s[:-1]
return s
# Add excludes
if "--exclude" in self.exclude_list:
# old settings with exclude_list=--exclude toto --exclude=titi
excludes = [strip_quotes(s).strip() for s in self.exclude_list.replace('--exclude=','').replace('--exclude ','').split()]
else:
try:
# newsettings with exclude_list='too','titi', parsed as a str python list content
excludes = eval('[%s]' % self.exclude_list)
except Exception,e:
raise Exception('Error reading exclude list : value %s, eval error %s (don\'t forget quotes and comma...)' % (self.exclude_list,e))
options.extend(['--exclude="%s"' % x for x in excludes])
if (self.rsync_module and not self.password_file):
raise Exception('You must specify a password file if you specify a rsync module')
if (not self.rsync_module and not self.private_key):
raise Exception('If you don''t use SSH, you must specify a rsync module')
#rsync_re = re.compile('(?P<server>[^:]*)::(?P<export>[^/]*)/(?P<path>.*)')
#ssh_re = re.compile('((?P<user>.*)@)?(?P<server>[^:]*):(?P<path>/.*)')
# Add ssh connection params
if self.rsync_module:
# Case of rsync exports
if self.password_file:
options.append('--password-file="%s"' % self.password_file)
backup_source = '%s@%s::%s%s' % (self.remote_user, self.server_name, self.rsync_module, self.remote_dir)
else:
# case of rsync + ssh
ssh_params = ['-o StrictHostKeyChecking=no','-c blowfish']
if self.private_key:
ssh_params.append('-i %s' % self.private_key)
if self.ssh_port <> 22:
ssh_params.append('-p %i' % self.ssh_port)
options.append('-e "/usr/bin/ssh %s"' % (" ".join(ssh_params)))
backup_source = '%s@%s:%s' % (self.remote_user,self.server_name,self.remote_dir)
# ensure there is a slash at end
if backup_source[-1] <> '/':
backup_source += '/'
options_params = " ".join(options)
cmd = '/usr/bin/rsync %s %s %s 2>&1' % (options_params,backup_source,dest_dir)
self.logger.debug("[%s] rsync : %s",self.backup_name,cmd)
if not self.dry_run:
self.line = ''
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
def ondata(data,context):
if context.verbose:
print data
context.logger.debug(data)
log = monitor_stdout(process,ondata,self)
for l in log.splitlines():
if l.startswith('Number of files:'):
stats['total_files_count'] += int(l.split(':')[1])
if l.startswith('Number of files transferred:'):
stats['written_files_count'] += int(l.split(':')[1])
if l.startswith('Total file size:'):
stats['total_bytes'] += int(l.split(':')[1].split()[0])
if l.startswith('Total transferred file size:'):
stats['written_bytes'] += int(l.split(':')[1].split()[0])
returncode = process.returncode
## deal with exit code 24 (file vanished)
if (returncode == 24):
self.logger.warning("[" + self.backup_name + "] Note: some files vanished before transfer")
elif (returncode == 23):
self.logger.warning("[" + self.backup_name + "] unable so set uid on some files")
elif (returncode != 0):
self.logger.error("[" + self.backup_name + "] shell program exited with error code ")
raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd)
else:
print cmd
#we suppress the .rsync suffix if everything went well
finaldest = os.path.join(self.backup_dir,self.backup_start_date)
self.logger.debug("[%s] renaming target directory from %s to %s" ,self.backup_name,dest_dir,finaldest)
if not self.dry_run:
os.rename(dest_dir, finaldest)
self.logger.debug("[%s] touching datetime of target directory %s" ,self.backup_name,finaldest)
print os.popen('touch "%s"' % finaldest).read()
else:
print "mv" ,dest_dir,finaldest
stats['backup_location'] = finaldest
stats['status']='OK'
stats['log']='ssh+rsync backup from %s OK, %d bytes written for %d changed files' % (backup_source,stats['written_bytes'],stats['written_files_count'])
except BaseException , e:
stats['status']='ERROR'
stats['log']=str(e)
raise
finally:
self.remove_lock()
def get_latest_backup(self,current):
result = []
filelist = os.listdir(self.backup_dir)
filelist.sort()
filelist.reverse()
full = ''
r_full = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
r_partial = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}.rsync$')
# we take all latest partials younger than the latest full and the latest full
for item in filelist:
if r_partial.match(item) and item<current:
result.append(item)
elif r_full.match(item) and item<current:
result.append(item)
break
return result
def register_existingbackups(self):
"""scan backup dir and insert stats in database"""
registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))]
filelist = os.listdir(self.backup_dir)
filelist.sort()
p = re.compile('^\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}$')
for item in filelist:
if p.match(item):
dir_name = os.path.join(self.backup_dir,item)
if not dir_name in registered:
start = datetime.datetime.strptime(item,'%Y%m%d-%Hh%Mm%S').isoformat()
if fileisodate(dir_name)>start:
stop = fileisodate(dir_name)
else:
stop = start
self.logger.info('Registering %s started on %s',dir_name,start)
self.logger.debug(' Disk usage %s','du -sb "%s"' % dir_name)
if not self.dry_run:
size_bytes = int(os.popen('du -sb "%s"' % dir_name).read().split('\t')[0])
else:
size_bytes = 0
self.logger.debug(' Size in bytes : %i',size_bytes)
if not self.dry_run:
self.dbstat.add(self.backup_name,self.server_name,'',\
backup_start=start,backup_end = stop,status='OK',total_bytes=size_bytes,backup_location=dir_name)
else:
self.logger.info('Skipping %s, already registered',dir_name)
def is_pid_still_running(self,lockfile):
f = open(lockfile)
lines = f.readlines()
f.close()
if len(lines)==0 :
self.logger.info("[" + self.backup_name + "] empty lock file, removing...")
return False
for line in lines:
if line.startswith('pid='):
pid = line.split('=')[1].strip()
if os.path.exists("/proc/" + pid):
self.logger.info("[" + self.backup_name + "] process still there")
return True
else:
self.logger.info("[" + self.backup_name + "] process not there anymore remove lock")
return False
else:
self.logger.info("[" + self.backup_name + "] incorrrect lock file : no pid line")
return False
def set_lock(self):
self.logger.debug("[" + self.backup_name + "] setting lock")
#TODO: improve for race condition
#TODO: also check if process is really there
if os.path.isfile(self.backup_dir + '/lock'):
self.logger.debug("[" + self.backup_name + "] File " + self.backup_dir + '/lock already exist')
if self.is_pid_still_running(self.backup_dir + '/lock')==False:
self.logger.info("[" + self.backup_name + "] removing lock file " + self.backup_dir + '/lock')
os.unlink(self.backup_dir + '/lock')
else:
return False
lockfile = open(self.backup_dir + '/lock',"w")
# Write all the lines at once:
lockfile.write('pid='+str(os.getpid()))
lockfile.write('\nbackup_time=' + self.backup_start_date)
lockfile.close()
return True
def remove_lock(self):
self.logger.debug("[%s] removing lock",self.backup_name )
os.unlink(self.backup_dir + '/lock')
class backup_rsync_ssh(backup_rsync):
    """Backup a directory on remote server with rsync and ssh protocol (requires rsync software on remote host)"""
    # driver type identifier, matched against the 'type' key of an .ini section
    type = 'rsync+ssh'
    # ssh transport mandates a remote user and a private key on top of the generic params
    required_params = backup_generic.required_params + ['remote_user','remote_dir','private_key']
    optional_params = backup_generic.optional_params + ['compression','bwlimit','ssh_port','exclude_list','protect_args','overload_args']
# expose both rsync drivers to the tisbackup driver registry
register_driver(backup_rsync)
register_driver(backup_rsync_ssh)
if __name__=='__main__':
    # ad-hoc manual test harness: runs a single hard-coded rsync backup
    # against a developer config file and stat database (not used in production)
    logger = logging.getLogger('tisbackup')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    cp = ConfigParser()
    cp.read('/opt/tisbackup/configtest.ini')
    dbstat = BackupStat('/backup/data/log/tisbackup.sqlite')
    b = backup_rsync('htouvet','/backup/data/htouvet',dbstat)
    b.read_config(cp)
    b.process_backup()
    print b.checknagios()

View File

@ -0,0 +1,181 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import datetime
from common import *
import XenAPI
import time
import logging
import re
import os.path
import datetime
import select
import urllib2, urllib
import base64
import socket
import pexpect
from stat import *
class backup_switch(backup_generic):
    """Backup a startup-config on a switch"""
    # driver type identifier, matched against the 'type' key of an .ini section
    type = 'switch'

    required_params = backup_generic.required_params + ['switch_ip','switch_user' , 'switch_type']
    optional_params = backup_generic.optional_params + ['switch_password']

    def switch_hp(self, filename):
        """Dump the configuration of an HP ProCurve switch over telnet into *filename*.

        Drives an interactive telnet session with pexpect; the send/expect
        sequence below is order-dependent and must not be reordered.
        """
        # probe the telnet port first so a down switch fails fast
        s = socket.socket()
        try:
            s.connect((self.switch_ip, 23))
            s.close()
        except:
            raise

        child=pexpect.spawn('telnet '+self.switch_ip)
        time.sleep(1)
        if self.switch_user != "":
            child.sendline(self.switch_user)
            child.sendline(self.switch_password+'\r')
        else:
            child.sendline(self.switch_password+'\r')
        try:
            child.expect("#")
        except:
            raise Exception("Bad Credentials")
        # raise the pager length so the whole config arrives without prompts
        child.sendline( "terminal length 1000\r")
        child.expect("#")
        child.sendline( "show config\r")
        child.maxread = 100000000
        child.expect("Startup.+$")
        lines = child.after
        if "-- MORE --" in lines:
            # pager still triggered: the config is longer than the terminal length set above
            raise Exception("Terminal lenght is not sufficient")
        child.expect("#")
        lines += child.before
        child.sendline("logout\r")
        child.send('y\r')
        # drop the echoed command line (first) and the trailing prompt (last)
        # NOTE(review): the file is re-opened in append mode for every line and
        # never explicitly closed -- relies on CPython refcount closing; confirm
        for line in lines.split("\n")[1:-1]:
            open(filename,"a").write(line.strip()+"\n")

    def switch_linksys_SRW2024(self, filename):
        """Dump the startup-config of a Linksys SRW2024 over telnet into *filename*.

        Goes through the menu UI, escapes to the hidden 'lcli' shell, then
        re-authenticates; the send/expect sequence is order-dependent.
        """
        # probe the telnet port first so a down switch fails fast
        s = socket.socket()
        try:
            s.connect((self.switch_ip, 23))
            s.close()
        except:
            raise

        child=pexpect.spawn('telnet '+self.switch_ip)
        time.sleep(1)
        if hasattr(self,'switch_password'):
            child.sendline(self.switch_user+'\t')
            child.sendline(self.switch_password+'\r')
        else:
            child.sendline(self.switch_user+'\r')
        try:
            child.expect('Menu')
        except:
            raise Exception("Bad Credentials")
        # Ctrl-Z escapes the menu UI to the raw prompt
        child.sendline('\032')
        child.expect('>')
        child.sendline('lcli')
        child.expect("Name:")
        if hasattr(self,'switch_password'):
            child.send(self.switch_user+'\r'+self.switch_password+'\r')
        else:
            child.sendline(self.switch_user)
        child.expect(".*#")
        # disable paging so the whole config streams in one chunk
        child.sendline( "terminal datadump")
        child.expect("#")
        child.sendline( "show startup-config")
        child.expect("#")
        lines = child.before
        if "Unrecognized command" in lines:
            raise Exception("Bad Credentials")
        child.sendline("exit")
        child.expect( ">")
        child.sendline("logout")
        # drop the echoed command line (first) and the trailing prompt (last)
        for line in lines.split("\n")[1:-1]:
            open(filename,"a").write(line.strip()+"\n")

    def switch_dlink_DGS1210(self, filename):
        """Download the binary config of a D-Link DGS-1210 via its web UI into *filename*."""
        login_data = urllib.urlencode({'Login' : self.switch_user, 'Password' : self.switch_password, 'currlang' : 0, 'BrowsingPage' : 'index_dlink.htm', 'changlang' : 0})
        req = urllib2.Request('http://%s/' % self.switch_ip, login_data)
        resp = urllib2.urlopen(req)
        if "Wrong password" in resp.read():
            raise Exception("Wrong password")
        # firmware-specific download URL; the Gambit token is an opaque session/auth
        # value -- presumably fixed for this firmware version, TODO confirm
        resp = urllib2.urlopen("http://%s/config.bin?Gambit=gdkdcdgdidbdkdadkdbgegngjgogkdbgegngjgog&dumy=1348649950256" % self.switch_ip)
        # NOTE(review): f is never explicitly closed -- relies on CPython refcounting
        f = open(filename, 'w')
        f.write(resp.read())

    def do_backup(self,stats):
        """Dispatch on switch_type, dump the config to a timestamped file and fill *stats*.

        Raises on unknown switch type or any dump failure; stats['status'] and
        stats['log'] record the error before the exception propagates.
        """
        try:
            dest_filename = os.path.join(self.backup_dir,"%s-%s" % (self.backup_name,self.backup_start_date))

            options = []
            options_params = " ".join(options)
            if "LINKSYS-SRW2024" == self.switch_type:
                dest_filename += '.txt'
                self.switch_linksys_SRW2024(dest_filename)
            elif self.switch_type in [ "HP-PROCURVE-4104GL", "HP-PROCURVE-2524" ]:
                dest_filename += '.txt'
                self.switch_hp(dest_filename)
            elif "DLINK-DGS1210" == self.switch_type:
                dest_filename += '.bin'
                self.switch_dlink_DGS1210(dest_filename)
            else:
                raise Exception("Unknown Switch type")

            # a switch config dump is always a single file
            stats['total_files_count']=1
            stats['written_files_count']=1
            stats['total_bytes']= os.stat(dest_filename).st_size
            stats['written_bytes'] = stats['total_bytes']
            stats['backup_location'] = dest_filename
            stats['status']='OK'
            stats['log']='Switch backup from %s OK, %d bytes written' % (self.server_name,stats['written_bytes'])

        except BaseException , e:
            stats['status']='ERROR'
            stats['log']=str(e)
            raise
# expose this driver to the tisbackup driver registry
register_driver(backup_switch)

if __name__=='__main__':
    # ad-hoc manual test harness (not used in production)
    logger = logging.getLogger('tisbackup')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    cp = ConfigParser()
    cp.read('/opt/tisbackup/configtest.ini')
    # NOTE(review): this instantiates backup_xva although the module defines
    # backup_switch -- looks like a copy/paste leftover from backup_xva.py; confirm
    b = backup_xva()
    b.read_config(cp)

View File

@ -0,0 +1,108 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import sys
import shutil
import datetime
import base64
import os
from common import *
class backup_xcp_metadata(backup_generic):
    """Backup metatdata of a xcp pool using xe pool-dump-database"""
    # driver type identifier, matched against the 'type' key of an .ini section
    type = 'xcp-dump-metadata'
    required_params = ['type','server_name','xcp_user','xcp_passwd','backup_name']
    # XCP API credentials, filled in from the .ini section
    xcp_user=''
    xcp_passwd=''

    def do_backup(self,stats):
        """Dump the XCP pool database with the local xe binary, gzip it and move
        it into the backup directory, recording progress/results in *stats*.

        The dump -> gzip -> move sequence below is order-dependent.
        """
        # NOTE(review): self.private_key is logged but is not in this driver's
        # required_params -- it comes from the backup_generic class defaults; confirm
        self.logger.debug('[%s] Connecting to %s with user root and key %s',self.backup_name,self.server_name,self.private_key)
        if os.path.isfile('/opt/xensource/bin/xe') == False:
            raise Exception('Aborting, /opt/xensource/bin/xe binary not present"')
        t = datetime.datetime.now()
        backup_start_date = t.strftime('%Y%m%d-%Hh%Mm%S')

        # dump pool medatadata
        localpath = os.path.join(self.backup_dir , 'xcp_metadata-' + backup_start_date + '.dump.gz')
        temppath = '/tmp/xcp_metadata-' + backup_start_date + '.dump'

        stats['status']='Dumping'
        if not self.dry_run:
            cmd = "/opt/xensource/bin/xe -s %s -u %s -pw %s pool-dump-database file-name=%s" %(self.server_name,self.xcp_user,self.xcp_passwd,temppath)
            self.logger.debug('[%s] Dump XCP Metadata : %s',self.backup_name,cmd)
            call_external_process(cmd)

        # zip the file
        stats['status']='Zipping'
        cmd = 'gzip %s ' %temppath
        self.logger.debug('[%s] Compress backup : %s',self.backup_name,cmd)
        if not self.dry_run:
            call_external_process(cmd)

        # get the file
        stats['status']='move to backup directory'
        self.logger.debug('[%s] Moving temp backup file %s to backup new path %s',self.backup_name,self.server_name,localpath)
        if not self.dry_run:
            shutil.move (temppath + '.gz' ,localpath)

        # NOTE(review): in dry_run mode written_bytes is never set, so the
        # stats['log'] line below would raise KeyError -- confirm dry_run path
        if not self.dry_run:
            stats['total_files_count']=1
            stats['written_files_count']=1
            stats['total_bytes']=os.stat(localpath).st_size
            stats['written_bytes']=os.stat(localpath).st_size
        stats['log']='gzip dump of DB %s:%s (%d bytes) to %s' % (self.server_name,'xcp metadata dump', stats['written_bytes'], localpath)
        stats['backup_location'] = localpath
        stats['status']='OK'

    def register_existingbackups(self):
        """scan metatdata backup files and insert stats in database"""
        registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',(self.backup_name,))]

        filelist = os.listdir(self.backup_dir)
        filelist.sort()
        # dump files are named <server>-YYYYMMDD-HHhMMmSS.dump.gz
        p = re.compile('^%s-(?P<date>\d{8,8}-\d{2,2}h\d{2,2}m\d{2,2}).dump.gz$' % self.server_name)
        for item in filelist:
            sr = p.match(item)
            if sr:
                file_name = os.path.join(self.backup_dir,item)
                # start time is parsed from the filename, end time from the file mtime
                start = datetime.datetime.strptime(sr.groups()[0],'%Y%m%d-%Hh%Mm%S').isoformat()
                if not file_name in registered:
                    self.logger.info('Registering %s from %s',file_name,fileisodate(file_name))
                    size_bytes = int(os.popen('du -sb "%s"' % file_name).read().split('\t')[0])
                    self.logger.debug('  Size in bytes : %i',size_bytes)
                    if not self.dry_run:
                        self.dbstat.add(self.backup_name,self.server_name,'',\
                                        backup_start=start,backup_end=fileisodate(file_name),status='OK',total_bytes=size_bytes,backup_location=file_name)
                else:
                    self.logger.info('Skipping %s from %s, already registered',file_name,fileisodate(file_name))
# expose this driver to the tisbackup driver registry
register_driver(backup_xcp_metadata)

165
libtisbackup/backup_xva.py Executable file
View File

@ -0,0 +1,165 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import datetime
from common import *
import XenAPI
import time
import logging
import re
import os.path
import os
import datetime
import select
import urllib2
import base64
import socket
from stat import *
class backup_xva(backup_generic):
"""Backup a VM running on a XCP server as a XVA file (requires xe tools and XenAPI)"""
type = 'xen-xva'
required_params = backup_generic.required_params + ['xcphost','password_file','server_name']
optional_params = backup_generic.optional_params + ['excluded_vbds','remote_user','private_key']
def export_xva(self, vdi_name, filename, dry_run):
user_xen, password_xen, null = open(self.password_file).read().split('\n')
session = XenAPI.Session('https://'+self.xcphost)
try:
session.login_with_password(user_xen,password_xen)
except XenAPI.Failure, error:
msg,ip = error.details
if msg == 'HOST_IS_SLAVE':
xcphost = ip
session = XenAPI.Session('https://'+xcphost)
session.login_with_password(user_xen,password_xen)
vm = session.xenapi.VM.get_by_name_label(vdi_name)[0]
status_vm = session.xenapi.VM.get_power_state(vm)
self.logger.debug("[%s] Status of VM: %s",self.backup_name,status_vm)
if status_vm == "Running":
self.logger.debug("[%s] Shudown in progress",self.backup_name)
if dry_run:
print "session.xenapi.VM.clean_shutdown(vm)"
else:
session.xenapi.VM.clean_shutdown(vm)
try:
try:
self.logger.debug("[%s] Copy in progress",self.backup_name)
socket.setdefaulttimeout(120)
auth = base64.encodestring("%s:%s" % (user_xen, password_xen)).strip()
url = "https://"+self.xcphost+"/export?uuid="+session.xenapi.VM.get_uuid(vm)
request = urllib2.Request(url)
request.add_header("Authorization", "Basic %s" % auth)
result = urllib2.urlopen(request)
if dry_run:
print "request = urllib2.Request(%s)" % url
print 'outputfile = open(%s, "wb")' % filename
else:
outputfile = open(filename, "wb")
for line in result:
outputfile.write(line)
outputfile.close()
except:
if os.path.exists(filename):
os.unlink(filename)
raise
finally:
if status_vm == "Running":
self.logger.debug("[%s] Starting in progress",self.backup_name)
if dry_run:
print "session.xenapi.Async.VM.start(vm,False,True)"
else:
session.xenapi.Async.VM.start(vm,False,True)
session.logout()
if os.path.exists(filename):
import tarfile
tar = tarfile.open(filename)
if not tar.getnames():
unlink(filename)
return("Tar error")
tar.close()
return(0)
def do_backup(self,stats):
try:
dest_filename = os.path.join(self.backup_dir,"%s-%s.%s" % (self.backup_name,self.backup_start_date,'xva'))
options = []
options_params = " ".join(options)
cmd = self.export_xva( self.server_name, dest_filename, self.dry_run)
if os.path.exists(dest_filename):
stats['written_bytes'] = os.stat(dest_filename)[ST_SIZE]
stats['total_files_count'] = 1
stats['written_files_count'] = 1
stats['total_bytes'] = stats['written_bytes']
else:
stats['written_bytes'] = 0
stats['backup_location'] = dest_filename
if cmd == 0:
stats['log']='XVA backup from %s OK, %d bytes written' % (self.server_name,stats['written_bytes'])
stats['status']='OK'
else:
stats['status']='ERROR'
stats['log']=cmd
except BaseException , e:
stats['status']='ERROR'
stats['log']=str(e)
raise
# expose this driver to the tisbackup driver registry
register_driver(backup_xva)

if __name__=='__main__':
    # ad-hoc manual test harness (not used in production)
    logger = logging.getLogger('tisbackup')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    cp = ConfigParser()
    cp.read('/opt/tisbackup/configtest.ini')
    # NOTE(review): backup_xva is constructed without the (name, dir, dbstat)
    # arguments other drivers' harnesses pass -- confirm the constructor signature
    b = backup_xva()
    b.read_config(cp)

909
libtisbackup/common.py Normal file
View File

@ -0,0 +1,909 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import subprocess
import re
import logging
import datetime
import time
from iniparse import ConfigParser
import sqlite3
import shutil
import select
import sys
try:
sys.stderr = open('/dev/null') # Silence silly warnings from paramiko
import paramiko
except ImportError,e:
print "Error : can not load paramiko library %s" % e
raise
sys.stderr = sys.__stderr__
# standard Nagios plugin exit codes, used by the drivers' checknagios reporting
nagiosStateOk = 0
nagiosStateWarning = 1
nagiosStateCritical = 2
nagiosStateUnknown = 3
# global registry mapping a driver 'type' string to its backup driver class
backup_drivers = {}

def register_driver(driverclass):
    """Register *driverclass* in the global backup_drivers registry, keyed by its type attribute."""
    backup_drivers.update({driverclass.type: driverclass})
def datetime2isodate(adatetime=None):
    """Return *adatetime* (or the current time when omitted) as an ISO-8601 string."""
    when = adatetime
    if not when:
        when = datetime.datetime.now()
    assert(isinstance(when,datetime.datetime))
    return when.isoformat()
def isodate2datetime(isodatestr):
    """Parse an ISO-8601 string back into a datetime.

    The fractional-second part is dropped first: python2.5 strptime cannot
    parse microseconds.
    """
    main_part = isodatestr.split('.')[0]
    return datetime.datetime.strptime(main_part, "%Y-%m-%dT%H:%M:%S")
def time2display(adatetime):
    """Format a datetime for human display as 'YYYY-MM-DD HH:MM'."""
    return format(adatetime, "%Y-%m-%d %H:%M")
def hours_minutes(hours):
    """Convert a fractional hour count into an 'HH:MM' string (None passes through)."""
    if hours is None:
        return None
    whole_hours = int(hours)
    minutes = int((hours - whole_hours) * 60.0)
    return "%02i:%02i" % (whole_hours, minutes)
def fileisodate(filename):
    """Return the modification time of *filename* as an ISO-8601 string."""
    mtime = os.stat(filename).st_mtime
    return datetime.datetime.fromtimestamp(mtime).isoformat()
def dateof(adatetime):
    """Truncate *adatetime* to midnight of the same day (tzinfo is preserved)."""
    midnight = adatetime.replace(hour=0, minute=0, second=0, microsecond=0)
    return midnight
#####################################
# http://code.activestate.com/recipes/498181-add-thousands-separator-commas-to-formatted-number/
# Code from Michael Robellard's comment made 28 Feb 2010
# Modified for leading +, -, space on 1 Mar 2010 by Glenn Linderman
#
# Tail recursion removed and leading garbage handled on March 12 2010, Alessandro Forghieri
def splitThousands( s, tSep=',', dSep='.'):
    '''Splits a general float on thousands. GIGO on general input'''
    # NOTE(review): returns int 0 (not a string) for None input -- confirm callers
    if s == None:
        return 0
    if not isinstance( s, str ):
        s = str( s )

    # skip any leading prefix (sign, spaces...) that is neither a digit nor the decimal separator
    cnt=0
    numChars=dSep+'0123456789'
    ls=len(s)
    while cnt < ls and s[cnt] not in numChars: cnt += 1

    # lhs keeps the untouched prefix; s becomes the numeric remainder
    lhs = s[ 0:cnt ]
    s = s[ cnt: ]
    # locate the rightmost decimal separator; rhs keeps the fractional part untouched
    if dSep == '':
        cnt = -1
    else:
        cnt = s.rfind( dSep )

    if cnt > 0:
        rhs = dSep + s[ cnt+1: ]
        s = s[ :cnt ]
    else:
        rhs = ''

    # insert tSep every three digits, building the grouped string from the right
    splt=''
    while s != '':
        splt= s[ -3: ] + tSep + splt
        s = s[ :-3 ]

    # splt always carries one trailing tSep; strip it while re-assembling
    return lhs + splt[ :-1 ] + rhs
def call_external_process(shell_string):
    """Run *shell_string* through the shell; raise Exception if it exits non-zero."""
    exit_code = subprocess.call(shell_string, shell=True)
    if exit_code != 0:
        raise Exception('shell program exited with error code ' + str(exit_code), shell_string)
def check_string(test_string):
    """Print a warning when *test_string* contains characters outside [.A-Za-z0-9-_]."""
    pattern = r'[^\.A-Za-z0-9\-_]'
    if re.search(pattern, test_string):
        #Character other then . a-z 0-9 was found
        print 'Invalid : %r' % (test_string,)
def convert_bytes(bytes):
    """Render a byte count as a human-readable string with a T/G/M/K/b suffix.

    None passes through unchanged; otherwise the value is formatted with two
    decimals against the largest matching binary unit.
    """
    if bytes is None:
        return None
    value = float(bytes)
    # largest unit first; each entry is (threshold in bytes, suffix letter)
    units = (
        (1099511627776, 'T'),
        (1073741824, 'G'),
        (1048576, 'M'),
        (1024, 'K'),
    )
    for threshold, suffix in units:
        if value >= threshold:
            return '%.2f%s' % (value / threshold, suffix)
    return '%.2fb' % value
## {{{ http://code.activestate.com/recipes/81189/ (r2)
def pp(cursor, data=None, rowlens=0, callback=None):
    """
    pretty print a query result as a table

    callback is a function called for each field (fieldname,value) to format the output

    Args:
        cursor: DB-API cursor whose .description provides column names/widths.
        data: pre-fetched rows; when falsy, rows are fetched from the cursor.
        rowlens: when truthy, widen each column to its longest formatted value.
        callback: optional (fieldname, value) -> displayable value formatter.
    Returns:
        str: the formatted table, or a placeholder when there is no result set.
    """
    def defaultcb(fieldname,value):
        return value

    if not callback:
        callback = defaultcb

    d = cursor.description
    if not d:
        return "#### NO RESULTS ###"
    names = []
    lengths = []
    rules = []
    if not data:
        data = cursor.fetchall()
    for dd in d:    # iterate over description
        # renamed from 'l': avoid the ambiguous single-letter name
        col_len = dd[1]
        if not col_len:
            col_len = 12             # default width when the driver reports none
        col_len = max(col_len, len(dd[0]))  # handle long names
        names.append(dd[0])
        lengths.append(col_len)
    for col in range(len(lengths)):
        if rowlens:
            rls = [len(str(callback(d[col][0],row[col]))) for row in data if row[col]]
            lengths[col] = max([lengths[col]]+rls)
        rules.append("-"*lengths[col])
    # renamed from 'format': do not shadow the builtin; one left-aligned
    # fixed-width field per column
    row_format = " ".join(["%%-%ss" % width for width in lengths])
    result = [row_format % tuple(names)]
    result.append(row_format % tuple(rules))
    for row in data:
        row_cb=[]
        for col in range(len(d)):
            row_cb.append(callback(d[col][0],row[col]))
        result.append(row_format % tuple(row_cb))
    return "\n".join(result)
## end of http://code.activestate.com/recipes/81189/ }}}
def html_table(cur,callback=None):
    """
    Render the rows of the query cursor *cur* as an HTML table.

    callback is a function taking (rowmap, fieldname, value) and returning
    a text representation of the value.
    """
    def safe_unicode(iso):
        # decode legacy iso8859 byte strings to unicode (python2 str semantics)
        if iso is None:
            return None
        elif isinstance(iso, str):
            return iso.decode('iso8859')
        else:
            return iso
    def itermap(cur):
        # yield each cursor row as a fieldname -> value dict
        for row in cur:
            yield dict((cur.description[idx][0], value)
                for idx, value in enumerate(row))
    # NOTE(review): cell values are not HTML-escaped; safe only for trusted data -- confirm
    head=u"<tr>"+"".join(["<th>"+c[0]+"</th>" for c in cur.description])+"</tr>"
    lines=""
    if callback:
        for r in itermap(cur):
            lines=lines+"<tr>"+"".join(["<td>"+str(callback(r,c[0],safe_unicode(r[c[0]])))+"</td>" for c in cur.description])+"</tr>"
    else:
        for r in cur:
            lines=lines+"<tr>"+"".join(["<td>"+safe_unicode(c)+"</td>" for c in r])+"</tr>"

    return "<table border=1 cellpadding=2 cellspacing=0>%s%s</table>" % (head,lines)
def monitor_stdout(aprocess, onoutputdata,context):
    """Reads data from stdout and stderr from aprocess and return as a string
       on each chunk, call a call back onoutputdata(dataread)

    Both pipes are multiplexed with select() and read one byte at a time so
    lines can be surfaced to the callback as soon as they complete.
    """
    assert(isinstance(aprocess,subprocess.Popen))
    read_set = []
    stdout = []
    line = ''

    if aprocess.stdout:
        read_set.append(aprocess.stdout)
    if aprocess.stderr:
        read_set.append(aprocess.stderr)

    while read_set:
        try:
            rlist, wlist, xlist = select.select(read_set, [], [])
        except select.error, e:
            # retry when select() is interrupted by a signal
            # NOTE(review): errno does not appear in this module's imports -- confirm,
            # otherwise this handler itself raises NameError
            if e.args[0] == errno.EINTR:
                continue
            raise

        # Reads one line from stdout
        if aprocess.stdout in rlist:
            data = os.read(aprocess.stdout.fileno(), 1)
            if data == "":
                # EOF: stop watching this pipe
                aprocess.stdout.close()
                read_set.remove(aprocess.stdout)
            while data and not data in ('\n','\r'):
                line += data
                data = os.read(aprocess.stdout.fileno(), 1)
            if line or data in ('\n','\r'):
                stdout.append(line)
                if onoutputdata:
                    onoutputdata(line,context)
                line=''

        # Reads one line from stderr
        if aprocess.stderr in rlist:
            data = os.read(aprocess.stderr.fileno(), 1)
            if data == "":
                # EOF: stop watching this pipe
                aprocess.stderr.close()
                read_set.remove(aprocess.stderr)
            while data and not data in ('\n','\r'):
                line += data
                data = os.read(aprocess.stderr.fileno(), 1)
            if line or data in ('\n','\r'):
                stdout.append(line)
                if onoutputdata:
                    onoutputdata(line,context)
                line=''

    aprocess.wait()
    # flush any trailing partial line
    if line:
        stdout.append(line)
        if onoutputdata:
            onoutputdata(line,context)
    return "\n".join(stdout)
class BackupStat:
    """Persistence layer for backup run statistics, backed by a sqlite3 file."""
    dbpath = ''
    db = None
    logger = logging.getLogger('tisbackup')

    def __init__(self,dbpath):
        """Open (or create) the stat database at *dbpath*, upgrading old schemas."""
        self.dbpath = dbpath
        if not os.path.isfile(self.dbpath):
            self.db=sqlite3.connect(self.dbpath)
            self.initdb()
        else:
            self.db=sqlite3.connect(self.dbpath)
            # pre-TYPE databases lack the TYPE column: detect it from the cursor description
            if not "'TYPE'" in str(self.db.execute("select * from stats").description):
                self.updatedb()

    def updatedb(self):
        """Upgrade an old schema in place: add the TYPE column and backfill it."""
        self.logger.debug('Update stat database')
        self.db.execute("alter table stats add column TYPE TEXT;")
        self.db.execute("update stats set TYPE='BACKUP';")
        self.db.commit()

    def initdb(self):
        """Create the stats table and its indexes in a brand new database."""
        assert(isinstance(self.db,sqlite3.Connection))
        self.logger.debug('Initialize stat database')
        self.db.execute("""
create table stats (
  backup_name TEXT,
  server_name TEXT,
  description TEXT,
  backup_start TEXT,
  backup_end TEXT,
  backup_duration NUMERIC,
  total_files_count INT,
  written_files_count INT,
  total_bytes INT,
  written_bytes INT,
  status TEXT,
  log TEXT,
  backup_location TEXT,
  TYPE TEXT)""")
        self.db.execute("""
create index idx_stats_backup_name on stats(backup_name);""")
        self.db.execute("""
create index idx_stats_backup_location on stats(backup_location);""")
        self.db.commit()

    def start(self,backup_name,server_name,TYPE,description='',backup_location=None):
        """ Add in stat DB a record for the newly running backup"""
        return self.add(backup_name=backup_name,server_name=server_name,description=description,backup_start=datetime2isodate(),status='Running',TYPE=TYPE)

    def finish(self,rowid,total_files_count=None,written_files_count=None,total_bytes=None,written_bytes=None,log=None,status='OK',backup_end=None,backup_duration=None,backup_location=None):
        """ Update record in stat DB for the finished backup"""
        if not backup_end:
            backup_end = datetime2isodate()
        if backup_duration == None:
            try:
                # get duration using start of backup datetime
                backup_duration = (isodate2datetime(backup_end) - isodate2datetime(self.query('select backup_start from stats where rowid=?',(rowid,))[0]['backup_start'])).seconds / 3600.0
            except:
                backup_duration = None

        # update stat record
        self.db.execute("""\
update stats set
  total_files_count=?,written_files_count=?,total_bytes=?,written_bytes=?,log=?,status=?,backup_end=?,backup_duration=?,backup_location=?
where
  rowid = ?
""",(total_files_count,written_files_count,total_bytes,written_bytes,log,status,backup_end,backup_duration,backup_location,rowid))
        self.db.commit()

    def add(self,
            backup_name='',
            server_name='',
            description='',
            backup_start=None,
            backup_end=None,
            backup_duration=None,
            total_files_count=None,
            written_files_count=None,
            total_bytes=None,
            written_bytes=None,
            status='draft',
            log='',
            TYPE='',
            backup_location=None):
        """Insert a raw stat record and return its rowid (missing dates default to now)."""
        if not backup_start:
            backup_start=datetime2isodate()
        if not backup_end:
            backup_end=datetime2isodate()

        cur = self.db.execute("""\
insert into stats (
  backup_name,
  server_name,
  description,
  backup_start,
  backup_end,
  backup_duration,
  total_files_count,
  written_files_count,
  total_bytes,
  written_bytes,
  status,
  log,
  backup_location,
  TYPE) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)
""",(
     backup_name,
     server_name,
     description,
     backup_start,
     backup_end,
     backup_duration,
     total_files_count,
     written_files_count,
     total_bytes,
     written_bytes,
     status,
     log,
     backup_location,
     TYPE)
   )

        self.db.commit()
        return cur.lastrowid

    def query(self,query, args=(), one=False):
        """
        Run *query* against the database and return the rows as a list of dicts
        (or a single dict / None when one=True).
        """
        cur = self.db.execute(query, args)
        rv = [dict((cur.description[idx][0], value)
                   for idx, value in enumerate(row)) for row in cur.fetchall()]
        return (rv[0] if rv else None) if one else rv

    def last_backups(self,backup_name,count=30):
        """Pretty-print to stdout the *count* most recent backups (all names when backup_name is empty)."""
        if backup_name:
            cur = self.db.execute('select * from stats where backup_name=? order by backup_end desc limit ?',(backup_name,count))
        else:
            cur = self.db.execute('select * from stats order by backup_end desc limit ?',(count,))

        def fcb(fieldname,value):
            # render dates, byte counts, file counts and durations in human form
            if fieldname in ('backup_start','backup_end'):
                return time2display(isodate2datetime(value))
            elif 'bytes' in fieldname:
                return convert_bytes(value)
            elif 'count' in fieldname:
                return splitThousands(value,' ','.')
            elif 'backup_duration' in fieldname:
                return hours_minutes(value)
            else:
                return value

        #for r in self.query('select * from stats where backup_name=? order by backup_end desc limit ?',(backup_name,count)):
        print pp(cur,None,1,fcb)

    def fcb(self,fields,fieldname,value):
        """Field formatter used by as_html: humanize dates, sizes, counts and durations."""
        if fieldname in ('backup_start','backup_end'):
            return time2display(isodate2datetime(value))
        elif 'bytes' in fieldname:
            return convert_bytes(value)
        elif 'count' in fieldname:
            return splitThousands(value,' ','.')
        elif 'backup_duration' in fieldname:
            return hours_minutes(value)
        else:
            return value

    def as_html(self,cur):
        """Render *cur* (or, when falsy, the whole stats table) as an HTML table."""
        if cur:
            return html_table(cur,self.fcb)
        else:
            return html_table(self.db.execute('select * from stats order by backup_start asc'),self.fcb)
def ssh_exec(command,ssh=None,server_name='',remote_user='',private_key='',ssh_port=22):
    """execute command on server_name using the provided ssh connection
    or creates a new connection if ssh is not provided.
    returns (exit_code,output)

    output is the concatenation of stdout and stderr
    """
    if not ssh:
        assert(server_name and remote_user and private_key)
        # the key file may be either RSA or DSS; try RSA first
        try:
            mykey = paramiko.RSAKey.from_private_key_file(private_key)
        except paramiko.SSHException:
            mykey = paramiko.DSSKey.from_private_key_file(private_key)

        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # BUGFIX: pass the loaded key object, not the key file path
        # (paramiko's pkey parameter expects a PKey instance; mykey was
        # loaded above but unused — do_preexec/do_postexec do it correctly)
        ssh.connect(server_name,username=remote_user,pkey = mykey,port=ssh_port)

    tran = ssh.get_transport()
    chan = tran.open_session()

    # chan.set_combine_stderr(True)
    chan.get_pty()
    stdout = chan.makefile()

    chan.exec_command(command)
    stdout.flush()
    output = stdout.read()
    exit_code = chan.recv_exit_status()
    return (exit_code,output)
class backup_generic:
    """Generic ancestor class for backups, not registered.

    Concrete drivers subclass this, set ``type`` and the param lists,
    implement do_backup(), and are made available to the runner through
    register_driver().  Run statistics are recorded via ``dbstat`` (a
    BackupStat) unless dry_run is set.
    """
    # identifier matched against the "type=" key of .ini backup sections
    type = 'generic'
    # .ini keys that must / may appear in a backup section
    required_params = ['type','backup_name','backup_dir','server_name','backup_retention_time','maximum_backup_age']
    optional_params = ['preexec','postexec','description','private_key','remote_user','ssh_port']

    logger = logging.getLogger('tisbackup')
    backup_name = ''
    backup_dir = ''
    server_name = ''
    remote_user = 'root'
    description = ''
    dbstat = None                  # BackupStat used to record runs, or None
    dry_run = False
    preexec = ''                   # remote shell command run before the backup
    postexec = ''                  # remote shell command run after the backup
    maximum_backup_age = None      # hours; used by checknagios()
    backup_retention_time = None   # days; used by cleanup_backup()
    verbose = False
    private_key=''
    ssh_port=22

    def __init__(self,backup_name, backup_dir,dbstat=None,dry_run=False):
        """backup_name : section name, also used in paths and DB keys.
        backup_dir : destination directory, created if missing.
        dbstat : optional BackupStat database.
        dry_run : when True, log actions without performing them.
        """
        # backup_name ends up in filesystem paths and DB rows, keep it safe
        if not re.match('^[A-Za-z0-9_\-\.]*$',backup_name):
            raise Exception('The backup name %s should contain only alphanumerical characters' % backup_name)

        self.backup_name = backup_name
        self.backup_dir = backup_dir

        self.dbstat = dbstat
        assert(isinstance(self.dbstat,BackupStat) or self.dbstat==None)

        if not os.path.isdir(self.backup_dir):
            os.makedirs(self.backup_dir)

        self.dry_run = dry_run

    @classmethod
    def get_help(cls):
        """Return a short help string describing the driver and its params."""
        return """\
%(type)s : %(desc)s
 Required params : %(required)s
 Optional params : %(optional)s
""" % {'type':cls.type,
        'desc':cls.__doc__,
        'required':",".join(cls.required_params),
        'optional':",".join(cls.optional_params)}

    def check_required_params(self):
        """Raise if any required param is missing/empty, or if pre/postexec
        is configured without the SSH credentials needed to run it."""
        for name in self.required_params:
            if not hasattr(self,name) or not getattr(self,name):
                raise Exception('[%s] Config Attribute %s is required' % (self.backup_name,name))
        if (self.preexec or self.postexec) and (not self.private_key or not self.remote_user):
            raise Exception('[%s] remote_user and private_key file required if preexec or postexec is used' % self.backup_name)

    def read_config(self,iniconf):
        """Read this backup's section from *iniconf*, apply [global]
        defaults for retention/age, normalize numeric params, validate."""
        assert(isinstance(iniconf,ConfigParser))
        allowed_params = self.required_params+self.optional_params
        for (name,value) in iniconf.items(self.backup_name):
            if not name in allowed_params:
                self.logger.critical('[%s] Invalid param name "%s"', self.backup_name,name);
                # NOTE(review): Exception receives %-style args that are never
                # interpolated — the message will show the raw tuple; confirm
                raise Exception('[%s] Invalid param name "%s"', self.backup_name,name)
            self.logger.debug('[%s] reading param %s = %s ', self.backup_name,name,value)
            setattr(self,name,value)

        # if retention (in days) is not defined at section level, get default global one.
        if not self.backup_retention_time:
            self.backup_retention_time = iniconf.getint('global','backup_retention_time')

        # for nagios, if maximum last backup age (in hours) is not defined at section level, get default global one.
        if not self.maximum_backup_age:
            self.maximum_backup_age = iniconf.getint('global','maximum_backup_age')

        # ini values are strings: normalize the numeric parameters
        self.ssh_port = int(self.ssh_port)
        self.backup_retention_time = int(self.backup_retention_time)
        self.maximum_backup_age = int(self.maximum_backup_age)

        self.check_required_params()

    def do_preexec(self,stats):
        """Run self.preexec on the remote host over SSH.

        Returns the remote exit code, or 0 without executing on dry run.
        """
        self.logger.info("[%s] executing preexec %s ",self.backup_name,self.preexec)
        # the key file may be either RSA or DSS; try RSA first
        try:
            mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
        except paramiko.SSHException:
            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)

        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(self.server_name,username=self.remote_user,pkey = mykey)

        tran = ssh.get_transport()
        chan = tran.open_session()

        # chan.set_combine_stderr(True)
        chan.get_pty()
        stdout = chan.makefile()

        if not self.dry_run:
            chan.exec_command(self.preexec)
            output = stdout.read()
            exit_code = chan.recv_exit_status()
            self.logger.info('[%s] preexec exit code : "%i", output : %s',self.backup_name , exit_code, output )
            return exit_code
        else:
            return 0

    def do_postexec(self,stats):
        """Run self.postexec on the remote host over SSH.

        Returns the remote exit code, or 0 without executing on dry run.
        """
        self.logger.info("[%s] executing postexec %s ",self.backup_name,self.postexec)
        # the key file may be either RSA or DSS; try RSA first
        try:
            mykey = paramiko.RSAKey.from_private_key_file(self.private_key)
        except paramiko.SSHException:
            mykey = paramiko.DSSKey.from_private_key_file(self.private_key)

        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(self.server_name,username=self.remote_user,pkey = mykey)

        tran = ssh.get_transport()
        chan = tran.open_session()

        # chan.set_combine_stderr(True)
        chan.get_pty()
        stdout = chan.makefile()

        if not self.dry_run:
            chan.exec_command(self.postexec)
            output = stdout.read()
            exit_code = chan.recv_exit_status()
            self.logger.info('[%s] postexec exit code : "%i", output : %s',self.backup_name , exit_code, output )
            return exit_code
        else:
            return 0

    def do_backup(self,stats):
        """stats dict with keys : total_files_count,written_files_count,total_bytes,written_bytes

        Abstract hook: concrete drivers perform the actual backup here and
        fill *stats* (including 'status', 'log' and 'backup_location').
        """
        pass

    def check_params_connections(self):
        """Perform a dry run trying to connect without actually doing backup"""
        self.check_required_params()

    def process_backup(self):
        """Process the backup.
        launch
        - do_preexec
        - do_backup
        - do_postexec

        returns a dict for stats

        On any failure the stats row (when dbstat is set) is finished with
        status ERROR and the exception is re-raised.
        """
        self.logger.info('[%s] ######### Starting backup',self.backup_name)

        starttime = time.time()
        self.backup_start_date = datetime.datetime.now().strftime('%Y%m%d-%Hh%Mm%S')

        # open a stats row now; it is closed with finish() in both outcomes
        if not self.dry_run and self.dbstat:
            stat_rowid = self.dbstat.start(backup_name=self.backup_name,server_name=self.server_name,TYPE="BACKUP")
        else:
            stat_rowid = None

        try:
            stats = {}
            stats['total_files_count']=0
            stats['written_files_count']=0
            stats['total_bytes']=0
            stats['written_bytes']=0
            stats['log']=''
            stats['status']='Running'
            stats['backup_location']=None

            if self.preexec.strip():
                exit_code = self.do_preexec(stats)
                if exit_code != 0 :
                    raise Exception('Preexec "%s" failed with exit code "%i"' % (self.preexec,exit_code))

            self.do_backup(stats)

            if self.postexec.strip():
                exit_code = self.do_postexec(stats)
                if exit_code != 0 :
                    raise Exception('Postexec "%s" failed with exit code "%i"' % (self.postexec,exit_code))

            endtime = time.time()
            # duration is stored in hours
            duration = (endtime-starttime)/3600.0
            if not self.dry_run and self.dbstat:
                self.dbstat.finish(stat_rowid,
                    backup_end=datetime2isodate(datetime.datetime.now()),
                    backup_duration = duration,
                    total_files_count=stats['total_files_count'],
                    written_files_count=stats['written_files_count'],
                    total_bytes=stats['total_bytes'],
                    written_bytes=stats['written_bytes'],
                    status=stats['status'],
                    log=stats['log'],
                    backup_location=stats['backup_location'])
            self.logger.info('[%s] ######### Backup finished : %s',self.backup_name,stats['log'])
            return stats

        except BaseException, e:
            stats['status']='ERROR'
            stats['log']=str(e)

            endtime = time.time()
            duration = (endtime-starttime)/3600.0
            if not self.dry_run and self.dbstat:
                self.dbstat.finish(stat_rowid,
                    backup_end=datetime2isodate(datetime.datetime.now()),
                    backup_duration = duration,
                    total_files_count=stats['total_files_count'],
                    written_files_count=stats['written_files_count'],
                    total_bytes=stats['total_bytes'],
                    written_bytes=stats['written_bytes'],
                    status=stats['status'],
                    log=stats['log'],
                    backup_location=stats['backup_location'])

            self.logger.error('[%s] ######### Backup finished with ERROR: %s',self.backup_name,stats['log'])
            raise

    def checknagios(self,maxage_hours=30):
        """
        Returns a tuple (nagiosstatus,message) for the current backup_name
        Read status from dbstat database
        """
        if not self.dbstat:
            self.logger.warn('[%s] checknagios : no database provided',self.backup_name)
            # NOTE(review): tuple order here is (message,status), while every
            # other return is (status,message) — looks inverted, confirm
            return ('No database provided',nagiosStateUnknown)
        else:
            self.logger.debug('[%s] checknagios : sql query "%s" %s',self.backup_name,'select status, backup_end, log from stats where TYPE=\'BACKUP\' AND backup_name=? order by backup_end desc limit 30',self.backup_name)
            q = self.dbstat.query('select status, backup_start, backup_end, log, backup_location, total_bytes from stats where TYPE=\'BACKUP\' AND backup_name=? order by backup_start desc limit 30',(self.backup_name,))
            if not q:
                self.logger.debug('[%s] checknagios : no result from query',self.backup_name)
                return (nagiosStateCritical,'CRITICAL : No backup found for %s in database' % self.backup_name)
            else:
                mindate = datetime2isodate((datetime.datetime.now() - datetime.timedelta(hours=maxage_hours)))
                self.logger.debug('[%s] checknagios : looking for most recent OK not older than %s',self.backup_name,mindate)
                # first pass: most recent OK backup younger than maxage_hours
                for b in q:
                    if b['backup_end'] >= mindate and b['status'] == 'OK':
                        # check if backup actually exists on registered backup location and is newer than backup start date
                        if b['total_bytes'] == 0:
                            return (nagiosStateWarning,"WARNING : No data to backup was found for %s" % (self.backup_name,))

                        if not b['backup_location']:
                            return (nagiosStateWarning,"WARNING : No Backup location found for %s finished on (%s) %s" % (self.backup_name,isodate2datetime(b['backup_end']),b['log']))

                        if os.path.isfile(b['backup_location']):
                            backup_actual_date = datetime.datetime.fromtimestamp(os.stat(b['backup_location']).st_ctime)
                            # allow one hour of slack between file ctime and recorded start
                            if backup_actual_date + datetime.timedelta(hours = 1) > isodate2datetime(b['backup_start']):
                                return (nagiosStateOk,"OK Backup %s (%s), %s" % (self.backup_name,isodate2datetime(b['backup_end']),b['log']))
                            else:
                                return (nagiosStateCritical,"CRITICAL Backup %s (%s), %s seems older than start of backup" % (self.backup_name,isodate2datetime(b['backup_end']),b['log']))
                        elif os.path.isdir(b['backup_location']):
                            return (nagiosStateOk,"OK Backup %s (%s), %s" % (self.backup_name,isodate2datetime(b['backup_end']),b['log']))
                        else:
                            return (nagiosStateCritical,"CRITICAL Backup %s (%s), %s has disapeared from backup location %s" % (self.backup_name,isodate2datetime(b['backup_end']),b['log'],b['backup_location']))

                self.logger.debug('[%s] checknagios : looking for most recent Warning or Running not older than %s',self.backup_name,mindate)
                # second pass: a recent backup still running or in warning
                for b in q:
                    if b['backup_end'] >= mindate and b['status'] in ('Warning','Running'):
                        return (nagiosStateWarning,'WARNING : Backup %s still running or warning. %s' % (self.backup_name,b['log']))

                self.logger.debug('[%s] checknagios : No Ok or warning recent backup found',self.backup_name)
                return (nagiosStateCritical,'CRITICAL : No recent backup for %s' % self.backup_name )

    def cleanup_backup(self):
        """Removes obsolete backups (older than backup_retention_time)

        Only runs when at least one OK backup newer than the retention
        window still exists on disk; returns the list of
        (backup_name,location) pairs processed.
        """
        mindate = datetime2isodate((dateof(datetime.datetime.now()) - datetime.timedelta(days=self.backup_retention_time)))
        # check if there is at least 1 "OK" backup left after cleanup :
        ok_backups = self.dbstat.query('select backup_location from stats where TYPE="BACKUP" and backup_name=? and backup_start>=? and status="OK" order by backup_start desc',(self.backup_name,mindate))
        removed = []
        if ok_backups and os.path.exists(ok_backups[0]['backup_location']):
            records = self.dbstat.query('select status, backup_start, backup_end, log, backup_location from stats where backup_name=? and backup_start<? and backup_location is not null and TYPE="BACKUP" order by backup_start',(self.backup_name,mindate))
            if records:
                for oldbackup_location in [rec['backup_location'] for rec in records if rec['backup_location']]:
                    try:
                        # only remove paths that live under our backup_dir
                        if os.path.isdir(oldbackup_location) and self.backup_dir in oldbackup_location :
                            self.logger.info('[%s] removing directory "%s"',self.backup_name,oldbackup_location)
                            if not self.dry_run:
                                # .encode('ascii') presumably forces a py2 str for shutil — TODO confirm
                                shutil.rmtree(oldbackup_location.encode('ascii'))
                        if os.path.isfile(oldbackup_location) and self.backup_dir in oldbackup_location :
                            self.logger.debug('[%s] removing file "%s"',self.backup_name,oldbackup_location)
                            if not self.dry_run:
                                os.remove(oldbackup_location)
                        self.logger.debug('Cleanup_backup : Removing records from DB : [%s]-"%s"',self.backup_name,oldbackup_location)
                        if not self.dry_run:
                            # keep the rows but retag them so they no longer count as backups
                            self.dbstat.db.execute('update stats set TYPE="CLEAN" where backup_name=? and backup_location=?',(self.backup_name,oldbackup_location))
                            self.dbstat.db.commit()
                    except BaseException,e:
                        # best effort: log and continue with the next location
                        self.logger.error('cleanup_backup : Unable to remove directory/file "%s". Error %s', oldbackup_location,e)
                    removed.append((self.backup_name,oldbackup_location))
            else:
                self.logger.debug('[%s] cleanup : no result for query',self.backup_name)
        else:
            self.logger.info('Nothing to do because we want to keep at least one OK backup after cleaning')

        self.logger.info('[%s] Cleanup finished : removed : %s' , self.backup_name,','.join([('[%s]-"%s"') % r for r in removed]) or 'Nothing')
        return removed

    def register_existingbackups(self):
        """scan existing backups and insert stats in database

        Abstract: always raises; concrete drivers must override.
        """
        # NOTE(review): args should probably be the tuple (self.backup_name,);
        # harmless here since the method raises right after — confirm in overrides
        registered = [b['backup_location'] for b in self.dbstat.query('select distinct backup_location from stats where backup_name=?',self.backup_name)]
        raise Exception('Abstract method')

    def export_latestbackup(self,destdir):
        """Copy (rsync) latest OK backup to external storage located at locally mounted "destdir"

        Records the export as a TYPE="EXPORT" stats row and returns the
        stats dict.  Raises when no database or no OK backup is available.
        """
        stats = {}
        stats['total_files_count']=0
        stats['written_files_count']=0
        stats['total_bytes']=0
        stats['written_bytes']=0
        stats['log']=''
        stats['status']='Running'
        if not self.dbstat:
            self.logger.critical('[%s] export_latestbackup : no database provided',self.backup_name)
            raise Exception('No database')
        else:
            latest_sql = """\
               select status, backup_start, backup_end, log, backup_location, total_bytes
                 from stats
                 where backup_name=? and status='OK' and TYPE='BACKUP'
                 order by backup_start desc limit 30"""
            self.logger.debug('[%s] export_latestbackup : sql query "%s" %s',self.backup_name,latest_sql,self.backup_name)
            q = self.dbstat.query(latest_sql,(self.backup_name,))
            if not q:
                self.logger.debug('[%s] export_latestbackup : no result from query',self.backup_name)
                raise Exception('No OK backup found for %s in database' % self.backup_name)
            else:
                latest = q[0]
                backup_source = latest['backup_location']
                backup_dest = os.path.join(os.path.abspath(destdir),self.backup_name)

        if not os.path.exists(backup_source):
            raise Exception('Backup source %s doesn\'t exists' % backup_source)

        # ensure there is a slash at end so rsync copies contents, not the dir itself
        if os.path.isdir(backup_source) and backup_source[-1] <> '/':
            backup_source += '/'

        if backup_dest[-1] <> '/':
            backup_dest += '/'

        if not os.path.isdir(backup_dest):
            os.makedirs(backup_dest)

        options = ['-aP','--stats','--delete-excluded','--numeric-ids','--delete-after']
        if self.logger.level:
            # NOTE(review): -P is already included in -aP above — duplicate
            options.append('-P')

        if self.dry_run:
            # NOTE(review): rsync -d is --dirs, not a dry run (-n / --dry-run) — confirm intent
            options.append('-d')

        options_params = " ".join(options)

        cmd = '/usr/bin/rsync %s %s %s 2>&1' % (options_params,backup_source,backup_dest)
        self.logger.debug("[%s] rsync : %s",self.backup_name,cmd)

        if not self.dry_run:
            self.line = ''
            starttime = time.time()
            stat_rowid = self.dbstat.start(backup_name=self.backup_name,server_name=self.server_name, TYPE="EXPORT")

            process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
            def ondata(data,context):
                # stream rsync output to console/log as it arrives
                if context.verbose:
                    print data
                context.logger.debug(data)

            log = monitor_stdout(process,ondata,self)

            # parse the rsync --stats summary into our counters
            for l in log.splitlines():
                if l.startswith('Number of files:'):
                    stats['total_files_count'] += int(l.split(':')[1])
                if l.startswith('Number of files transferred:'):
                    stats['written_files_count'] += int(l.split(':')[1])
                if l.startswith('Total file size:'):
                    stats['total_bytes'] += int(l.split(':')[1].split()[0])
                if l.startswith('Total transferred file size:'):
                    stats['written_bytes'] += int(l.split(':')[1].split()[0])

            returncode = process.returncode
            ## deal with exit code 24 (file vanished)
            if (returncode == 24):
                self.logger.warning("[" + self.backup_name + "] Note: some files vanished before transfer")
            elif (returncode == 23):
                self.logger.warning("[" + self.backup_name + "] unable so set uid on some files")
            elif (returncode != 0):
                self.logger.error("[" + self.backup_name + "] shell program exited with error code ")
                raise Exception("[" + self.backup_name + "] shell program exited with error code " + str(returncode), cmd)
        else:
            print cmd

        stats['status']='OK'
        self.logger.info('export backup from %s to %s OK, %d bytes written for %d changed files' % (backup_source,backup_dest,stats['written_bytes'],stats['written_files_count']))

        endtime = time.time()
        duration = (endtime-starttime)/3600.0
        if not self.dry_run and self.dbstat:
            self.dbstat.finish(stat_rowid,
                backup_end=datetime2isodate(datetime.datetime.now()),
                backup_duration = duration,
                total_files_count=stats['total_files_count'],
                written_files_count=stats['written_files_count'],
                total_bytes=stats['total_bytes'],
                written_bytes=stats['written_bytes'],
                status=stats['status'],
                log=stats['log'],
                backup_location=backup_dest)
        return stats
if __name__ == '__main__':
    # ad-hoc manual test entry point: verbose console logging, then open
    # the production stats database (the path must already exist)
    logger = logging.getLogger('tisbackup')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    dbstat = BackupStat('/backup/data/log/tisbackup.sqlite')

224
libtisbackup/copy_vm_xcp.py Executable file
View File

@ -0,0 +1,224 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import datetime
from common import *
import XenAPI
import time
import logging
import re
import os.path
import os
import datetime
import select
import urllib2
import base64
import socket
from stat import *
class copy_vm_xcp(backup_generic):
    """Backup a VM running on a XCP server on a second SR (requires xe tools and XenAPI)"""
    type = 'copy-vm-xcp'

    required_params = backup_generic.required_params + ['server_name','storage_name','password_file','vm_name','network_name']
    optional_params = backup_generic.optional_params + ['start_vm','max_copies']

    # defaults, overridable per-section or from [global]
    start_vm = "no"
    max_copies = 1

    def read_config(self,iniconf):
        """Read section params, then fall back to [global] for start_vm/max_copies."""
        assert(isinstance(iniconf,ConfigParser))
        backup_generic.read_config(self,iniconf)
        # NOTE(review): 'in' is a substring test here ('' and 'n' also match);
        # it only acts as "still the default" because the default is "no" — confirm
        if self.start_vm in 'no' and iniconf.has_option('global','start_vm'):
            self.start_vm = iniconf.get('global','start_vm')
        if self.max_copies == 1 and iniconf.has_option('global','max_copies'):
            self.max_copies = iniconf.getint('global','max_copies')

    def copy_vm_to_sr(self, vm_name, storage_name, dry_run):
        """Snapshot *vm_name* and copy the snapshot onto SR *storage_name*.

        Keeps at most max_copies backup VMs (named "zzz-<vm>-<date>"),
        rewires the copy's VIFs to self.network_name, optionally starts it,
        then destroys the snapshot.  Returns 0 on success or an error
        string describing the failed step.
        """
        # password file layout: user on line 1, password on line 2
        # (plus a trailing newline — split('\n') expects exactly 3 parts)
        user_xen, password_xen, null = open(self.password_file).read().split('\n')
        session = XenAPI.Session('https://'+self.server_name)
        try:
            session.login_with_password(user_xen,password_xen)
        except XenAPI.Failure, error:
            msg,ip = error.details
            # when the target is a pool slave, retry against the master it reports
            if msg == 'HOST_IS_SLAVE':
                server_name = ip
                session = XenAPI.Session('https://'+server_name)
                session.login_with_password(user_xen,password_xen)

        self.logger.debug("[%s] VM (%s) to backup in storage: %s",self.backup_name,vm_name,storage_name)
        now = datetime.datetime.now()

        #get storage opaqueRef
        try:
            storage = session.xenapi.SR.get_by_name_label(storage_name)[0]
        except IndexError,error:
            return("error get storage opaqueref %s"%(error))

        #get vm to copy opaqueRef
        try:
            vm = session.xenapi.VM.get_by_name_label(vm_name)[0]
        except IndexError,error:
            return("error get VM opaqueref %s"%(error))

        #do the snapshot
        self.logger.debug("[%s] Snapshot in progress",self.backup_name)
        try:
            snapshot = session.xenapi.VM.snapshot(vm,"tisbackup-%s"%(vm_name))
        except XenAPI.Failure, error:
            return("error when snapshot %s"%(error))

        #get snapshot opaqueRef
        snapshot = session.xenapi.VM.get_by_name_label("tisbackup-%s"%(vm_name))[0]
        session.xenapi.VM.set_name_description(snapshot,"snapshot created by tisbackup on : %s"%(now.strftime("%Y-%m-%d %H:%M")))

        # "zzz-" prefix: backup copies sort last in VM listings
        vm_backup_name = "zzz-%s-"%(vm_name)

        #Check if old backup exit
        list_backups = []
        for vm_ref in session.xenapi.VM.get_all():
            name_lablel = session.xenapi.VM.get_name_label(vm_ref)
            if vm_backup_name in name_lablel:
                list_backups.append(name_lablel)
        # names embed the date, so lexical sort == chronological sort
        list_backups.sort()

        if len(list_backups) >= 1:
            # Shutting last backup if started
            last_backup_vm = session.xenapi.VM.get_by_name_label(list_backups[-1])[0]
            if not "Halted" in session.xenapi.VM.get_power_state(last_backup_vm):
                self.logger.debug("[%s] Shutting down last backup vm : %s", self.backup_name, list_backups[-1] )
                session.xenapi.VM.hard_shutdown(last_backup_vm)

            # Delete oldest backup if exist
            if len(list_backups) >= int(self.max_copies):
                for i in range(len(list_backups)-int(self.max_copies)+1):
                    oldest_backup_vm = session.xenapi.VM.get_by_name_label(list_backups[i])[0]
                    if not "Halted" in session.xenapi.VM.get_power_state(oldest_backup_vm):
                        self.logger.debug("[%s] Shutting down old vm : %s", self.backup_name, list_backups[i] )
                        session.xenapi.VM.hard_shutdown(oldest_backup_vm)
                    try:
                        self.logger.debug("[%s] Deleting old vm : %s", self.backup_name, list_backups[i])
                        # destroy the disks first, then the VM record itself
                        for vbd in session.xenapi.VM.get_VBDs(oldest_backup_vm):
                            vdi = session.xenapi.VBD.get_VDI(vbd)
                            if not 'NULL' in vdi:
                                session.xenapi.VDI.destroy(vdi)
                        session.xenapi.VM.destroy(oldest_backup_vm)
                    except XenAPI.Failure, error:
                        return("error when destroy old backup vm %s"%(error))

        self.logger.debug("[%s] Copy %s in progress on %s",self.backup_name,vm_name,storage_name)
        try:
            backup_vm = session.xenapi.VM.copy(snapshot,vm_backup_name+now.strftime("%Y-%m-%d %H:%M"),storage)
        except XenAPI.Failure, error:
            return("error when copy %s"%(error))

        # clear the template flag on the copy so it is a plain VM
        # (the False argument does the opposite of the original comment
        # "define VM as a template")
        session.xenapi.VM.set_is_a_template(backup_vm,False)

        #change the network of the new VM
        try:
            vifDestroy = session.xenapi.VM.get_VIFs(backup_vm)
        except IndexError,error:
            return("error get VIF opaqueref %s"%(error))

        for i in vifDestroy:
            # recreate each VIF with identical settings on self.network_name
            vifRecord = session.xenapi.VIF.get_record(i)
            session.xenapi.VIF.destroy(i)
            networkRef = session.xenapi.network.get_by_name_label(self.network_name)[0]
            data = {'MAC': vifRecord['MAC'],
                    'MAC_autogenerated': False,
                    'MTU': vifRecord['MTU'],
                    'VM': backup_vm,
                    'current_operations': vifRecord['current_operations'],
                    'currently_attached': vifRecord['currently_attached'],
                    'device': vifRecord['device'],
                    'ipv4_allowed': vifRecord['ipv4_allowed'],
                    'ipv6_allowed': vifRecord['ipv6_allowed'],
                    'locking_mode': vifRecord['locking_mode'],
                    'network': networkRef,
                    'other_config': vifRecord['other_config'],
                    'qos_algorithm_params': vifRecord['qos_algorithm_params'],
                    'qos_algorithm_type': vifRecord['qos_algorithm_type'],
                    'qos_supported_algorithms': vifRecord['qos_supported_algorithms'],
                    'runtime_properties': vifRecord['runtime_properties'],
                    'status_code': vifRecord['status_code'],
                    'status_detail': vifRecord['status_detail']
                    }
            try:
                session.xenapi.VIF.create(data)
            except Exception, error:
                return(error)

        if self.start_vm in ['true', '1', 't', 'y', 'yes', 'oui']:
            session.xenapi.VM.start(backup_vm,False,True)

        session.xenapi.VM.set_name_description(backup_vm,"snapshot created by tisbackup on : %s"%(now.strftime("%Y-%m-%d %H:%M")))

        #delete the snapshot
        try:
            session.xenapi.VM.destroy(snapshot)
        except XenAPI.Failure, error:
            return("error when destroy snapshot %s"%(error))

        return(0)

    def do_backup(self,stats):
        """Run the VM copy and record the outcome in *stats*."""
        try:
            timestamp = int(time.time())
            cmd = self.copy_vm_to_sr(self.vm_name, self.storage_name, self.dry_run)

            if cmd == 0:
                timeExec = int(time.time()) - timestamp  # NOTE(review): computed but unused
                stats['log']='copy of %s to an other storage OK' % (self.backup_name)
                stats['status']='OK'
                stats['total_files_count'] = 1
                stats['backup_location'] = self.storage_name
            else:
                # copy_vm_to_sr returned an error message string
                stats['status']='ERROR'
                stats['log']=cmd

        except BaseException,e:
            stats['status']='ERROR'
            stats['log']=str(e)
            raise

# make the driver discoverable by the tisbackup runner
register_driver(copy_vm_xcp)

View File

@ -0,0 +1,18 @@
#!/bin/sh
# TISBackup hotplug hook for a ReadyNAS-style appliance: when a USB disk
# is plugged on the front panel, export the latest backups onto it and
# report progress on the front-panel LCD.  All output is appended to
# /var/log/tisbackup.log.
. /frontview/bin/functions

# mount point of the front-panel USB disk (empty when none is detected)
target=$(/frontview/bin/get_front_panel_usb_hdd)
echo $(date +%Y-%m-%d\ %H:%M:%S) : Export TISBackup sur Disque USB : $target >> /var/log/tisbackup.log
if [ -n "$target" ]; then
	hotplug_lcd "Start TISBackup export"
	# export the latest OK backups onto the USB disk
	/usr/local/bin/tisbackup -x /$target/export exportbackup >> /var/log/tisbackup.log 2>&1
	hotplug_lcd "Finish TISBackup export"
	sleep 3
else
	hotplug_lcd "Error, no USB disk"
	sleep 3
fi
echo $(date +%Y-%m-%d\ %H:%M:%S) : Fin Export TISBackup sur Disque USB : $target >> /var/log/tisbackup.log

55
samples/config.ini.sample Normal file
View File

@ -0,0 +1,55 @@
[global]
backup_base_dir = /root/tisbackup/backup_dir
# backup retention in days
backup_retention_time=90
# for nagios check in hours
maximum_backup_age=30
;[srvopenerp-slash]
;type=rsync+ssh
;server_name=srvopenerp
;remote_dir=/
;compression=True
;exclude_list="/proc/**","/sys/**","/dev/**"
;private_key=/root/.ssh/id_dsa
;ssh_port = 22
;[srvzimbra-slash]
;type=rsync+ssh
;server_name=srvzimbra
;remote_dir=/
;exclude_list="/proc/**","/sys/**","/dev/**"
;private_key=/root/.ssh/id_dsa
;ssh_port = 22
;[backup_mysql_srvintranet]
;type=mysql+ssh
;server_name=srvintranet
;private_key=/root/.ssh/id_dsa
;db_name=*
;db_user=root
;db_passwd=mypassword
;[srvopenerp-pgsql]
;type=pgsql+ssh
;server_name=srvopenerp
;db_name=tranquil-production
;private_key=/root/.ssh/id_dsa
;ssh_port = 22
;[test-backup-xva2]
;type=xen-xva
;xcphost=srvxen1-test
;server_name=test-backup-xva2
;password_file=/root/xen_passwd
;[sw-serveur]
;type=switch
;server_name=sw-serveur
;switch_ip=192.168.149.253
;switch_user=admin
;switch_password=toto
;switch_type=LINKSYS-SRW2024

View File

@ -0,0 +1,84 @@
[global]
backup_base_dir = /backup/data/
# backup retention in days
backup_retention_time=15
# for nagios check in hours
maximum_backup_age=30
# bandwith limit for rsync
#bw_limit = 300
#compression level for rsync (0 to 9)
#compression_level=7
[srvfichiers-partages]
type=rsync+ssh
server_name=srvfichiers
remote_dir=/home/partages
exclude_list=
private_key=/root/.ssh/id_dsa
ssh_port = 22
[srvintranet-slash]
type=rsync+ssh
server_name=srvintranet
remote_dir=/
exclude_list="/proc/**","/sys/**","/dev/**"
private_key=/root/.ssh/id_dsa
ssh_port = 22
[srvads-slash]
type=rsync+ssh
server_name=srvads
remote_dir=/
exclude_list="/proc/**","/sys/**","/dev/**"
private_key=/root/.ssh/id_dsa
[srvzimbra-slash]
type=rsync+ssh
server_name=srvzimbra
remote_dir=/
exclude_list="/proc/**","/sys/**","/dev/**","/opt/**"
private_key=/root/.ssh/id_dsa
ssh_port = 22
[srvzimbra-opt]
type=rsync+ssh
server_name=srvzimbra
remote_dir=/opt
exclude_list=
private_key=/root/.ssh/id_dsa
ssh_port = 22
[gateway]
type=null
server_name=fwall
[srvopenerp6-prod-pgsql]
type=pgsql+ssh
server_name=srvopenerp6-prod
db_name=tranquil_production
private_key=/root/.ssh/id_dsa
ssh_port = 22
[srvopenerp6-form-script]
type=rsync+ssh
server_name=srvopenerp6-form
remote_dir=/home/openerp/instances/form/openobject-library/
exclude_list=
private_key=/root/.ssh/id_rsa
ssh_port = 22
;preexec=/etc/init.d/zimbra stop
;postexec=/etc/init.d/zimbra start
;[backup_mysql_srvintranet]
;type=mysql+ssh
;server_name=srvintranet
;private_key=/root/.ssh/id_dsa
;db_name=
;db_user=root
;db_passwd=

21
samples/tisbackup-pra.ini Executable file
View File

@ -0,0 +1,21 @@
[global]
backup_base_dir = /home/homes/ssamson/
# backup retention in day
backup_retention_time=30
# for nagios check in hours
maximum_backup_age=30
compression_level=7
#max_copies=2
[test-copysr]
type=copy-vm-xcp
server_name=srvxen1-test
vm_name=test-pra
storage_name=FAST_SR2
password_file=/home/homes/ssamson/tisbackup-pra/xen_passwd
network_name=net-test
#start_vm=no
#max_copies=3

7
samples/tisbackup.cron Normal file
View File

@ -0,0 +1,7 @@
#SHELL=/bin/sh
#PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
# m h dom mon dow user command
30 22 * * * root /opt/tisbackup/tisbackup.py -c /etc/tis/tisbackup-config.ini backup >> /var/log/tisbackup.log 2>&1
30 12 * * * root /opt/tisbackup/tisbackup.py -c /etc/tis/tisbackup-config.ini cleanup >> /var/log/tisbackup.log 2>&1

10
samples/tisbackup_gui.ini Normal file
View File

@ -0,0 +1,10 @@
[uwsgi]
http = 0.0.0.0:8080
master = true
processes = 1
wsgi=tisbackup_gui:app
chdir=/opt/tisbackup
config= /etc/tis/tisbackup-config.ini
sections=
spooler=/opt/tisbackup/myspool
ADMIN_EMAIL=technique@tranquil-it-systems.fr

133
scripts/tisbackup_gui Executable file
View File

@ -0,0 +1,133 @@
#!/usr/bin/env bash
### BEGIN INIT INFO
# Provides:          tisbackup_gui-uwsgi
# Required-Start:    $all
# Required-Stop:     $all
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: starts the uwsgi app server for tisbackup_gui
# Description:       starts uwsgi app server for tisbackup_gui using start-stop-daemon
### END INIT INFO

# SysV init script controlling the uwsgi daemon that serves the
# tisbackup_gui web interface; actions are driven by signals sent to
# the PID recorded in $RUN/$NAME.pid.
set -e

VERSION=$(basename $0)
PATH=/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
DAEMON=/usr/local/bin/$VERSION
RUN=/var/run/
NAME=$VERSION
CONFIG_FILE=/etc/tis/tisbackup_gui.ini
LOGFILE=/var/log/$NAME.log
OWNER=root
DESC=$VERSION
OP=$1

DAEMON_OPTS=""

# Include uwsgi defaults if available
if [[ -f /etc/default/$VERSION ]]; then
    . /etc/default/$VERSION
fi

# return 0 (safe to start) unless the PID in $1 matches a live process
do_pid_check()
{
    local PIDFILE=$1
    [[ -f $PIDFILE ]] || return 0
    local PID=$(cat $PIDFILE)
    for p in $(pgrep $VERSION); do
        [[ $p == $PID ]] && return 1
    done
    return 0
}

# start uwsgi daemonized with our ini file, unless already running
do_start()
{
#    for config in $ENABLED_CONFIGS; do
        local PIDFILE=$RUN/$NAME.pid
        if do_pid_check $PIDFILE; then
            uwsgi -d $LOGFILE --pidfile $PIDFILE --ini $CONFIG_FILE
#            sudo -u $OWNER -i $VERSION $config $DAEMON_OPTS --pidfile $PIDFILE
        else
            echo "Already running!"
        fi
#    done
}

# send signal $1 to the recorded PID, ignoring errors (daemon may be gone)
send_sig()
{
    local PIDFILE=$RUN/$NAME.pid
    set +e
    [[ -f $PIDFILE ]] && kill $1 $(cat $PIDFILE) > /dev/null 2>&1
    set -e
}

# busy-wait until the daemon has exited, then remove the stale pidfile
wait_and_clean_pidfiles()
{
    local PIDFILE=$RUN/$NAME.pid
    until do_pid_check $PIDFILE; do
        echo -n "";
    done
    rm -f $PIDFILE
}

# SIGQUIT (-3): presumably uwsgi immediate shutdown — see uwsgi signal docs
do_stop()
{
    send_sig -3
    wait_and_clean_pidfiles
}

# SIGHUP (-1): uwsgi graceful reload of workers
do_reload()
{
    send_sig -1
}

# SIGTERM (-15): uwsgi "brutal" reload
do_force_reload()
{
    send_sig -15
}

# SIGUSR1 (-10): ask uwsgi to dump its status/stats to the log
get_status()
{
    send_sig -10
}

case "$OP" in
    start)
        echo "Starting $DESC: "
        do_start
        echo "$NAME."
        ;;
    stop)
        echo -n "Stopping $DESC: "
        do_stop
        echo "$NAME."
        ;;
    reload)
        echo -n "Reloading $DESC: "
        do_reload
        echo "$NAME."
        ;;
    force-reload)
        echo -n "Force-reloading $DESC: "
        do_force_reload
        echo "$NAME."
        ;;
    restart)
        echo "Restarting $DESC: "
        do_stop
        sleep 3
        do_start
        echo "$NAME."
        ;;
    status)
        get_status
        ;;
    *)
        N=/etc/init.d/$NAME
        echo "Usage: $N {start|stop|restart|reload|force-reload|status}" >&2
        exit 1
        ;;
esac
exit 0

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

BIN
static/images/bg_body.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

BIN
static/images/check.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

BIN
static/images/img01.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

BIN
static/images/img02.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 329 B

BIN
static/images/img03.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 372 B

BIN
static/images/img04.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.4 KiB

BIN
static/images/important.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

BIN
static/images/info.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

BIN
static/images/loader.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

BIN
static/images/logo-tis.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.1 KiB

BIN
static/images/sort_asc.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

BIN
static/images/sort_both.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

BIN
static/images/sort_desc.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

BIN
static/images/title.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 317 B

235
static/js/jquery.alerts.js Normal file
View File

@ -0,0 +1,235 @@
// jQuery Alert Dialogs Plugin
//
// Version 1.1
//
// Cory S.N. LaViska
// A Beautiful Site (http://abeautifulsite.net/)
// 14 May 2009
//
// Website: http://abeautifulsite.net/blog/2008/12/jquery-alert-dialogs/
//
// Usage:
//		jAlert( message, [title, callback] )
//		jConfirm( message, [title, callback] )
//		jPrompt( message, [value, title, callback] )
//
// History:
//
//		1.00 - Released (29 December 2008)
//
//		1.01 - Fixed bug where unbinding would destroy all resize events
//
// License:
//
// This plugin is dual-licensed under the GNU General Public License and the MIT License and
// is copyright 2008 A Beautiful Site, LLC.
//
(function($) {

	$.alerts = {

		// These properties can be read/written by accessing $.alerts.propertyName from your scripts at any time

		verticalOffset: -75,                // vertical offset of the dialog from center screen, in pixels
		horizontalOffset: 0,                // horizontal offset of the dialog from center screen, in pixels
		repositionOnResize: true,           // re-centers the dialog on window resize
		overlayOpacity: .01,                // transparency level of overlay
		overlayColor: '#FFF',               // base color of overlay
		draggable: true,                    // make the dialogs draggable (requires UI Draggables plugin)
		okButton: '&nbsp;OK&nbsp;',         // text for the OK button
		cancelButton: '&nbsp;Cancel&nbsp;', // text for the Cancel button
		dialogClass: null,                  // if specified, this class will be applied to all dialogs

		// Public methods

		// Show a one-button alert dialog. The callback (optional) receives
		// true once the OK button is activated.
		alert: function(message, title, callback) {
			if( title == null ) title = 'Alert';
			$.alerts._show(title, message, null, 'alert', function(result) {
				if( callback ) callback(result);
			});
		},

		// Show an OK/Cancel dialog. The callback (optional) receives
		// true for OK, false for Cancel.
		confirm: function(message, title, callback) {
			if( title == null ) title = 'Confirm';
			$.alerts._show(title, message, null, 'confirm', function(result) {
				if( callback ) callback(result);
			});
		},

		// Show a dialog containing a text input pre-filled with `value`.
		// The callback (optional) receives the entered string on OK, or
		// null on Cancel.
		prompt: function(message, value, title, callback) {
			if( title == null ) title = 'Prompt';
			$.alerts._show(title, message, value, 'prompt', function(result) {
				if( callback ) callback(result);
			});
		},

		// Private methods

		// Build and display a dialog of the given type ('alert', 'confirm'
		// or 'prompt'). Any dialog already on screen is removed first, so
		// only one dialog exists at a time. `value` is only used by the
		// 'prompt' type, as the initial content of the text input.
		_show: function(title, msg, value, type, callback) {

			// Tear down any existing dialog, then raise the modal overlay.
			$.alerts._hide();
			$.alerts._overlay('show');

			$("BODY").append(
			  '<div id="popup_container">' +
			    '<h1 id="popup_title"></h1>' +
			    '<div id="popup_content">' +
			      '<div id="popup_message"></div>' +
			    '</div>' +
			  '</div>');

			if( $.alerts.dialogClass ) $("#popup_container").addClass($.alerts.dialogClass);

			// IE6 Fix: IE6 does not support position:fixed, fall back to absolute.
			var pos = ($.browser.msie && parseInt($.browser.version) <= 6 ) ? 'absolute' : 'fixed';

			$("#popup_container").css({
				position: pos,
				zIndex: 99999,
				padding: 0,
				margin: 0
			});

			$("#popup_title").text(title);
			$("#popup_content").addClass(type);
			$("#popup_message").text(msg);
			// Set the message with .text() first so it is HTML-escaped, then
			// convert newlines in the escaped result to <br /> line breaks.
			$("#popup_message").html( $("#popup_message").text().replace(/\n/g, '<br />') );

			// Freeze the container at its natural width so it does not
			// reflow when the buttons/input are appended below.
			$("#popup_container").css({
				minWidth: $("#popup_container").outerWidth(),
				maxWidth: $("#popup_container").outerWidth()
			});

			$.alerts._reposition();
			$.alerts._maintainPosition(true);

			switch( type ) {
				case 'alert':
					$("#popup_message").after('<div id="popup_panel"><input type="button" value="' + $.alerts.okButton + '" id="popup_ok" /></div>');
					$("#popup_ok").click( function() {
						$.alerts._hide();
						callback(true);
					});
					// Enter or Escape both dismiss a plain alert.
					$("#popup_ok").focus().keypress( function(e) {
						if( e.keyCode == 13 || e.keyCode == 27 ) $("#popup_ok").trigger('click');
					});
				break;
				case 'confirm':
					$("#popup_message").after('<div id="popup_panel"><input type="button" value="' + $.alerts.okButton + '" id="popup_ok" /> <input type="button" value="' + $.alerts.cancelButton + '" id="popup_cancel" /></div>');
					$("#popup_ok").click( function() {
						$.alerts._hide();
						if( callback ) callback(true);
					});
					$("#popup_cancel").click( function() {
						$.alerts._hide();
						if( callback ) callback(false);
					});
					$("#popup_ok").focus();
					// Enter confirms, Escape cancels.
					$("#popup_ok, #popup_cancel").keypress( function(e) {
						if( e.keyCode == 13 ) $("#popup_ok").trigger('click');
						if( e.keyCode == 27 ) $("#popup_cancel").trigger('click');
					});
				break;
				case 'prompt':
					$("#popup_message").append('<br /><input type="text" size="30" id="popup_prompt" />').after('<div id="popup_panel"><input type="button" value="' + $.alerts.okButton + '" id="popup_ok" /> <input type="button" value="' + $.alerts.cancelButton + '" id="popup_cancel" /></div>');
					$("#popup_prompt").width( $("#popup_message").width() );
					$("#popup_ok").click( function() {
						// Read the input value before _hide() removes it from the DOM.
						var val = $("#popup_prompt").val();
						$.alerts._hide();
						if( callback ) callback( val );
					});
					$("#popup_cancel").click( function() {
						$.alerts._hide();
						if( callback ) callback( null );
					});
					// Enter accepts, Escape cancels, from any of the three controls.
					$("#popup_prompt, #popup_ok, #popup_cancel").keypress( function(e) {
						if( e.keyCode == 13 ) $("#popup_ok").trigger('click');
						if( e.keyCode == 27 ) $("#popup_cancel").trigger('click');
					});
					if( value ) $("#popup_prompt").val(value);
					$("#popup_prompt").focus().select();
				break;
			}

			// Make draggable
			if( $.alerts.draggable ) {
				try {
					$("#popup_container").draggable({ handle: $("#popup_title") });
					$("#popup_title").css({ cursor: 'move' });
				} catch(e) { /* requires jQuery UI draggables */ }
			}
		},

		// Remove the dialog, its overlay, and the resize handler.
		_hide: function() {
			$("#popup_container").remove();
			$.alerts._overlay('hide');
			$.alerts._maintainPosition(false);
		},

		// Show or hide the full-page overlay behind the dialog.
		_overlay: function(status) {
			switch( status ) {
				case 'show':
					$.alerts._overlay('hide');
					$("BODY").append('<div id="popup_overlay"></div>');
					$("#popup_overlay").css({
						position: 'absolute',
						zIndex: 99998,
						top: '0px',
						left: '0px',
						width: '100%',
						height: $(document).height(),
						background: $.alerts.overlayColor,
						opacity: $.alerts.overlayOpacity
					});
				break;
				case 'hide':
					$("#popup_overlay").remove();
				break;
			}
		},

		// Center the dialog in the viewport, applying the configured offsets,
		// and stretch the overlay to the current document height.
		_reposition: function() {
			var top = (($(window).height() / 2) - ($("#popup_container").outerHeight() / 2)) + $.alerts.verticalOffset;
			var left = (($(window).width() / 2) - ($("#popup_container").outerWidth() / 2)) + $.alerts.horizontalOffset;
			if( top < 0 ) top = 0;
			if( left < 0 ) left = 0;

			// IE6 fix: with position:absolute the scroll offset must be added manually.
			if( $.browser.msie && parseInt($.browser.version) <= 6 ) top = top + $(window).scrollTop();

			$("#popup_container").css({
				top: top + 'px',
				left: left + 'px'
			});
			$("#popup_overlay").height( $(document).height() );
		},

		// Bind (status === true) or unbind (status === false) the window
		// resize handler that keeps the dialog centered. Passing the same
		// function reference to unbind leaves other resize handlers intact.
		_maintainPosition: function(status) {
			if( $.alerts.repositionOnResize ) {
				switch(status) {
					case true:
						$(window).bind('resize', $.alerts._reposition);
					break;
					case false:
						$(window).unbind('resize', $.alerts._reposition);
					break;
				}
			}
		}

	}

	// Shortcut functions (assigned without var so they become page-level globals)

	jAlert = function(message, title, callback) {
		$.alerts.alert(message, title, callback);
	}

	jConfirm = function(message, title, callback) {
		$.alerts.confirm(message, title, callback);
	};

	jPrompt = function(message, value, title, callback) {
		$.alerts.prompt(message, value, title, callback);
	};

})(jQuery);

12098
static/js/jquery.dataTables.js vendored Normal file

File diff suppressed because it is too large Load Diff

154
static/js/jquery.min.js vendored Normal file
View File

@ -0,0 +1,154 @@
/*!
* jQuery JavaScript Library v1.4.2
* http://jquery.com/
*
* Copyright 2010, John Resig
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* Includes Sizzle.js
* http://sizzlejs.com/
* Copyright 2010, The Dojo Foundation
* Released under the MIT, BSD, and GPL Licenses.
*
* Date: Sat Feb 13 22:33:48 2010 -0500
*/
(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
/* ------------------------------------------------------------------------
 * Minified, vendored jQuery (appears to be a ~1.4-era build — confirm
 * against the file header above this excerpt). NOTE(review): machine-
 * minified third-party code; never hand-edit — upgrade by replacing the
 * whole file with an official jQuery release.
 * This span: Sizzle tail (getElementsByClassName feature-test shim,
 * compareDocumentPosition-based contains()) and the traversal API
 * (find/has/not/filter/is/closest/index/add/andSelf, the
 * parent/children/siblings family, and the dir/nth/sibling helpers).
 * ---------------------------------------------------------------------- */
(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
/* NOTE(review): comment sits mid-expression — legal between tokens in
 * minified JS. Below: DOM manipulation module — HTML-parsing regexes, the
 * wrap-map F for context-sensitive fragment creation, and
 * text/wrapAll/wrapInner/wrap/unwrap/append/prepend/before/after/remove/
 * empty/clone/html/replaceWith/domManip plus c.clean and c.cleanData. */
1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
/* NOTE(review): comment sits mid-expression — legal between tokens in
 * minified JS. Below: CSS module — c.fn.css, c.style (incl. IE opacity
 * handled via alpha() filters), c.curCSS (computed vs. currentStyle paths),
 * c.swap, and the :hidden/:visible selector filters. */
c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
/* NOTE(review): comment sits mid-expression — legal between tokens in
 * minified JS. Below: AJAX module — c.fn.load, form serialization, the
 * global ajax event shortcuts, c.ajaxSettings, and c.ajax itself (JSONP
 * callback rewriting, cache-busting "_=" param, script-tag transport for
 * cross-domain GETs, XHR setup/teardown), plus handleError/httpSuccess/
 * httpNotModified/httpData/param helpers. */
a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
/* Below: effects module — show/hide/toggle/fadeTo/animate/stop, the
 * slideDown/slideUp/fadeIn/fadeOut shortcuts, c.speed/c.easing, and the
 * c.fx tween object (update/cur/custom/show/hide/step). */
this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
/* NOTE(review): comment sits mid-expression — legal between tokens in
 * minified JS. Below: fx timer tick/stop and per-property step functions;
 * the offset module (getBoundingClientRect fast path plus the manual
 * offsetParent-walking fallback with its browser-quirk feature tests);
 * position/offsetParent; scrollLeft/scrollTop; inner/outer Height/Width.
 * The final line closes the library's outer IIFE, whose opening
 * `(function(A,w){` lies before this excerpt, and exposes window.jQuery
 * and window.$ . */
c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);

1
static/js/jquery.ui.draggable.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,369 @@
.dataTables_wrapper {
clear: both;
position: relative;
}
.dataTables_processing {
background-color: white;
border: 1px solid #DDDDDD;
color: #999999;
font-size: 14px;
height: 30px;
left: 50%;
margin-left: -125px;
margin-top: -15px;
padding: 14px 0 2px;
position: absolute;
text-align: center;
top: 50%;
width: 250px;
}
.dataTables_length {
float: left;
width: 40%;
}
.dataTables_filter {
float: right;
text-align: right;
width: 50%;
}
.dataTables_info {
float: left;
width: 60%;
}
.dataTables_paginate {
float: right;
text-align: right;
}
.paginate_disabled_previous, .paginate_enabled_previous, .paginate_disabled_next, .paginate_enabled_next {
color: #111111 !important;
cursor: pointer;
float: left;
height: 19px;
}
.paginate_disabled_previous:hover, .paginate_enabled_previous:hover, .paginate_disabled_next:hover, .paginate_enabled_next:hover {
text-decoration: none !important;
}
.paginate_disabled_previous:active, .paginate_enabled_previous:active, .paginate_disabled_next:active, .paginate_enabled_next:active {
outline: medium none;
}
.paginate_disabled_previous, .paginate_disabled_next {
color: #666666 !important;
}
.paginate_disabled_previous, .paginate_enabled_previous {
padding-left: 23px;
}
.paginate_disabled_next, .paginate_enabled_next {
margin-left: 10px;
padding-right: 23px;
}
.paginate_disabled_previous {
background: url("/static/images/back_disabled.png") no-repeat scroll left top transparent;
}
.paginate_enabled_previous {
background: url("/static/images/back_enabled.png") no-repeat scroll left top transparent;
}
.paginate_enabled_previous:hover {
background: url("/static/images/back_enabled_hover.png") no-repeat scroll left top transparent;
}
.paginate_disabled_next {
background: url("/static/images/forward_disabled.png") no-repeat scroll right top transparent;
}
.paginate_enabled_next {
background: url("/static/images/forward_enabled.png") no-repeat scroll right top transparent;
}
.paginate_enabled_next:hover {
background: url("/static/images/forward_enabled_hover.png") no-repeat scroll right top transparent;
}
table.display {
clear: both;
margin: 0 auto;
width: 100%;
}
table.display thead th {
border-bottom: 1px solid black;
cursor: pointer;
font-weight: bold;
padding: 3px 18px 3px 10px;
}
table.display tfoot th {
border-top: 1px solid black;
font-weight: bold;
padding: 3px 18px 3px 10px;
}
table.display tr.heading2 td {
border-bottom: 1px solid #AAAAAA;
}
table.display td {
padding: 3px 10px;
}
table.display td.center {
text-align: center;
}
.sorting_asc {
background: url("/static/images/sort_asc.png") no-repeat scroll right center transparent;
}
.sorting_desc {
background: url("/static/images/sort_desc.png") no-repeat scroll right center transparent;
}
.sorting {
background: url("/static/images/sort_both.png") no-repeat scroll right center transparent;
}
.sorting_asc_disabled {
background: url("/static/images/sort_asc_disabled.png") no-repeat scroll right center transparent;
}
.sorting_desc_disabled {
background: url("/static/images/sort_desc_disabled.png") no-repeat scroll right center transparent;
}
table.display thead th:active, table.display thead td:active {
outline: medium none;
}
table.display tr.odd.gradeA {
background-color: #DDFFDD;
}
table.display tr.even.gradeA {
background-color: #EEFFEE;
}
table.display tr.odd.gradeC {
background-color: #DDDDFF;
}
table.display tr.even.gradeC {
background-color: #EEEEFF;
}
table.display tr.odd.gradeX {
background-color: #FFDDDD;
}
table.display tr.even.gradeX {
background-color: #FFEEEE;
}
table.display tr.odd.gradeU {
background-color: #DDDDDD;
}
table.display tr.even.gradeU {
background-color: #EEEEEE;
}
tr.odd {
background-color: #E2E4FF;
}
tr.even {
background-color: white;
}
.dataTables_scroll {
clear: both;
}
.dataTables_scrollBody {
}
.top, .bottom {
background-color: #F5F5F5;
border: 1px solid #CCCCCC;
padding: 15px;
}
.top .dataTables_info {
float: none;
}
.clear {
clear: both;
}
.dataTables_empty {
text-align: center;
}
tfoot input {
color: #444444;
margin: 0.5em 0;
width: 100%;
}
tfoot input.search_init {
color: #999999;
}
td.group {
background-color: #D1CFD0;
border-bottom: 2px solid #A19B9E;
border-top: 2px solid #A19B9E;
}
td.details {
background-color: #D1CFD0;
border: 2px solid #A19B9E;
}
.example_alt_pagination div.dataTables_info {
width: 40%;
}
.paging_full_numbers {
height: 22px;
line-height: 22px;
width: 400px;
}
.paging_full_numbers a:active {
outline: medium none;
}
.paging_full_numbers a:hover {
text-decoration: none;
}
.paging_full_numbers a.paginate_button, .paging_full_numbers a.paginate_active {
border: 1px solid #AAAAAA;
color: #333333 !important;
cursor: pointer;
margin: 0 3px;
padding: 2px 5px;
}
.paging_full_numbers a.paginate_button {
background-color: #DDDDDD;
}
.paging_full_numbers a.paginate_button:hover {
background-color: #CCCCCC;
text-decoration: none !important;
}
.paging_full_numbers a.paginate_active {
background-color: #99B3FF;
}
table.display tr.even.row_selected td {
background-color: #B0BED9;
}
table.display tr.odd.row_selected td {
background-color: #9FAFD1;
}
tr.odd td.sorting_1 {
background-color: #D3D6FF;
}
tr.odd td.sorting_2 {
background-color: #DADCFF;
}
tr.odd td.sorting_3 {
background-color: #E0E2FF;
}
tr.even td.sorting_1 {
background-color: #EAEBFF;
}
tr.even td.sorting_2 {
background-color: #F2F3FF;
}
tr.even td.sorting_3 {
background-color: #F9F9FF;
}
tr.odd.gradeA td.sorting_1 {
background-color: #C4FFC4;
}
tr.odd.gradeA td.sorting_2 {
background-color: #D1FFD1;
}
tr.odd.gradeA td.sorting_3 {
background-color: #D1FFD1;
}
tr.even.gradeA td.sorting_1 {
background-color: #D5FFD5;
}
tr.even.gradeA td.sorting_2 {
background-color: #E2FFE2;
}
tr.even.gradeA td.sorting_3 {
background-color: #E2FFE2;
}
tr.odd.gradeC td.sorting_1 {
background-color: #C4C4FF;
}
tr.odd.gradeC td.sorting_2 {
background-color: #D1D1FF;
}
tr.odd.gradeC td.sorting_3 {
background-color: #D1D1FF;
}
tr.even.gradeC td.sorting_1 {
background-color: #D5D5FF;
}
tr.even.gradeC td.sorting_2 {
background-color: #E2E2FF;
}
tr.even.gradeC td.sorting_3 {
background-color: #E2E2FF;
}
tr.odd.gradeX td.sorting_1 {
background-color: #FFC4C4;
}
tr.odd.gradeX td.sorting_2 {
background-color: #FFD1D1;
}
tr.odd.gradeX td.sorting_3 {
background-color: #FFD1D1;
}
tr.even.gradeX td.sorting_1 {
background-color: #FFD5D5;
}
tr.even.gradeX td.sorting_2 {
background-color: #FFE2E2;
}
tr.even.gradeX td.sorting_3 {
background-color: #FFE2E2;
}
tr.odd.gradeU td.sorting_1 {
background-color: #C4C4C4;
}
tr.odd.gradeU td.sorting_2 {
background-color: #D1D1D1;
}
tr.odd.gradeU td.sorting_3 {
background-color: #D1D1D1;
}
tr.even.gradeU td.sorting_1 {
background-color: #D5D5D5;
}
tr.even.gradeU td.sorting_2 {
background-color: #E2E2E2;
}
tr.even.gradeU td.sorting_3 {
background-color: #E2E2E2;
}
.ex_highlight #example tbody tr.even:hover, #example tbody tr.even td.highlighted {
background-color: #ECFFB3;
}
.ex_highlight #example tbody tr.odd:hover, #example tbody tr.odd td.highlighted {
background-color: #E6FF99;
}
.ex_highlight_row #example tr.even:hover {
background-color: #ECFFB3;
}
.ex_highlight_row #example tr.even:hover td.sorting_1 {
background-color: #DDFF75;
}
.ex_highlight_row #example tr.even:hover td.sorting_2 {
background-color: #E7FF9E;
}
.ex_highlight_row #example tr.even:hover td.sorting_3 {
background-color: #E2FF89;
}
.ex_highlight_row #example tr.odd:hover {
background-color: #E6FF99;
}
.ex_highlight_row #example tr.odd:hover td.sorting_1 {
background-color: #D6FF5C;
}
.ex_highlight_row #example tr.odd:hover td.sorting_2 {
background-color: #E0FF84;
}
.ex_highlight_row #example tr.odd:hover td.sorting_3 {
background-color: #DBFF70;
}
table.KeyTable td {
border: 3px solid transparent;
}
table.KeyTable td.focus {
border: 3px solid #3366FF;
}
table.display tr.gradeA {
background-color: #EEFFEE;
}
table.display tr.gradeC {
background-color: #DDDDFF;
}
table.display tr.gradeX {
background-color: #FFDDDD;
}
table.display tr.gradeU {
background-color: #DDDDDD;
}
div.box {
background-color: #E5E5FF;
border: 1px solid #8080FF;
height: 100px;
overflow: auto;
padding: 10px;
}

View File

@ -0,0 +1,57 @@
#popup_container {
font-family: Arial, sans-serif;
font-size: 12px;
min-width: 300px; /* Dialog will be no smaller than this */
max-width: 600px; /* Dialog will wrap after this width */
background: #FFF;
border: solid 5px #999;
color: #000;
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
border-radius: 5px;
}
#popup_title {
font-size: 14px;
font-weight: bold;
text-align: center;
line-height: 1.75em;
color: #666;
background: #CCC url(/static/images/title.gif) top repeat-x;
border: solid 1px #FFF;
border-bottom: solid 1px #999;
cursor: default;
padding: 0em;
margin: 0em;
}
#popup_content {
background: 16px 16px no-repeat url(/static/images/info.gif);
padding: 1em 1.75em;
margin: 0em;
}
#popup_content.alert {
background-image: url(/static/images/info.gif);
}
#popup_content.confirm {
background-image: url(/static/images/important.gif);
}
#popup_content.prompt {
background-image: url(/static/images/help.gif);
}
#popup_message {
padding-left: 48px;
}
#popup_panel {
text-align: center;
margin: 1em 0em 0em 1em;
}
#popup_prompt {
margin: .5em 0em;
}

449
static/styles/style.css Normal file
View File

@ -0,0 +1,449 @@
/*
Design by Free CSS Templates
http://www.freecsstemplates.org
Released for free under a Creative Commons Attribution 2.5 License
*/
body {
margin: 0;
padding: 0;
background: #FFFFFF url(/static/images/img01.jpg) repeat-x left top;
font-family: Arial, Helvetica, sans-serif;
font-size: 12px;
color: #787878;
}
h1, h2, h3 {
margin: 0;
padding: 0;
font-weight: normal;
color: #32639A;
}
h1 {
font-size: 2em;
}
h2 {
font-size: 2.4em;
}
h3 {
font-size: 1.6em;
}
p, ul, ol {
margin-top: 0;
line-height: 180%;
}
ul, ol {
}
.error {
color: red;
font-size: 1.4em;
}
.info {
color: orange;
font-size: 1.3em;
margin-top: 5px;
}
.center-img{
vertical-align: middle;
}
a {
text-decoration: none;
color: #4486C7;
}
a:hover {
}
#wrapper {
width: 960px;
margin: 0 auto;
padding: 0;
}
/* Header */
#header {
width: 940px;
height: 148px;
margin: 0 auto;
background: url(/static/images/logo-tis.png) no-repeat left 20px;
}
/* Logo */
#logo {
float: left;
margin: 0;
padding-top: 30px;
color: #000000;
}
#logo h1, #logo p {
margin: 0;
padding: 0;
}
#logo h1 {
float: left;
padding-left: 80px;
letter-spacing: -1px;
font-size: 3.8em;
}
#logo p {
float: left;
margin: 0;
padding: 26px 0 0 10px;
font: normal 14px Georgia, "Times New Roman", Times, serif;
font-style: italic;
color: #FFFFFF;
}
#logo a {
border: none;
background: none;
text-decoration: none;
color: #FFFFFF;
}
/* Menu */
#menu {
float: left;
width: 100%;
height: 49px;
margin: 0 auto;
padding: 0;
position: relative;
}
#menu ul {
float: left;
margin: 0;
padding: 0;
list-style: none;
left:50%;
line-height: normal;
position: relative;
}
#menu ul li{
float: left;
position:relative;
right:50%;
}
#menu li {
position: relative;
}
#menu a {
display: block;
width: 155px;
height: 33px;
padding-top: 16px;
text-decoration: none;
text-align: center;
font-family: Arial, Helvetica, sans-serif;
font-size: 13px;
font-weight: bold;
color: #FFFFFF;
border: none;
}
#menu a:hover, #menu .current_page_item a {
background: #659CEF url(/static/images/img03.jpg) repeat-x left bottom;
text-decoration: none;
}
#menu .current_page_item a {
background: #FBFBFC url(/static/images/img02.jpg) repeat-x left bottom;
padding-left: 0;
color: #30476A;
}
/* Page */
#page {
width: 100%;
margin: 0 auto;
padding: 0;
}
#page-bgtop {
	/* FIX: was "padding: 20px px;" — invalid value, browsers dropped the
	   whole declaration, so no padding was applied at all.
	   NOTE(review): intended value presumed to be vertical-only padding —
	   confirm against the original "Indication" template. */
	padding: 20px 0;
}
#page-bgbtm {
}
/* Content */
#content {
width: 100%;
float: left;
padding: 30px 0px 0px 0px;
}
#content h2{
margin-left: 30px;
}
#content h3{
margin-left: 60px;
}
h4{
padding-left: 150px;
}
.post {
margin-bottom: 15px;
}
.post-bgtop {
}
.post-bgbtm {
}
.post .title {
margin-bottom: 10px;
padding: 12px 0 0 0px;
letter-spacing: -.5px;
color: #32639A;
}
.post .title a {
color: #32639A;
border: none;
}
.post .meta {
height: 30px;
background: #D8E7FE;
margin: 0px;
padding: 0px 0px 0px 0px;
text-align: left;
font-family: Arial, Helvetica, sans-serif;
font-size: 13px;
font-weight: bold;
}
.post .meta .date {
float: left;
height: 24px;
padding: 3px 15px;
color: #4A81DE;
}
.post .meta .posted {
float: right;
height: 24px;
padding: 3px 15px;
background: #A8CF64;
color: #FFFFFF;
}
.post .meta a {
color: #FFFFFF;
}
.post .entry {
padding: 0px 0px 20px 0px;
padding-bottom: 20px;
text-align: justify;
}
.links {
padding-top: 20px;
font-size: 12px;
font-weight: bold;
}
/* Sidebar */
#sidebar {
float: right;
width: 280px;
padding: 0px;
color: #787878;
background: #FFFFFF;
}
#sidebar ul {
margin: 0;
padding: 0;
list-style: none;
}
#sidebar li {
margin: 0;
padding: 0;
border-left: 1px solid #E2E2E2;
}
#sidebar li ul {
margin: 0px 0px;
padding-bottom: 30px;
}
#sidebar li li {
line-height: 35px;
border-bottom: 1px dashed #D1D1D1;
margin: 0px 30px;
border-left: none;
}
#sidebar li li span {
display: block;
margin-top: -20px;
padding: 0;
font-size: 11px;
font-style: italic;
}
#sidebar h2 {
height: 38px;
padding-left: 30px;
letter-spacing: -.5px;
font-size: 1.8em;
}
#sidebar p {
margin: 0 0px;
padding: 0px 30px 20px 30px;
text-align: justify;
}
#sidebar a {
border: none;
}
#sidebar a:hover {
text-decoration: underline;
color: #8A8A8A;
}
/* Footer */
#footer {
height: 50px;
margin: 0 auto;
padding: 0px 0 15px 0;
background: #D8E7FE;
border-top: 1px solid #D3DEF0;
font-family: Arial, Helvetica, sans-serif;
}
#footer p {
margin: 0;
padding-top: 20px;
line-height: normal;
font-size: 10px;
text-transform: uppercase;
text-align: center;
color: #A0A0A0;
}
#footer a {
color: #5389E0;
}
td.loading {
	/*text-align: center;*/
	/* FIX: "center" is not a valid value for vertical-align on table cells;
	   "middle" is the correct keyword to center the loader image. */
	vertical-align: middle;
}
table.sample {
border-width: 1px;
border-spacing: 1px;
border-style: outset;
border-color: green;
border-collapse: collapse;
background-color: white;
}
table.sample th {
	border-width: 1px;
	padding: 1px;
	border-style: inset;
	border-color: gray;
	background-color: white;
	/* FIX: removed invalid empty declaration "-moz-border-radius: ;" —
	   a property with no value is a CSS syntax error. */
}
table.sample td {
	border-width: 1px;
	padding: 1px;
	border-style: inset;
	border-color: gray;
	background-color: white;
	/* FIX: removed invalid empty declaration "-moz-border-radius: ;" —
	   a property with no value is a CSS syntax error. */
}
/*Tables*/
#table-design {
margin: 0 auto;
background-color: whiteSmoke;
border-radius: 6px;
-webkit-border-radius: 6px;
-moz-border-radius: 6px;
}
#table-design td, #table-design th {
}
#table-design th {
color: #333;
font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif;
font-size: 16px;
font-style: normal;
font-weight: normal;
padding: 0 20px;
}
#table-design td {
padding: 0 20px;
line-height: 20px;
color: #0084B4;
font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif;
font-size: 14px;
border-bottom: 1px solid #fff;
border-top: 1px solid #fff;
}
#table-design tr:hover {
background-color: #fff;
}
P.message {
display: inline;
margin: 0 auto;
margin-left: auto;
margin-right: auto;
/*width: 6em;*/
background-color: whiteSmoke;
border-radius: 6px;
-webkit-border-radius: 6px;
-moz-border-radius: 6px;
font-size: 14px;
color: #0084B4;
font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif;
}
#mouter{
position:relative;
left:50%;
float:left;
clear:both;
margin:10px 0;
text-align:left;
}
#minner{
background-color: whiteSmoke;
border-radius: 6px;
-webkit-border-radius: 6px;
-moz-border-radius: 6px;
padding:5px 20px;
position:relative;
left:-50%;
text-align:left;
font-size: 14px;
color: #0084B4;
font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif;
}

161
templates/backups.html Executable file
View File

@ -0,0 +1,161 @@
{% extends "layout.html" %}
{% block content %}
{% if backup_list['rsync_ssh_list']|count != 0 %}
<h2 class="title">Rsync+ssh</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
<th>Directory</th>
</thead>
<tbody>
{% for entry in backup_list['rsync_ssh_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
<td>{{ entry[3] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if backup_list['rsync_list']|count != 0 %}
<h2 class="title">Rsync</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
<th>Directory</th>
</thead>
<tbody>
{% for entry in backup_list['rsync_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
<td>{{ entry[3] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if backup_list['pgsql_list']|count != 0 %}
<h2 class="title">pgSQL</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
<th>Database</th>
</thead>
<tbody>
{% for entry in backup_list['pgsql_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
<td>{{ entry[3] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if backup_list['mysql_list']|count != 0 %}
<h2 class="title">MySQL</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
<th>Database</th>
</thead>
<tbody>
{% for entry in backup_list['mysql_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
<td>{{ entry[3] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if backup_list['xva_list']|count != 0 %}
<h2 class="title">XVA</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
</thead>
<tbody>
{% for entry in backup_list['xva_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if backup_list['metadata_list']|count != 0 %}
<h2 class="title">XCP Metadata</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
</thead>
<tbody>
{% for entry in backup_list['metadata_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if backup_list['switch_list']|count != 0 %}
<h2 class="title">Switchs</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
</thead>
<tbody>
{% for entry in backup_list['switch_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if backup_list['null_list']|count != 0 %}
<h2 class="title">Null</h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
</thead>
<tbody>
{% for entry in backup_list['null_list'] %}
<tr>
<td>{{ entry[0] }}</td>
<td>{{ entry[1] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% endblock %}

78
templates/export_backup.html Executable file
View File

@ -0,0 +1,78 @@
{% extends "layout.html" %}
{% block content %}
<h2 class="title">Hard drive verifications:</h2>
{% with messages = get_flashed_messages() %}
{% if messages %}
{% for message in messages %}
<h4>{{ message }} <img src="/static/images/check.png" width="15" height="15"/></h4>
{% endfor %}
{% endif %}
{% endwith %}
{% if error %}
<p>
<div class=error><img class="center-img" src='/static/images/important.gif' title="Notice" width="24" height="24"/> <strong>Error:</strong> {{ error }}</div>
<div class=info><img class="center-img" src='/static/images/info.gif' title="Notice" width="24" height="24"/> <strong>Notice:</strong> {{ info }}</div>
<h4>Also, you can contact your <a href="mailto:{{ email }}?Subject=TISBACKUP%20Export"> System Administrator</a> for more details </h4>
</p>
{% elif not start %}
<script>
$(document).ready( function() {
$("#confirm_button").click( function() {
jConfirm('Do you want to proceed with the backup now?', 'Confirmation Dialog', function(r) {
if(r == true ){
$("#backup").submit();
};
});
});
});
</script>
<form id="backup" action='/export_backup'>
<input type="hidden" name="start" value="true" />
<input type="button" id="confirm_button" value="Launch Backup" style="margin-left: 400px;" />
</form>
{% else %}
<h2 class="title">Backup is running: </h2>
<table id="table-design">
<thead>
<th>Server</th>
<th>Backup</th>
<th>Status</th>
</thead>
<tbody>
</tbody>
</table>
<script>
//Refresh periode in seconds
var refresh = 10;
var done = false;
function status(){
$.getJSON("/status.json", function(data) {
$("#table-design tbody").remove();
$.each(data.data, function(key,val){
$('#table-design').append('<tr>');
$('tbody').append('<td>'+val.server_name+'</td>');
$('tbody').append('<td>'+val.backup_name+'</td>');
if(val.status == 'Running'){
$('tbody').append('<td class=loading><img src="/static/images/loader.gif" width="15" height="15"/></td>');
done = false;
}else{
$('tbody').append('<td>'+val.status+'</td>');
done = true;
}
$('#table-design').append('</tr>');
});
});
if (done){
jAlert('Backup finished', 'TIS Backup');
window.clearInterval(timer);
};
};
var timer = window.setInterval(function(){
status();
}, refresh * 1000);
status();
</script>
{% endif %}
{% endblock %}

121
templates/last_backups.html Executable file
View File

@ -0,0 +1,121 @@
{% extends "layout.html" %}
{% block content %}
<script type="text/javascript" charset="utf-8">
$(document).ready(function() {
    // Initialise the last-backups DataTable, fed by /backups.json.
    var oTable = $('#table-design').dataTable( {
        "bProcessing": true,
        "sAjaxDataProp": "data",
        "sAjaxSource": "/backups.json",
        "iDisplayLength": 25,
        "aLengthMenu": [[25, 50, 100, 200, 500, -1], [25, 50, 100, 200, 500, "All"]],
        "aaSorting": [[ 0, "desc" ]],
        "aoColumnDefs": [
            {
                // Column 4: backup duration — presumably stored in hours
                // (hence * 3600 before H:MM:SS formatting); TODO confirm.
                "aTargets": [ 4 ],
                "mData": "backup_duration",
                "mRender": function ( data, type, full ) {
                    return secondsToHms(data * 3600);
                }
            },
            {
                // Column 0: backup start timestamp, rendered in local time.
                "aTargets": [ 0 ],
                "mData": "backup_start",
                "mRender": function ( data, type, full ) {
                    var d = new Date(data);
                    return d.getFullYear()+"/"+(d.getMonth()+1)+"/"+d.getDate()+" "+d.toLocaleTimeString();
                }
            },
            {
                // Column 1: backup end timestamp.
                // FIX: the original used "backup_start" here (copy-paste),
                // so the "Backup end" column displayed the start time.
                "aTargets": [ 1 ],
                "mData": "backup_end",
                "mRender": function ( data, type, full ) {
                    var d = new Date(data);
                    return d.getFullYear()+"/"+(d.getMonth()+1)+"/"+d.getDate()+" "+d.toLocaleTimeString();
                }
            }
        ],
        "aoColumns": [
            { "mData":"backup_start"},
            { "mData":"backup_end" , "bVisible": false },
            { "mData":"server_name" },
            { "mData":"backup_name" },
            { "mData":"backup_duration"},
            { "mData":"status" },
            { "mData":"written_bytes" , "bVisible": false},
            { "mData":"written_files_count" , "bVisible": false},
            { "mData":"total_files_count" , "bVisible": false},
            { "mData":"total_bytes" , "bVisible": false },
            { "mData":"backup_location" , "bVisible": false },
            { "mData":"description" , "bVisible": false },
            { "mData":"log" , "bVisible": false },
            { "mData":"TYPE" , "bVisible": false }
        ]
    } );
} );
function fnShowHide(iCol) {
    // Toggle visibility of DataTable column iCol.
    // dataTable() on an already-initialised table returns the existing
    // instance — this is a lookup, not a re-creation.
    var table = $('#table-design').dataTable();
    var visible = table.fnSettings().aoColumns[iCol].bVisible;
    table.fnSetColumnVis(iCol, !visible);
}
function secondsToHms(d) {
    // Format a duration given in seconds as "H:MM:SS".
    // Hours are not zero-padded; minutes and seconds always show two digits.
    var total = Number(d);
    var hours = Math.floor(total / 3600);
    var remainder = total % 3600;
    var minutes = Math.floor(remainder / 60);
    var seconds = Math.floor(remainder % 60);
    var hh = hours > 0 ? hours + ":" : "0:";
    var mm = minutes > 0 ? (minutes < 10 ? "0" : "") + minutes + ":" : "00:";
    var ss = (seconds < 10 ? "0" : "") + seconds;
    return hh + mm + ss;
}
</script>
<table style='text-align: center;' cellpadding="0" cellspacing="0" border="0" class="display" id="table-design">
<thead style='text-align: center;'>
<tr>
<th>Backup start</th>
<th>Backup end</th>
<th>Server name</th>
<th>Backup name</th>
<th>Backup duration</th>
<th>Status</th>
<th>Written bytes</th>
<th>Written files count</th>
<th>Total files count</th>
<th>Total bytes </th>
<th>Backup location</th>
<th>Description</th>
<th>Log</th>
<th>Type</th>
</tr>
</thead>
<tbody>
</tbody>
</table>
<br />
<p>
Backup start<input type="checkbox" onclick="fnShowHide( 0 );"/>
Backup end<input type="checkbox" onclick="fnShowHide( 1 );"/>
Server name<input type="checkbox" onclick="fnShowHide( 2 );"/>
Backup name<input type="checkbox" onclick="fnShowHide( 3 );"/>
Backup duration<input type="checkbox" onclick="fnShowHide( 4 );"/>
Status<input type="checkbox" onclick="fnShowHide( 5 );"/>
<br />
Written bytes<input type="checkbox" onclick="fnShowHide( 6 );"/>
Written files count<input type="checkbox" onclick="fnShowHide( 7 );"/>
Total files count<input type="checkbox" onclick="fnShowHide( 8 );"/>
Total bytes <input type="checkbox" onclick="fnShowHide( 9 );"/>
<br />
Backup location<input type="checkbox" onclick="fnShowHide( 10 );"/>
Description<input type="checkbox" onclick="fnShowHide( 11 );"/>
Log<input type="checkbox" onclick="fnShowHide( 12 );"/>
Type<input type="checkbox" onclick="fnShowHide( 13 );"/>
</p>
<script>
$('input:checkbox').attr('checked', false);
$('input:checkbox:eq(0)').attr('checked', true);
$('input:checkbox:eq(2)').attr('checked', true);
$('input:checkbox:eq(3)').attr('checked', true);
$('input:checkbox:eq(4)').attr('checked', true);
$('input:checkbox:eq(5)').attr('checked', true);
</script>
{% endblock %}

77
templates/layout.html Normal file
View File

@ -0,0 +1,77 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<!--
Design by Free CSS Templates
http://www.freecsstemplates.org
Released for free under a Creative Commons Attribution 2.5 License
Name : Indication
Description: A two-column, fixed-width design with dark color scheme.
Version : 1.0
Released : 20090910
-->
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta name="keywords" content="" />
<meta name="description" content="" />
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>Tisbackup GUI</title>
<!-- Styles -->
<link rel="stylesheet" type="text/css" href="/static/styles/datatables.css">
<link rel="stylesheet" type="text/css" href="/static/styles/style.css">
<link rel="stylesheet" type="text/css" href="/static/styles/jquery.alerts.css">
<!-- Scripts -->
<script type=text/javascript src="/static/js/jquery.min.js"></script>
<script type=text/javascript src="/static/js/jquery.dataTables.js"></script>
<script type=text/javascript src="/static/js/jquery.alerts.js"></script>
<script type=text/javascript src="/static/js/jquery.ui.draggable.js"></script>
</head>
<body background='/static/images/bg_body.gif'>
<div id="wrapper">
<div id="header">
<div id="logo">
<h1><a href="/">TIS Backup GUI</a></h1>
<p> design by Hübert</p>
</div>
</div>
<!-- end #header -->
<div id="menu">
<ul>
<li><a href="/">Backups</a></li>
<li><a href="/export_backup">Export Backup</a></li>
<li><a href="/last_backups">Last Backups</a></li>
</ul>
</div>
<!-- end #menu -->
<div id="page">
<div id="page-bgtop">
<div id="page-bgbtm">
<div id="content">
<div class="post">
{% block content %}
<div id="mouter">
<div id="minner">
<p>Hello World</p>
</div>
</div>
{% endblock %}
</div>
<div style="clear: both;">&nbsp;</div>
</div>
<div style="clear: both;">&nbsp;</div>
</div>
<!-- end #page -->
</div>
</div>
<!-- end #footer -->
</div>
<div id="footer">
<p>Copyright (c) 2012 Tranquil IT Systems. All rights reserved. Design by <a href="http://www.tranquil-it-systems.fr/">TIS</a>.</p>
</div>
</body>
</html>

418
tisbackup.py Normal file
View File

@ -0,0 +1,418 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os
import datetime
import subprocess
from iniparse import ConfigParser
from optparse import OptionParser
import re
import sys
import getopt
import os.path
import logging
from libtisbackup.common import *
from libtisbackup.backup_mysql import backup_mysql
from libtisbackup.backup_rsync import backup_rsync
from libtisbackup.backup_rsync import backup_rsync_ssh
from libtisbackup.backup_pgsql import backup_pgsql
from libtisbackup.backup_xva import backup_xva
#from libtisbackup.backup_switch import backup_switch
from libtisbackup.backup_null import backup_null
from libtisbackup.backup_xcp_metadata import backup_xcp_metadata
from libtisbackup.copy_vm_xcp import copy_vm_xcp
# Command-line interface: usage text and option definitions.
usage = """\
%prog -c configfile action

TIS Files Backup system.

action is either :
 backup : launch all backups or a specific one if -s option is used
 cleanup : removed backups older than retention period
 checknagios : check all or a specific backup against max_backup_age parameter
 dumpstat : dump the content of database for the last 20 backups
 retryfailed : try to relaunch the last failed backups
 listdrivers : list available backup types and parameters for config inifile
 exportbackup : copy latest OK backups from local to location defined by --exportdir parameter
 register_existing : scan backup directories and add missing backups to database"""
# FIX: corrected typos in user-facing help text ("retension" -> "retention",
# "lastest" -> "latest", "defned" -> "defined", "(nodefault)" -> "(no default)").

version = "0.7.3"

parser = OptionParser(usage=usage, version="%prog " + version)
parser.add_option("-c", "--config", dest="config", default='/etc/tis/tisbackup-config.ini',
                  help="Config file full path (default: %default)")
parser.add_option("-d", "--dry-run", dest="dry_run", default=False, action='store_true',
                  help="Dry run (default: %default)")
parser.add_option("-v", "--verbose", dest="verbose", default=False, action='store_true',
                  help="More information (default: %default)")
parser.add_option("-s", "--sections", dest="sections", default='',
                  help="Comma separated list of sections (backups) to process (default: All)")
parser.add_option("-l", "--loglevel", dest="loglevel", default='info', type='choice',
                  choices=['debug', 'warning', 'info', 'error', 'critical'], metavar='LOGLEVEL',
                  help="Loglevel (default: %default)")
parser.add_option("-n", "--len", dest="statscount", default=30, type='int',
                  help="Number of lines to list for dumpstat (default: %default)")
parser.add_option("-b", "--backupdir", dest="backup_base_dir", default='',
                  help="Base directory for all backups (default: [global] backup_base_dir in config file)")
parser.add_option("-x", "--exportdir", dest="exportdir", default='',
                  help="Directory where to export latest backups with exportbackup (no default)")
class tis_backup:
logger = logging.getLogger('tisbackup')
def __init__(self,dry_run=False,verbose=False,backup_base_dir=''):
self.dry_run = dry_run
self.verbose = verbose
self.backup_base_dir = backup_base_dir
self.backup_base_dir = ''
self.backup_list = []
self.dry_run = dry_run
self.verbose=False
def read_ini_file(self, filename):
    """Load the backup configuration from *filename* (an .ini file).

    Reads backup_base_dir from the [global] section unless it was already
    set, creates the backup directory if missing, opens the SQLite stats
    database, and instantiates one backup driver per non-global section,
    appending each to self.backup_list.
    """
    cp = ConfigParser()
    cp.read(filename)

    if not self.backup_base_dir:
        self.backup_base_dir = cp.get('global', 'backup_base_dir')
    if not os.path.isdir(self.backup_base_dir):
        self.logger.info('Creating backup directory %s' % self.backup_base_dir)
        os.makedirs(self.backup_base_dir)
    self.logger.debug("backup directory : " + self.backup_base_dir)

    # Per-backup statistics are persisted in an SQLite db under <base>/log.
    self.dbstat = BackupStat(os.path.join(self.backup_base_dir, 'log', 'tisbackup.sqlite'))

    for section in cp.sections():
        if section != 'global':
            self.logger.debug("reading backup config " + section)
            # FIX: renamed local "type" -> "backup_type" to stop shadowing
            # the builtin type().
            backup_type = cp.get(section, 'type')
            backup_item = backup_drivers[backup_type](
                backup_name=section,
                backup_dir=os.path.join(self.backup_base_dir, section),
                dbstat=self.dbstat,
                dry_run=self.dry_run)
            backup_item.read_config(cp)
            backup_item.verbose = self.verbose
            self.backup_list.append(backup_item)

    # TODO check hostname socket.gethostbyname_ex('cnn.com')
    # TODO socket.gethostbyaddr('64.236.16.20')
    # TODO limit backup to one backup on the command line
def checknagios(self,sections=[],maxage_hours=None):
try:
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.debug('Start of check nagios for %s' % (','.join(sections),))
try:
worst_nagiosstatus = None
ok = []
warning = []
critical = []
unknown = []
nagiosoutput = ''
for backup_item in self.backup_list:
if not sections or backup_item.backup_name in sections:
assert(isinstance(backup_item,backup_generic))
if not maxage_hours:
maxage_hours = backup_item.maximum_backup_age
(nagiosstatus,log) = backup_item.checknagios(maxage_hours=maxage_hours)
if nagiosstatus == nagiosStateCritical:
critical.append((backup_item.backup_name,log))
elif nagiosstatus == nagiosStateWarning :
warning.append((backup_item.backup_name,log))
elif nagiosstatus == nagiosStateOk:
ok.append((backup_item.backup_name,log))
else:
unknown.append((backup_item.backup_name,log))
self.logger.debug('[%s] nagios:"%i" log: %s',backup_item.backup_name,nagiosstatus,log)
if not ok and not critical and not unknown and not warning:
self.logger.debug('Nothing processed')
worst_nagiosstatus = nagiosStateUnknown
nagiosoutput = 'UNKNOWN : Unknown backup sections "%s"' % sections
globallog = []
if unknown:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateUnknown
nagiosoutput = 'UNKNOWN status backups %s' % (','.join([b[0] for b in unknown]))
globallog.extend(unknown)
if critical:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateCritical
nagiosoutput = 'CRITICAL backups %s' % (','.join([b[0] for b in critical]))
globallog.extend(critical)
if warning:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateWarning
nagiosoutput = 'WARNING backups %s' % (','.join([b[0] for b in warning]))
globallog.extend(warning)
if ok:
if not worst_nagiosstatus:
worst_nagiosstatus = nagiosStateOk
nagiosoutput = 'OK backups %s' % (','.join([b[0] for b in ok]))
globallog.extend(ok)
if worst_nagiosstatus == nagiosStateOk:
nagiosoutput = 'ALL backups OK %s' % (','.join(sections))
except BaseException,e:
worst_nagiosstatus = nagiosStateCritical
nagiosoutput = 'EXCEPTION',"Critical : %s" % str(e)
raise
finally:
self.logger.debug('worst nagios status :"%i"',worst_nagiosstatus)
print '%s (tisbackup V%s)' %(nagiosoutput,version)
print '\n'.join(["[%s]:%s" % (l[0],l[1]) for l in globallog])
sys.exit(worst_nagiosstatus)
def process_backup(self,sections=[]):
processed = []
errors = []
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Processing backup for %s' % (','.join(sections)) )
for backup_item in self.backup_list:
if not sections or backup_item.backup_name in sections:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing [%s]',(backup_item.backup_name))
stats = backup_item.process_backup()
processed.append((backup_item.backup_name,stats))
except BaseException,e:
self.logger.critical('Backup [%s] processed with error : %s',backup_item.backup_name,e)
errors.append((backup_item.backup_name,str(e)))
if not processed and not errors:
self.logger.critical('No backup properly finished or processed')
else:
if processed:
self.logger.info('Backup processed : %s' , ",".join([b[0] for b in processed]))
if errors:
self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
def export_backups(self,sections=[],exportdir=''):
processed = []
errors = []
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Exporting OK backups for %s to %s' % (','.join(sections),exportdir) )
for backup_item in self.backup_list:
if backup_item.backup_name in sections:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing [%s]',(backup_item.backup_name))
stats = backup_item.export_latestbackup(destdir=exportdir)
processed.append((backup_item.backup_name,stats))
except BaseException,e:
self.logger.critical('Export Backup [%s] processed with error : %s',backup_item.backup_name,e)
errors.append((backup_item.backup_name,str(e)))
if not processed and not errors:
self.logger.critical('No export backup properly finished or processed')
else:
if processed:
self.logger.info('Export Backups processed : %s' , ",".join([b[0] for b in processed]))
if errors:
self.logger.error('Export Backups processed with errors: %s' , ",".join([b[0] for b in errors]))
def retry_failed_backups(self,maxage_hours=30):
processed = []
errors = []
# before mindate, backup is too old
mindate = datetime2isodate((datetime.datetime.now() - datetime.timedelta(hours=maxage_hours)))
failed_backups = self.dbstat.query("""\
select distinct s.backup_name as bname,
(select max(backup_start) from stats where status="OK" and backup_name=s.backup_name) as lastok
from stats s
where
(s.status<>"OK" and (s.backup_start>lastok or lastok is null))
or (s.backup_start=lastok and s.backup_start<=?)
order by s.backup_start desc""",(mindate,))
defined_backups = map(lambda f:f.backup_name,self.backup_list)
failed_backups_names = [b['bname'] for b in failed_backups if b['bname'] in defined_backups]
if failed_backups_names:
self.logger.info('Processing backup for %s',','.join(failed_backups_names))
for backup_item in self.backup_list:
if backup_item.backup_name in failed_backups_names:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing [%s]',(backup_item.backup_name))
stats = backup_item.process_backup()
processed.append((backup_item.backup_name,stats))
except BaseException,e:
self.logger.critical('Backup [%s] not processed, error : %s',backup_item.backup_name,e)
errors.append((backup_item.backup_name,str(e)))
if not processed and not errors:
self.logger.critical('No backup properly finished or processed')
else:
if processed:
self.logger.info('Backup processed : %s' , ",".join([b[0] for b in errors]))
if errors:
self.logger.error('Backup processed with errors: %s' , ",".join([b[0] for b in errors]))
else:
self.logger.info('No recent failed backups found in database')
def cleanup_backup_section(self,sections = []):
log = ''
processed = False
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Processing cleanup for %s' % (','.join(sections)) )
for backup_item in self.backup_list:
if backup_item.backup_name in sections:
try:
assert(isinstance(backup_item,backup_generic))
self.logger.info('Processing cleanup of [%s]',(backup_item.backup_name))
backup_item.cleanup_backup()
processed = True
except BaseException,e:
self.logger.critical('Cleanup of [%s] not processed, error : %s',backup_item.backup_name,e)
if not processed:
self.logger.critical('No cleanup properly finished or processed')
def register_existingbackups(self,sections = []):
if not sections:
sections = [backup_item.backup_name for backup_item in self.backup_list]
self.logger.info('Append existing backups to database...')
for backup_item in self.backup_list:
if backup_item.backup_name in sections:
backup_item.register_existingbackups()
def html_report(self):
for backup_item in self.backup_list:
if not section or section == backup_item.backup_name:
assert(isinstance(backup_item,backup_generic))
if not maxage_hours:
maxage_hours = backup_item.maximum_backup_age
(nagiosstatus,log) = backup_item.checknagios(maxage_hours=maxage_hours)
globallog.append('[%s] %s' % (backup_item.backup_name,log))
self.logger.debug('[%s] nagios:"%i" log: %s',backup_item.backup_name,nagiosstatus,log)
processed = True
if nagiosstatus >= worst_nagiosstatus:
worst_nagiosstatus = nagiosstatus
def main():
(options,args)=parser.parse_args()
if len(args) != 1:
print "ERROR : You must provide one action to perform"
parser.print_usage()
sys.exit(2)
backup_start_date = datetime.datetime.now().strftime('%Y%m%d-%Hh%Mm%S')
# options
action = args[0]
if action == "listdrivers":
for t in backup_drivers:
print backup_drivers[t].get_help()
sys.exit(0)
config_file =options.config
dry_run = options.dry_run
verbose = options.verbose
loglevel = options.loglevel
# setup Logger
logger = logging.getLogger('tisbackup')
hdlr = logging.StreamHandler()
hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger.addHandler(hdlr)
# set loglevel
if loglevel in ('debug','warning','info','error','critical'):
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logger.setLevel(numeric_level)
# Config file
if not os.path.isfile(config_file):
logger.error("Error : could not find file : " + config_file + ", please check the path")
logger.info("Using " + config_file + " config file")
cp = ConfigParser()
cp.read(config_file)
backup_base_dir = options.backup_base_dir or cp.get('global','backup_base_dir')
log_dir = os.path.join(backup_base_dir,'log')
if not os.path.exists(log_dir):
os.makedirs(log_dir)
# if we run the nagios check, we don't create log file, everything is piped to stdout
if action!='checknagios':
hdlr = logging.FileHandler(os.path.join(log_dir,'tisbackup_%s.log' % (backup_start_date)))
hdlr.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger.addHandler(hdlr)
# Main
backup = tis_backup(dry_run=dry_run,verbose=verbose,backup_base_dir=backup_base_dir)
backup.read_ini_file(config_file)
backup_sections = options.sections.split(',') if options.sections else []
all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
if not backup_sections:
backup_sections = all_sections
else:
for b in backup_sections:
if not b in all_sections:
raise Exception('Section %s is not defined in config file' % b)
if dry_run:
logger.warning("WARNING : DRY RUN, nothing will be done, just printing on screen...")
if action == "backup":
backup.process_backup(backup_sections)
elif action == "exportbackup":
if not options.exportdir:
raise Exception('No export directory supplied dor exportbackup action')
backup.export_backups(backup_sections,options.exportdir)
elif action == "cleanup":
backup.cleanup_backup_section(backup_sections)
elif action == "checknagios":
backup.checknagios(backup_sections)
elif action == "dumpstat":
for s in backup_sections:
backup.dbstat.last_backups(s,count=options.statscount)
elif action == "retryfailed":
backup.retry_failed_backups()
elif action == "register_existing":
backup.register_existingbackups(backup_sections)
else:
logger.error('Unhandled action "%s", quitting...',action)
sys.exit(1)
# Script entry point (tisbackup.py).
if __name__ == "__main__":
    main()

321
tisbackup_gui.py Executable file
View File

@ -0,0 +1,321 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of TISBackup
#
# TISBackup is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TISBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TISBackup. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------
import os,sys
from shutil import *
from iniparse import ConfigParser
from libtisbackup.common import *
import time
from flask import request, Flask, session, g, redirect, url_for, abort, render_template, flash, jsonify
from urlparse import urlparse
import simplejson as json
import glob
from uwsgidecorators import *
from tisbackup import tis_backup
import logging
import re
# --- uwsgi runtime options (provided on the uwsgi command line) ---
CONFIG = uwsgi.opt['config']
SECTIONS = uwsgi.opt['sections']
# Either spelling of the admin email option is accepted.
ADMIN_EMAIL = uwsgi.opt.get('ADMIN_EMAIL',uwsgi.opt.get('admin_email'))
spooler = uwsgi.opt['spooler']
tisbackup_config_file= uwsgi.opt['config']

# Read the tisbackup ini file once at import time to locate the stat database.
cp = ConfigParser()
cp.read(tisbackup_config_file)
backup_base_dir = cp.get('global','backup_base_dir')
dbstat = BackupStat(os.path.join(backup_base_dir,'log','tisbackup.sqlite'))

# Module-level state shared by the request handlers: mindate is set when an
# export starts; error/info are set via raise_error().
mindate = None
error = None
info = None

app = Flask(__name__)
app.secret_key = 'fsiqefiuqsefARZ4Zfesfe34234dfzefzfe'
app.config['PROPAGATE_EXCEPTIONS'] = True
def read_config():
    """Parse the tisbackup ini file and return a dict mapping each backup type
    to a list of [server_name, backup_name, type, detail] rows for the
    templates (detail is the remote dir or db name where relevant)."""
    config_file = CONFIG

    cp = ConfigParser()
    cp.read(config_file)

    backup_base_dir = cp.get('global','backup_base_dir')
    backup = tis_backup(backup_base_dir=backup_base_dir)
    backup.read_ini_file(config_file)

    # The SECTIONS uwsgi option may restrict which sections are displayed.
    backup_sections = SECTIONS or []
    all_sections = [backup_item.backup_name for backup_item in backup.backup_list]
    if not backup_sections:
        backup_sections = all_sections
    else:
        for b in backup_sections:
            if not b in all_sections:
                raise Exception('Section %s is not defined in config file' % b)

    result = []
    for backup_item in backup.backup_list:
        if backup_item.backup_name in backup_sections:
            b = {}
            # BUGFIX: the original iterated required_params+required_params
            # (the same list twice).  NOTE(review): optional_params may have
            # been intended as the second operand -- confirm against
            # backup_generic; as written only required parameters are exported,
            # which matches the original's observable behavior.
            for attrib_name in backup_item.required_params:
                if hasattr(backup_item,attrib_name):
                    b[attrib_name] = getattr(backup_item,attrib_name)
            result.append(b)

    # Bucket the rows by driver type for the template.
    backup_dict = {}
    backup_dict['rsync_ssh_list'] = []
    backup_dict['rsync_list'] = []
    backup_dict['null_list'] = []
    backup_dict['pgsql_list'] = []
    backup_dict['mysql_list'] = []
    backup_dict['xva_list'] = []
    backup_dict['metadata_list'] = []
    backup_dict['switch_list'] = []
    for row in result:
        backup_name = row['backup_name']
        server_name = row['server_name']
        backup_type = row['type']
        if backup_type == "xcp-dump-metadata":
            backup_dict['metadata_list'].append([server_name, backup_name, backup_type, ""])
        if backup_type == "rsync+ssh":
            remote_dir = row['remote_dir']
            backup_dict['rsync_ssh_list'].append([server_name, backup_name, backup_type,remote_dir])
        if backup_type == "rsync":
            remote_dir = row['remote_dir']
            backup_dict['rsync_list'].append([server_name, backup_name, backup_type,remote_dir])
        if backup_type == "null":
            backup_dict['null_list'].append([server_name, backup_name, backup_type, ""])
        if backup_type == "pgsql+ssh":
            db_name = row['db_name']
            backup_dict['pgsql_list'].append([server_name, backup_name, backup_type, db_name])
        if backup_type == "mysql+ssh":
            db_name = row['db_name']
            backup_dict['mysql_list'].append([server_name, backup_name, backup_type, db_name])
        if backup_type == "xen-xva":
            backup_dict['xva_list'].append([server_name, backup_name, backup_type, ""])
        if backup_type == "switch":
            backup_dict['switch_list'].append([server_name, backup_name, backup_type, ""])
    return backup_dict
@app.route('/')
def backup_all():
    """Render the overview page with every configured backup."""
    return render_template('backups.html', backup_list = read_config())
@app.route('/json')
def backup_json():
    """Return every configured backup as a flat JSON array."""
    cfg = read_config()
    # Same concatenation order as the template page.
    flat = (cfg['rsync_list']+cfg['rsync_ssh_list']+cfg['pgsql_list']+cfg['mysql_list']
            +cfg['xva_list']+cfg['null_list']+cfg['metadata_list']+ cfg['switch_list'])
    return json.dumps(flat)
#def check_usb_disk():
# """This method returns the mounts point of FIRST external disk"""
# disk_name = []
# for name in glob.glob('/dev/sd[a-z]'):
# for line in os.popen("udevinfo --query=env --name %s" % name):
# if "ID_BUS=usb" in line:
# disk_name += [ name ]
# if len(disk_name) == 0:
# raise_error("cannot find external usb disk", "You should plug the usb hard drive into the server")
# return ""
# elif len(disk_name) > 1:
# raise_error("There are many usb disk", "You should plug remove one of them")
# return ""
# else:
# disk_name = disk_name[0]
# flash("The first usb media is: %s" % disk_name)
# if os.path.exists(disk_name+"1"):
# flash("partition found: %s1" % disk_name)
# partition_name = disk_name+"1"
# else:
# raise_error("No partition exist", "You should initialize the usb drive")
# return ""
# if not "tisbackup" in os.popen("/sbin/dumpe2fs -h %s 2>&1 |/bin/grep 'volume name'" % partition_name).read():
# raise_error("the label is not vaid", "You should use 'TISBACKUP' label")
# return ""
# if not "ext4" in os.popen("/sbin/fsck -N %s 2>&1" % partition_name).read():
# raise_error("bad file system", "You should format usb drive into ext4")
# return ""
# return partition_name
def check_usb_disk():
"""This method returns the mounts point of FIRST external disk"""
# disk_name = []
usb_disk_list = []
for name in glob.glob('/dev/sd[a-z]'):
for line in os.popen("udevadm info -q env -n %s" % name):
if re.match("ID_PATH=.*usb.*", line):
usb_disk_list += [ name ]
if len(usb_disk_list) == 0:
raise_error("cannot find external usb disk", "You should plug the usb hard drive into the server")
return ""
print usb_disk_list
usb_partition_list = []
for usb_disk in usb_disk_list:
cmd = "udevadm info -q path -n %s" % usb_disk + '1'
output = os.popen(cmd).read()
print "cmd : " + cmd
print "output : " + output
if '/devices/pci' in output:
#flash("partition found: %s1" % usb_disk)
usb_partition_list.append(usb_disk + "1")
print usb_partition_list
tisbackup_partition_list = []
for usb_partition in usb_partition_list:
if "tisbackup" in os.popen("/sbin/dumpe2fs -h %s 2>&1 |/bin/grep 'volume name'" % usb_partition).read().lower():
flash("tisbackup backup partition found: %s" % usb_partition)
tisbackup_partition_list.append(usb_partition)
print tisbackup_partition_list
if len(tisbackup_partition_list) ==0:
raise_error("No tisbackup partition exist on external disk", "You should initialize the usb drive and set TISBACKUP label on ext4 partition")
return ""
if len(tisbackup_partition_list) > 1:
raise_error("There are many usb disk", "You should plug remove one of them")
return ""
return tisbackup_partition_list[0]
def check_mount_disk(partition_name, refresh):
    """Return the mount point of *partition_name*.

    When *refresh* is false, any existing mount is unmounted and the partition
    is remounted on a fresh /mnt/<timestamp> directory; on mount failure an
    error is flagged via raise_error() and "" is returned.  When *refresh* is
    true, the currently recorded mount point (possibly "") is returned as-is.
    """
    flash("check if disk is mounted")
    already_mounted = False
    mount_point = ""
    # /proc/mounts lines look like: "<device> <mountpoint> <fstype> ...".
    # BUGFIX/idiom: use a context manager so the file is closed even on error
    # (the original leaked the handle if split()/startswith raised).
    with open('/proc/mounts') as f:
        for line in f:
            if line.startswith(partition_name):
                already_mounted = True
                mount_point = line.split(' ')[1]
                break

    if not refresh:
        if already_mounted:
            os.system("/bin/umount %s" % mount_point)
            os.rmdir(mount_point)
        # Unique mount point per attempt.
        mount_point = "/mnt/" + str(time.time())
        os.mkdir(mount_point)
        flash("must mount " + partition_name )
        cmd = "mount %s %s" % (partition_name, mount_point)
        flash("executing : " + cmd)
        result = os.popen(cmd+" 2>&1").read()
        # mount is silent on success; any output longer than one char means failure.
        if len(result) > 1:
            raise_error(result, "You should manualy mount the usb drive")
            return ""
    return mount_point
@app.route('/status.json')
def export_backup_status():
    """Return the EXPORT stat rows started at or after the current export's
    mindate as JSON."""
    # Parameterized query instead of string interpolation (BackupStat.query
    # accepts qmark-style parameters; avoids SQL quoting issues).  When
    # mindate is None both forms return no rows.
    exports = dbstat.query('select * from stats where TYPE="EXPORT" and backup_start>=?',(mindate,))
    return jsonify(data=exports)
@app.route('/backups.json')
def last_backup_json():
    """Return every BACKUP stat row, newest first, as JSON."""
    rows = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC ')
    return jsonify(data=rows)
@app.route('/last_backups')
def last_backup():
    """Render the 20 most recent backup runs."""
    recent = dbstat.query('select * from stats where TYPE="BACKUP" ORDER BY backup_start DESC LIMIT 20 ')
    return render_template("last_backups.html", backups=recent)
@app.route('/export_backup')
def export_backup():
    """Detect the usb backup partition, mount it and (when requested via
    ?start or when a spool job is already queued) spool the export task."""
    # Reset any error/info left over from a previous request.
    raise_error("", "")
    # A non-empty spooler directory means an export job is already running.
    noJobs = ( len(os.listdir(spooler)) == 0 )
    # Idiom: membership test on request.args directly instead of .keys().
    start = "start" in request.args or not noJobs

    cp.read(tisbackup_config_file)

    partition_name = check_usb_disk()
    if partition_name:
        if noJobs:
            # Fresh run: (re)mount the partition on a new mount point.
            mount_point = check_mount_disk( partition_name, False)
        else:
            # Job in progress: just look up the existing mount point.
            mount_point = check_mount_disk( partition_name, True)
        if noJobs:
            global mindate
            mindate = datetime2isodate(datetime.datetime.now())
            if not error and start:
                run_export_backup.spool(base=backup_base_dir, config_file=tisbackup_config_file, mount_point=mount_point)
    return render_template("export_backup.html", error=error, start=start, info=info, email=ADMIN_EMAIL)
def raise_error(strError, strInfo):
    """Record an error message and its advice text in the module-level
    error/info globals consumed by the templates."""
    global error, info
    error, info = strError, strInfo
def cleanup():
if os.path.isdir(spooler):
print "cleanup ", spooler
rmtree(spooler)
os.mkdir(spooler)
@spool
def run_export_backup(args):
    """uwsgi spooler task: export the latest OK backups onto the mounted usb
    drive, then unmount it and clear the spool directory.

    args keys: 'base' (backup base dir), 'config_file', 'mount_point'."""
    #Log
    task_logger = logging.getLogger('tisbackup')
    task_logger.setLevel(logging.INFO)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    task_logger.addHandler(stream_handler)

    # Main
    task_logger.info("Running export....")
    backup = tis_backup(dry_run=False,verbose=True,backup_base_dir=args['base'])
    backup.read_ini_file(args['config_file'])
    mount_point = args['mount_point']
    # Empty section list means "export everything".
    backup.export_backups([],mount_point)

    os.system("/bin/umount %s" % mount_point)
    os.rmdir(mount_point)
    cleanup()
# Dev-server entry point (tisbackup_gui.py): validate the config once, then
# serve on all interfaces with the Flask debugger enabled.
if __name__ == "__main__":
    read_config()
    app.debug=True
    app.run(host='0.0.0.0',port=8000, debug=True)