#
# Utility for managing LBS archives.
-import getpass, os, sys
+import getpass, os, stat, sys, time
from optparse import OptionParser
import lbs
                raise ValueError("Bad checksum found")
    store.cleanup()
+# Restore a snapshot, or some subset of files from it
+def cmd_restore_snapshot(args):
+    get_passphrase()
+    lowlevel = lbs.LowlevelDataStore(options.store)
+    store = lbs.ObjectStore(lowlevel)
+    snapshot = lbs.parse_full(store.load_snapshot(args[0]))
+    destdir = args[1]
+    paths = args[2:]
+
+    def warn(m, msg):
+        print "Warning: %s: %s" % (m.items.name, msg)
+
+    for m in lbs.iterate_metadata(store, snapshot['Root']):
+        pathname = os.path.normpath(m.items.name)
+        # Strip leading "/" so the path is interpreted relative to destdir
+        while os.path.isabs(pathname):
+            pathname = pathname[1:]
+        print pathname
+        destpath = os.path.join(destdir, pathname)
+        (path, filename) = os.path.split(destpath)
+
+        # TODO: Check for ../../../ paths that might attempt to write outside
+        # the destination directory.  Maybe also check attempts to follow
+        # symlinks pointing outside?
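+        # One possible form for that check (a sketch only, not implemented
+        # here): canonicalize the target and require it to remain under the
+        # destination, e.g.
+        #     if not os.path.realpath(destpath).startswith(
+        #             os.path.realpath(destdir) + os.sep):
+        #         skip this entry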
+
+        try:
+            if not os.path.isdir(path):
+                os.makedirs(path)
+
+            if m.items.type == '-':
+                # Regular file: reassemble the contents from its data blocks,
+                # verifying the size and checksum along the way
+                f = open(destpath, 'wb')
+                verifier = lbs.ChecksumVerifier(m.items.checksum)
+                size = 0
+                for block in m.data():
+                    data = store.get(block)
+                    verifier.update(data)
+                    size += len(data)
+                    f.write(data)
+                f.close()
+                if int(m.fields['size']) != size:
+                    raise ValueError("File size does not match!")
+                if not verifier.valid():
+                    raise ValueError("Bad checksum found")
+            elif m.items.type == 'd':
+                if filename != '.':
+                    os.mkdir(destpath)
+            elif m.items.type == 'l':
+                os.symlink(m.items.contents, destpath)
+            elif m.items.type == 'p':
+                os.mkfifo(destpath)
+            elif m.items.type in ('c', 'b'):
+                # Character or block device node
+                if m.items.type == 'c':
+                    mode = 0600 | stat.S_IFCHR
+                else:
+                    mode = 0600 | stat.S_IFBLK
+                os.mknod(destpath, mode, os.makedev(*m.items.device))
+            elif m.items.type == 's':
+                pass            # TODO: Implement
+            else:
+                warn(m, "Unknown type code: " + m.items.type)
+                continue
+
+        except Exception, e:
+            warn(m, "Error restoring: %s" % (e,))
+            continue
+
+        # Restore ownership, permissions, and timestamps; failures here are
+        # reported as warnings rather than treated as fatal
+        try:
+            uid = m.items.user[0]
+            gid = m.items.group[0]
+            os.lchown(destpath, uid, gid)
+        except Exception, e:
+            warn(m, "Error restoring file ownership: %s" % (e,))
+
+        # os.chmod and os.utime would follow a symbolic link and act on its
+        # target, so skip them for symlinks
+        if m.items.type == 'l':
+            continue
+
+        try:
+            os.chmod(destpath, m.items.mode)
+        except Exception, e:
+            warn(m, "Error restoring file permissions: %s" % (e,))
+
+        try:
+            os.utime(destpath, (time.time(), m.items.mtime))
+        except Exception, e:
+            warn(m, "Error restoring file timestamps: %s" % (e,))
+
+    store.cleanup()
+
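+# Example invocation (script, store path, and snapshot name are illustrative;
+# the store option corresponds to options.store above):
+#     lbs-util --store=/backups/store restore-snapshot snapshot-name /tmp/restore
+# Any extra arguments are collected into `paths`, though the code above does
+# not yet filter the restore down to that subset.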
if len(args) == 0:
    parser.print_usage()
    sys.exit(1)
    cmd_list_snapshot_sizes()
elif cmd == 'verify-snapshots':
    cmd_verify_snapshots(args)
+elif cmd == 'restore-snapshot':
+    cmd_restore_snapshot(args)
else:
    print "Unknown command:", cmd
    parser.print_usage()
class MetadataItem:
"""Metadata for a single file (or directory or...) from a snapshot."""
+ # Functions for parsing various datatypes that can appear in a metadata log
+ # item.
+ @staticmethod
+ def decode_int(s):
+ """Decode an integer, expressed in decimal, octal, or hexadecimal."""
+ if s.startswith("0x"):
+ return int(s, 16)
+ elif s.startswith("0"):
+ return int(s, 8)
+ else:
+ return int(s, 10)
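+    # For example, decode_int("0x1ff"), decode_int("0777"), and
+    # decode_int("511") all return 511.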
+
+    @staticmethod
+    def decode_str(s):
+        """Decode a URI-encoded (%xx escapes) string."""
+        def hex_decode(m): return chr(int(m.group(1), 16))
+        return re.sub(r"%([0-9a-f]{2})", hex_decode, s)
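+    # For example, decode_str("a%20b") returns "a b".  Note that the pattern
+    # only matches lowercase hex digits.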
+
+    @staticmethod
+    def raw_str(s):
+        """Return the string unmodified (no decoding applied)."""
+        return s
+
+    @staticmethod
+    def decode_user(s):
+        """Decode a user/group field to a (uid/gid, name) tuple."""
+        items = s.split()
+        uid = MetadataItem.decode_int(items[0])
+        name = None
+        if len(items) > 1:
+            if items[1].startswith("(") and items[1].endswith(")"):
+                name = MetadataItem.decode_str(items[1][1:-1])
+        return (uid, name)
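+    # For example, decode_user("1000 (alice)") returns (1000, "alice"), and
+    # decode_user("0") returns (0, None).  (The name "alice" is illustrative.)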
+
+    @staticmethod
+    def decode_device(s):
+        """Decode a device major/minor number."""
+        (major, minor) = map(MetadataItem.decode_int, s.split("/"))
+        return (major, minor)
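+    # For example, decode_device("8/1") returns the tuple (8, 1).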
+
+    class Items: pass
+
    def __init__(self, fields, object_store):
        """Initialize from a dictionary of key/value pairs from a metadata log."""
        self.fields = fields
        self.object_store = object_store
+        self.keys = []
+        self.items = self.Items()
+        for (k, v) in fields.items():
+            if k in self.field_types:
+                decoder = self.field_types[k]
+                setattr(self.items, k, decoder(v))
+                self.keys.append(k)
    def data(self):
        """Return an iterator for the data blocks that make up a file."""
            else:
                yield ref
+# Description of fields that might appear, and how they should be parsed.
+MetadataItem.field_types = {
+    'name': MetadataItem.decode_str,
+    'type': MetadataItem.raw_str,
+    'mode': MetadataItem.decode_int,
+    'device': MetadataItem.decode_device,
+    'user': MetadataItem.decode_user,
+    'group': MetadataItem.decode_user,
+    'mtime': MetadataItem.decode_int,
+    'links': MetadataItem.decode_int,
+    'inode': MetadataItem.raw_str,
+    'checksum': MetadataItem.decode_str,
+    'size': MetadataItem.decode_int,
+    'contents': MetadataItem.decode_str,
+}
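+# For example, an item whose "mode" field holds the string "0644" decodes
+# via decode_int to the integer 420, exposed as m.items.mode.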
+
def iterate_metadata(object_store, root):
    for d in parse(read_metadata(object_store, root), lambda l: len(l) == 0):
        yield MetadataItem(d, object_store)
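+# Example usage (assumes an ObjectStore and a parsed snapshot, as in
+# cmd_restore_snapshot above): list the names of all regular files:
+#     for m in iterate_metadata(store, snapshot['Root']):
+#         if m.items.type == '-':
+#             print m.items.name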