Update copyright dates in source files.
[cumulus.git] / lbs-util
index 8789a75..942a601 100755 (executable)
--- a/lbs-util
+++ b/lbs-util
@@ -22,6 +22,8 @@ parser.add_option("--store", dest="store",
                   help="specify path to backup data store")
 parser.add_option("--localdb", dest="localdb",
                   help="specify path to local database")
+parser.add_option("--intent", dest="intent", default=1.0,
+                  help="give expected next snapshot type when cleaning")
 (options, args) = parser.parse_args(sys.argv[1:])
 
 # Read a passphrase from the user and store it in the LBS_GPG_PASSPHRASE
@@ -38,8 +40,8 @@ def cmd_prune_db():
     db = lbs.LocalDatabase(options.localdb)
 
     # Delete old snapshots from the local database.
-    db.garbage_collect()
-    db.commit()
+    #db.garbage_collect()
+    #db.commit()
 
 # Run the segment cleaner.
 # Syntax: $0 --localdb=LOCALDB clean
@@ -47,7 +49,9 @@ def cmd_clean(clean_threshold=7.0):
     db = lbs.LocalDatabase(options.localdb)
 
     # Delete old snapshots from the local database.
-    db.garbage_collect()
+    intent = float(options.intent)
+    for s in db.list_schemes():
+        db.garbage_collect(s, intent)
 
     # Expire segments which are poorly-utilized.
     for s in db.get_segment_cleaning_list():
@@ -122,7 +126,13 @@ def cmd_read_metadata(snapshot):
     d = lbs.parse_full(store.load_snapshot(snapshot))
     check_version(d['Format'])
     metadata = lbs.read_metadata(store, d['Root'])
+    blank = True
     for l in metadata:
+        if l == '\n':
+            if blank: continue
+            blank = True
+        else:
+            blank = False
         sys.stdout.write(l)
     store.cleanup()
 
@@ -132,6 +142,7 @@ def cmd_verify_snapshots(snapshots):
     lowlevel = lbs.LowlevelDataStore(options.store)
     store = lbs.ObjectStore(lowlevel)
     for s in snapshots:
+        lbs.accessed_segments.clear()
         print "#### Snapshot", s
         d = lbs.parse_full(store.load_snapshot(s))
         check_version(d['Format'])
@@ -150,6 +161,17 @@ def cmd_verify_snapshots(snapshots):
                 raise ValueError("File size does not match!")
             if not verifier.valid():
                 raise ValueError("Bad checksum found")
+
+        # Verify that the list of segments included with the snapshot was
+        # actually accurate: covered all segments that were really read, and
+        # doesn't contain duplicates.
+        listed_segments = set(d['Segments'].split())
+        if lbs.accessed_segments - listed_segments:
+            print "Error: Some segments not listed in descriptor!"
+            print sorted(list(lbs.accessed_segments - listed_segments))
+        if listed_segments - lbs.accessed_segments:
+            print "Warning: Extra unused segments listed in descriptor!"
+            print sorted(list(listed_segments - lbs.accessed_segments))
     store.cleanup()
 
 # Restore a snapshot, or some subset of files from it
@@ -162,14 +184,91 @@ def cmd_restore_snapshot(args):
     destdir = args[1]
     paths = args[2:]
 
+    def matchpath(path):
+        "Return true if the specified path should be included in the restore."
+
+        # No specification of what to restore => restore everything
+        if len(paths) == 0: return True
+
+        for p in paths:
+            if path == p: return True
+            if path.startswith(p + "/"): return True
+        return False
+
     def warn(m, msg):
         print "Warning: %s: %s" % (m.items.name, msg)
 
+    # Phase 1: Read the complete metadata log and create directory structure.
+    metadata_items = []
+    metadata_paths = {}
+    metadata_segments = {}
     for m in lbs.iterate_metadata(store, snapshot['Root']):
         pathname = os.path.normpath(m.items.name)
         while os.path.isabs(pathname):
             pathname = pathname[1:]
-        print pathname
+        if not matchpath(pathname): continue
+
+        destpath = os.path.join(destdir, pathname)
+        if m.items.type == 'd':
+            path = destpath
+        else:
+            (path, filename) = os.path.split(destpath)
+
+        metadata_items.append((pathname, m))
+        if m.items.type in ('-', 'f'):
+            metadata_paths[pathname] = m
+            for block in m.data():
+                (segment, object, checksum, slice) \
+                    = lbs.ObjectStore.parse_ref(block)
+                if segment not in metadata_segments:
+                    metadata_segments[segment] = set()
+                metadata_segments[segment].add(pathname)
+
+        try:
+            if not os.path.isdir(path):
+                print "mkdir:", path
+                os.makedirs(path)
+        except Exception, e:
+            warn(m, "Error creating directory structure: %s" % (e,))
+            continue
+
+    # Phase 2: Restore files, ordered by how data is stored in segments.
+    def restore_file(pathname, m):
+        assert m.items.type in ('-', 'f')
+        print "extract:", pathname
+        destpath = os.path.join(destdir, pathname)
+
+        file = open(destpath, 'wb')
+        verifier = lbs.ChecksumVerifier(m.items.checksum)
+        size = 0
+        for block in m.data():
+            data = store.get(block)
+            verifier.update(data)
+            size += len(data)
+            file.write(data)
+        file.close()
+        if int(m.fields['size']) != size:
+            raise ValueError("File size does not match!")
+        if not verifier.valid():
+            raise ValueError("Bad checksum found")
+
+    while metadata_segments:
+        (segment, items) = metadata_segments.popitem()
+        print "+ Segment", segment
+        for pathname in sorted(items):
+            if pathname in metadata_paths:
+                restore_file(pathname, metadata_paths[pathname])
+                del metadata_paths[pathname]
+
+    print "+ Remaining files"
+    while metadata_paths:
+        (pathname, m) = metadata_paths.popitem()
+        restore_file(pathname, m)
+
+    # Phase 3: Restore special files (symlinks, devices).
+    # Phase 4: Restore directory permissions and modification times.
+    for (pathname, m) in reversed(metadata_items):
+        print "permissions:", pathname
         destpath = os.path.join(destdir, pathname)
         (path, filename) = os.path.split(destpath)
 
@@ -178,26 +277,8 @@ def cmd_restore_snapshot(args):
         # symlinks pointing outside?
 
         try:
-            if not os.path.isdir(path):
-                os.makedirs(path)
-
-            if m.items.type in ('-', 'f'):
-                file = open(destpath, 'wb')
-                verifier = lbs.ChecksumVerifier(m.items.checksum)
-                size = 0
-                for block in m.data():
-                    data = store.get(block)
-                    verifier.update(data)
-                    size += len(data)
-                    file.write(data)
-                file.close()
-                if int(m.fields['size']) != size:
-                    raise ValueError("File size does not match!")
-                if not verifier.valid():
-                    raise ValueError("Bad checksum found")
-            elif m.items.type == 'd':
-                if filename != '.':
-                    os.mkdir(destpath)
+            if m.items.type in ('-', 'f', 'd'):
+                pass
             elif m.items.type == 'l':
                 try:
                     target = m.items.target