# NOTE(review): this span is a patch fragment ('+' marks added lines) of a
# snapshot-verification script; context lines between hunks are missing, so
# the statements below are discontinuous — do not read this as straight-line
# control flow.
lowlevel = lbs.LowlevelDataStore(options.store)
store = lbs.ObjectStore(lowlevel)
for s in snapshots:
# Reset the per-session record of accessed segments so the descriptor
# cross-check below reflects only the reads done for this snapshot.
+ lbs.accessed_segments.clear()
print "#### Snapshot", s
d = lbs.parse_full(store.load_snapshot(s))
check_version(d['Format'])
# NOTE(review): the 'if' condition guarding this raise is not visible in
# this fragment — confirm against the full file.
raise ValueError("File size does not match!")
if not verifier.valid():
raise ValueError("Bad checksum found")
+
+ # Verify that the list of segments included with the snapshot was
+ # actually accurate: covered all segments that were really read, and
+ # doesn't contain duplicates.
+ listed_segments = set(d['Segments'].split())
+ if lbs.accessed_segments - listed_segments:
+ print "Error: Some segments not listed in descriptor!"
+ print sorted(list(lbs.accessed_segments - listed_segments))
+ if listed_segments - lbs.accessed_segments :
+ print "Warning: Extra unused segments listed in descriptor!"
+ print sorted(list(listed_segments - lbs.accessed_segments))
store.cleanup()
# Restore a snapshot, or some subset of files from it
# Maximum number of nested indirect references allowed in a snapshot.
MAX_RECURSION_DEPTH = 3
# NOTE(review): the added set below is populated by get_segment()/load_object()
# on every segment read, and consumed by the verifier script to compare actual
# reads against the snapshot descriptor's 'Segments' list.
+# All segments which have been accessed this session.
+accessed_segments = set()
+
class Struct:
"""A class which merely acts as a data container.
return (segment, object, checksum, slice)
def get_segment(self, segment):
# Record that this segment was read, so the descriptor verification pass
# can detect segments missing from (or unused in) the snapshot's list.
+ accessed_segments.add(segment)
raw = self.store.lowlevel_open(segment + ".tar.gpg")
# Decrypt the segment through an external gpg filter process.
# NOTE(review): the lines between the popen2 call and f.close() are not
# visible in this patch fragment — the plumbing of 'raw' into the filter
# and the definition of 'f' happen in the omitted context.
(input, output) = os.popen2("lbs-filter-gpg --decrypt")
f.close()
def load_object(self, segment, object):
+ accessed_segments.add(segment)
path = os.path.join(self.get_cachedir(), segment, object)
if not os.access(path, os.R_OK):
self.extract_segment(segment)