3 # Utility for managing Cumulus archives.
5 import getpass, os, stat, sys, time
6 from optparse import OptionParser
8 # Automatically set Python path, based on script directory. This should be
9 # removed if the tools are properly installed somewhere.
10 script_directory = os.path.dirname(sys.argv[0])
11 sys.path.append(os.path.join(script_directory, 'python'))
# NOTE(review): the `lbs` module used below is presumably imported on a line
# elided from this listing (the embedded numbering jumps 11 -> 18) — confirm.
18 # We support up to "LBS Snapshot v0.8" formats, but are also limited by the lbs
# Cap at the library's own supported version so we never claim more than it can parse.
20 FORMAT_VERSION = min(lbs.FORMAT_VERSION, (0, 8))
def check_version(format):
    """Reject snapshot format strings newer than this tool understands.

    Raises RuntimeError when the parsed metadata version exceeds
    FORMAT_VERSION; otherwise returns None.
    """
    # Refuse to operate on snapshots written in a newer, unknown format.
    if lbs.parse_metadata_version(format) > FORMAT_VERSION:
        raise RuntimeError("Unsupported LBS format: " + format)
# Read a passphrase from the user and store it in the LBS_GPG_PASSPHRASE
# environment variable.  Prompts only when the variable is not already set,
# so a caller can pre-seed it to skip the interactive prompt.
ENV_KEY = 'LBS_GPG_PASSPHRASE'
# Fix: dict.has_key() is deprecated (and removed in Python 3); the `in`
# operator is the equivalent membership test and works on os.environ in
# both Python 2 and Python 3.
if ENV_KEY not in os.environ:
    os.environ[ENV_KEY] = getpass.getpass()
34 def cmd_prune_db(args):
35 """ Delete old snapshots from the local database, though do not
36 actually schedule any segment cleaning.
37 Syntax: $0 --localdb=LOCALDB prune-db
# NOTE(review): the docstring terminator and the body lines that perform the
# actual pruning (embedded lines 38, 40, 42-44) are elided from this listing.
# `options` is the module-level object parsed near the bottom of the file.
39 db = lbs.LocalDatabase(options.localdb)
41 # Delete old snapshots from the local database.
45 def cmd_clean(args, clean_threshold=7.0):
46 """ Run the segment cleaner.
47 Syntax: $0 --localdb=LOCALDB clean
49 db = lbs.LocalDatabase(options.localdb)
51 # Delete old snapshots from the local database.
# `intent` comes from the --intent command-line option (converted to float).
52 intent = float(options.intent)
53 for s in db.list_schemes():
54 db.garbage_collect(s, intent)
56 # Expire segments which are poorly-utilized.
# Only expire a segment when its estimated cleaning benefit exceeds the
# threshold (default 7.0).
57 for s in db.get_segment_cleaning_list():
58 if s.cleaning_benefit > clean_threshold:
59 print "Cleaning segment %d (benefit %.2f)" % (s.id,
# NOTE(review): the continuation of the print call above (embedded line 60)
# is elided from this listing.
61 db.mark_segment_expired(s)
64 db.balance_expired_objects()
67 def cmd_list_snapshots(args):
68 """ List snapshots stored.
69 Syntax: $0 --data=DATADIR list-snapshots
71 store = lbs.LowlevelDataStore(options.store)
72 for s in sorted(store.list_snapshots()):
# NOTE(review): the loop body (embedded line 73, presumably printing each
# snapshot name) is elided from this listing.
75 def cmd_list_snapshot_sizes(args):
76 """ List size of data needed for each snapshot.
77 Syntax: $0 --data=DATADIR list-snapshot-sizes
79 lowlevel = lbs.LowlevelDataStore(options.store)
81 store = lbs.ObjectStore(lowlevel)
# Build a segment-name -> file-extension mapping so segment files can be
# stat()ed by full name below.  NOTE(review): the initialization of `exts`
# (presumably a dict) and of `previous` occurs on elided lines, as do the
# loops that accumulate size/added/addcount.
84 for seg in lowlevel.store.list('segments'):
85 exts.update ([seg.split ('.', 1)])
86 for s in sorted(lowlevel.list_snapshots()):
87 d = lbs.parse_full(store.load_snapshot(s))
88 check_version(d['Format'])
91 intent = float(d['Backup-Intent'])
95 segments = d['Segments'].split()
96 (size, added, removed, addcount, remcount) = (0, 0, 0, 0, 0)
# Hoist the bound method once; it is called repeatedly below.
97 lo_stat = lowlevel.lowlevel_stat
99 segsize = lo_stat('.'.join ((seg, exts[seg])))['size']
101 if seg not in previous:
105 if seg not in segments:
106 removed += lo_stat('.'.join((seg, exts[seg])))['size']
108 previous = set(segments)
# Sizes are printed in MiB (bytes / 1024**2).
109 print "%s [%s]: %.3f +%.3f -%.3f (+%d/-%d segments)" % (s, intent, size / 1024.0**2, added / 1024.0**2, removed / 1024.0**2, addcount, remcount)
111 def cmd_garbage_collect(args):
112 """ Search for any files which are not needed by any current
113 snapshots and offer to delete them.
114 Syntax: $0 --store=DATADIR gc
116 lowlevel = lbs.LowlevelDataStore(options.store)
118 store = lbs.ObjectStore(lowlevel)
119 snapshots = set(lowlevel.list_snapshots())
# NOTE(review): the loop header binding `s`, and the initializations of
# `segments`, `reclaimed`, and the regex match `m`, occur on lines elided
# from this listing.
122 d = lbs.parse_full(store.load_snapshot(s))
123 check_version(d['Format'])
124 segments.update(d['Segments'].split())
# Anything not named by a live snapshot or one of its segments is garbage.
126 referenced = snapshots.union(segments)
128 for (t, r) in cumulus.store.type_patterns.items():
129 for f in lowlevel.store.list(t):
131 if m is None or m.group(1) not in referenced:
132 print "Garbage:", (t, f)
# Size is totaled before the dry-run check, so -n still reports totals.
133 reclaimed += lowlevel.store.stat(t, f)['size']
134 if not options.dry_run:
135 lowlevel.store.delete(t, f)
136 print "Reclaimed space:", reclaimed
# Alternate short name for the same function.
138 cmd_gc = cmd_garbage_collect
140 def cmd_object_checksums(segments):
141 """ Build checksum list for objects in the given segments, or all
142 segments if none are specified.
145 lowlevel = lbs.LowlevelDataStore(options.store)
146 store = lbs.ObjectStore(lowlevel)
# Default to every segment in the store when none are given on the command line.
147 if len(segments) == 0:
148 segments = sorted(lowlevel.list_segments())
# NOTE(review): the outer loop binding `s` over `segments` (embedded line
# 149) is elided from this listing.
150 for (o, data) in store.load_segment(s):
151 csum = lbs.ChecksumCreator().update(data).compute()
# Output format: segment/object:length:checksum
152 print "%s/%s:%d:%s" % (s, o, len(data), csum)
# Alternate name for the same function.
154 object_sums = cmd_object_checksums
156 def cmd_read_snapshots(snapshots):
157 """ Read a snapshot file
160 lowlevel = lbs.LowlevelDataStore(options.store)
161 store = lbs.ObjectStore(lowlevel)
# NOTE(review): the loop binding `s` over `snapshots` (embedded line 162)
# is elided from this listing.
163 d = lbs.parse_full(store.load_snapshot(s))
164 check_version(d['Format'])
# Dump the snapshot's segment list for inspection.
166 print d['Segments'].split()
169 def cmd_read_metadata(args):
170 """ Produce a flattened metadata dump from a snapshot
174 lowlevel = lbs.LowlevelDataStore(options.store)
175 store = lbs.ObjectStore(lowlevel)
# NOTE(review): `snapshot` is bound on an elided line (presumably taken
# from args) — confirm against the full source.
176 d = lbs.parse_full(store.load_snapshot(snapshot))
177 check_version(d['Format'])
178 metadata = lbs.read_metadata(store, d['Root'])
# NOTE(review): the code that writes the flattened dump (embedded lines
# 179-188) is elided from this listing.
189 def cmd_verify_snapshots(snapshots):
190 """ Verify snapshot integrity
193 lowlevel = lbs.LowlevelDataStore(options.store)
194 store = lbs.ObjectStore(lowlevel)
# NOTE(review): the loop headers binding `s` and `m`, and the accumulation
# of `size`, occur on lines elided from this listing.
# Track which segments are actually touched while reading this snapshot.
196 lbs.accessed_segments.clear()
197 print "#### Snapshot", s
198 d = lbs.parse_full(store.load_snapshot(s))
199 check_version(d['Format'])
200 print "## Root:", d['Root']
201 metadata = lbs.iterate_metadata(store, d['Root'])
# Only regular files carry data blocks to verify.
203 if m.fields['type'] not in ('-', 'f'): continue
204 print "%s [%d bytes]" % (m.fields['name'], int(m.fields['size']))
205 verifier = lbs.ChecksumVerifier(m.fields['checksum'])
207 for block in m.data():
208 data = store.get(block)
209 verifier.update(data)
# Both the byte count and the checksum must match the recorded metadata.
211 if int(m.fields['size']) != size:
212 raise ValueError("File size does not match!")
213 if not verifier.valid():
214 raise ValueError("Bad checksum found")
216 # Verify that the list of segments included with the snapshot was
217 # actually accurate: covered all segments that were really read, and
218 # doesn't contain duplicates.
219 listed_segments = set(d['Segments'].split())
220 if lbs.accessed_segments - listed_segments:
221 print "Error: Some segments not listed in descriptor!"
222 print sorted(list(lbs.accessed_segments - listed_segments))
223 if listed_segments - lbs.accessed_segments :
224 print "Warning: Extra unused segments listed in descriptor!"
225 print sorted(list(listed_segments - lbs.accessed_segments))
228 def cmd_restore_snapshot(args):
229 """ Restore a snapshot, or some subset of files from it
232 lowlevel = lbs.LowlevelDataStore(options.store)
233 store = lbs.ObjectStore(lowlevel)
234 snapshot = lbs.parse_full(store.load_snapshot(args[0]))
235 check_version(snapshot['Format'])
# NOTE(review): `destdir`, `paths`, the `def matchpath(path):` and
# `def warn(m, msg):` headers, try/except scaffolding, and several loop
# headers are bound on lines elided from this listing (see the jumps in
# the embedded numbering).
240 "Return true if the specified path should be included in the restore."
242 # No specification of what to restore => restore everything
243 if len(paths) == 0: return True
# A requested path matches itself or any of its descendants.
246 if path == p: return True
247 if path.startswith(p + "/"): return True
251 print "Warning: %s: %s" % (m.items.name, msg)
253 # Phase 1: Read the complete metadata log and create directory structure.
256 metadata_segments = {}
257 for m in lbs.iterate_metadata(store, snapshot['Root']):
258 pathname = os.path.normpath(m.items.name)
# Strip leading slashes so absolute names restore under destdir, not "/".
259 while os.path.isabs(pathname):
260 pathname = pathname[1:]
261 if not matchpath(pathname): continue
263 destpath = os.path.join(destdir, pathname)
264 if m.items.type == 'd':
267 (path, filename) = os.path.split(destpath)
269 metadata_items.append((pathname, m))
270 if m.items.type in ('-', 'f'):
271 metadata_paths[pathname] = m
# Record which segment(s) hold each regular file's data, so Phase 2 can
# restore files grouped by segment.
272 for block in m.data():
273 (segment, object, checksum, slice) \
274 = lbs.ObjectStore.parse_ref(block)
275 if segment not in metadata_segments:
276 metadata_segments[segment] = set()
277 metadata_segments[segment].add(pathname)
280 if not os.path.isdir(path):
284 warn(m, "Error creating directory structure: %s" % (e,))
287 # Phase 2: Restore files, ordered by how data is stored in segments.
# Helper: extract one regular file and verify its size and checksum.
288 def restore_file(pathname, m):
289 assert m.items.type in ('-', 'f')
290 print "extract:", pathname
291 destpath = os.path.join(destdir, pathname)
293 file = open(destpath, 'wb')
294 verifier = lbs.ChecksumVerifier(m.items.checksum)
296 for block in m.data():
297 data = store.get(block)
298 verifier.update(data)
302 if int(m.fields['size']) != size:
303 raise ValueError("File size does not match!")
304 if not verifier.valid():
305 raise ValueError("Bad checksum found")
# Process one segment at a time; each file is restored at most once
# (it is deleted from metadata_paths after extraction).
307 while metadata_segments:
308 (segment, items) = metadata_segments.popitem()
309 print "+ Segment", segment
310 for pathname in sorted(items):
311 if pathname in metadata_paths:
312 restore_file(pathname, metadata_paths[pathname])
313 del metadata_paths[pathname]
# Anything left over (e.g. zero-length files with no data blocks).
315 print "+ Remaining files"
316 while metadata_paths:
317 (pathname, m) = metadata_paths.popitem()
318 restore_file(pathname, m)
320 # Phase 3: Restore special files (symlinks, devices).
321 # Phase 4: Restore directory permissions and modification times.
# Reverse metadata order so children are finalized before their parents'
# permissions and timestamps are applied.
322 for (pathname, m) in reversed(metadata_items):
323 print "permissions:", pathname
324 destpath = os.path.join(destdir, pathname)
325 (path, filename) = os.path.split(destpath)
327 # TODO: Check for ../../../paths that might attempt to write outside
328 # the destination directory. Maybe also check attempts to follow
329 # symlinks pointing outside?
332 if m.items.type in ('-', 'f', 'd'):
334 elif m.items.type == 'l':
336 target = m.items.target
338 # Old (v0.2 format) name for 'target'
339 target = m.items.contents
340 os.symlink(target, destpath)
342 elif m.items.type == 'p':
343 elif m.items.type in ('c', 'b'):
# Character ('c') vs block ('b') device node.
344 if m.items.type == 'c':
345 mode = 0600 | stat.S_IFCHR
347 mode = 0600 | stat.S_IFBLK
348 os.mknod(destpath, mode, os.makedev(*m.items.device))
349 elif m.items.type == 's':
350 pass # TODO: Implement
352 warn(m, "Unknown type code: " + m.items.type)
356 warn(m, "Error restoring: %s" % (e,))
# Ownership: lchown so a symlink itself is chowned, not its target.
360 uid = m.items.user[0]
361 gid = m.items.group[0]
362 os.lchown(destpath, uid, gid)
364 warn(m, "Error restoring file ownership: %s" % (e,))
# Symlinks carry no separate permissions/mtime of their own here.
366 if m.items.type == 'l':
370 os.chmod(destpath, m.items.mode)
372 warn(m, "Error restoring file permissions: %s" % (e,))
375 os.utime(destpath, (time.time(), m.items.mtime))
377 warn(m, "Error restoring file timestamps: %s" % (e,))
381 usage = ["%prog [option]... command [arg]...", "", "Commands:"]
# Auto-generate the usage text from every cmd_* function's docstring.
383 for cmd, method in locals().iteritems():
384 if cmd.startswith ('cmd_'):
385 usage.append(cmd[4:].replace('_', '-') + ':' + method.__doc__)
386 parser = OptionParser(usage="\n".join(usage))
387 parser.add_option("-v", action="store_true", dest="verbose", default=False,
388 help="increase verbosity")
389 parser.add_option("-n", action="store_true", dest="dry_run", default=False,
# NOTE(review): the help= continuation for -n (embedded line 390) is elided
# from this listing.
391 parser.add_option("--store", dest="store",
392 help="specify path to backup data store")
393 parser.add_option("--localdb", dest="localdb",
394 help="specify path to local database")
395 parser.add_option("--intent", dest="intent", default=1.0,
396 help="give expected next snapshot type when cleaning")
397 (options, args) = parser.parse_args(sys.argv[1:])
# Dispatch: map the command name (dashes -> underscores) to a cmd_* function.
# NOTE(review): the binding of `cmd` from args and the call of `method`
# (embedded lines 398-407) are elided from this listing.
404 method = locals().get('cmd_' + cmd.replace('-', '_'))
408 print "Unknown command:", cmd