#!/usr/bin/python
#
# Utility for managing LBS archives.
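#
# Example invocations (paths and snapshot names are placeholders):
#   lbs-util --localdb=/path/to/localdb clean
#   lbs-util --store=/path/to/store list-snapshots
#   lbs-util --store=/path/to/store verify-snapshots SNAPSHOT-NAME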

import getpass, os, sys
from optparse import OptionParser
import lbs

parser = OptionParser(usage="%prog [option]... command [arg]...")
parser.add_option("-v", action="store_true", dest="verbose", default=False,
                  help="increase verbosity")
parser.add_option("--store", dest="store",
                  help="specify path to backup data store")
parser.add_option("--localdb", dest="localdb",
                  help="specify path to local database")
(options, args) = parser.parse_args(sys.argv[1:])

# Read a passphrase from the user and store it in the LBS_GPG_PASSPHRASE
# environment variable.
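# Commands which read encrypted segment data (object-sums, read-snapshots,
# verify-snapshots) call this first so the passphrase is on hand when the
# segments are decrypted.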
def get_passphrase():
    ENV_KEY = 'LBS_GPG_PASSPHRASE'
    if ENV_KEY not in os.environ:
        os.environ[ENV_KEY] = getpass.getpass()

# Run the segment cleaner.
# Syntax: $0 --localdb=LOCALDB clean
def cmd_clean(clean_threshold=7.0):
    db = lbs.LocalDatabase(options.localdb)

    # Delete old snapshots from the local database.
    db.garbage_collect()

    # Expire segments which are poorly-utilized.
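    # The cleaning list is assumed to be sorted by decreasing benefit, so we
    # can stop scanning at the first segment below the threshold.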
    for s in db.get_segment_cleaning_list():
        if s.cleaning_benefit > clean_threshold:
            print "Cleaning segment %d (benefit %.2f)" % (s.id,
                                                          s.cleaning_benefit)
            db.mark_segment_expired(s)
        else:
            break
    db.balance_expired_objects()
    db.commit()

# List snapshots stored.
# Syntax: $0 --store=DATADIR list-snapshots
def cmd_list_snapshots():
    store = lbs.LowlevelDataStore(options.store)
    for s in sorted(store.list_snapshots()):
        print s

# List the size of the data needed for each snapshot.
# Syntax: $0 --store=DATADIR list-snapshot-sizes
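# For each snapshot this prints the total size of the segments it references,
# plus the data added and removed relative to the previous snapshot, in MiB.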
def cmd_list_snapshot_sizes():
    lowlevel = lbs.LowlevelDataStore(options.store)
    store = lbs.ObjectStore(lowlevel)
    previous = set()
    for s in sorted(lowlevel.list_snapshots()):
        d = lbs.parse_full(store.load_snapshot(s))
        segments = d['Segments'].split()
        (size, added, removed) = (0, 0, 0)
        for seg in segments:
            segsize = lowlevel.lowlevel_stat(seg + ".tar.gpg")['size']
            size += segsize
            if seg not in previous: added += segsize
        for seg in previous:
            if seg not in segments:
                removed += lowlevel.lowlevel_stat(seg + ".tar.gpg")['size']
        previous = set(segments)
        print "%s: %.3f +%.3f -%.3f" % (s, size / 1024.0**2,
                                        added / 1024.0**2,
                                        removed / 1024.0**2)

# Build checksum list for objects in the given segments, or all segments if
# none are specified.
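# Syntax: $0 --store=DATADIR object-sums [SEGMENT]...
# Each output line has the form segment/object:size:checksum.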
def cmd_object_checksums(segments):
    get_passphrase()
    lowlevel = lbs.LowlevelDataStore(options.store)
    store = lbs.ObjectStore(lowlevel)
    if len(segments) == 0:
        segments = sorted(lowlevel.list_segments())
    for s in segments:
        for (o, data) in store.load_segment(s):
            csum = lbs.ChecksumCreator().update(data).compute()
            print "%s/%s:%d:%s" % (s, o, len(data), csum)
    store.cleanup()

# Read one or more snapshot files and print their parsed contents.
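# Syntax: $0 --store=DATADIR read-snapshots SNAPSHOT...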
def cmd_read_snapshots(snapshots):
    get_passphrase()
    lowlevel = lbs.LowlevelDataStore(options.store)
    store = lbs.ObjectStore(lowlevel)
    for s in snapshots:
        d = lbs.parse_full(store.load_snapshot(s))
        print d
        print d['Segments'].split()
    store.cleanup()

# Verify the integrity of the listed snapshots: check that each file's data
# can be read back and matches its recorded size and checksum.
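# Syntax: $0 --store=DATADIR verify-snapshots SNAPSHOT...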
def cmd_verify_snapshots(snapshots):
    get_passphrase()
    lowlevel = lbs.LowlevelDataStore(options.store)
    store = lbs.ObjectStore(lowlevel)
    for s in snapshots:
        print "#### Snapshot", s
        d = lbs.parse_full(store.load_snapshot(s))
        print "## Root:", d['Root']
        metadata = lbs.iterate_metadata(store, d['Root'])
        for m in metadata:
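            # Only regular files ('-') carry data blocks to check.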
            if m.fields['type'] != '-': continue
            print "%s [%d bytes]" % (m.fields['name'], int(m.fields['size']))
            verifier = lbs.ChecksumVerifier(m.fields['checksum'])
            size = 0
            for block in m.data():
                data = store.get(block)
                verifier.update(data)
                size += len(data)
            if int(m.fields['size']) != size:
                raise ValueError("File size does not match!")
            if not verifier.valid():
                raise ValueError("Bad checksum found")
    store.cleanup()

if len(args) == 0:
    parser.print_usage()
    sys.exit(1)
cmd = args[0]
args = args[1:]
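# Dispatch to the appropriate command handler.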
if cmd == 'clean':
    cmd_clean()
elif cmd == 'list-snapshots':
    cmd_list_snapshots()
elif cmd == 'object-sums':
    cmd_object_checksums(args)
elif cmd == 'read-snapshots':
    cmd_read_snapshots(args)
elif cmd == 'list-snapshot-sizes':
    cmd_list_snapshot_sizes()
elif cmd == 'verify-snapshots':
    cmd_verify_snapshots(args)
else:
    print "Unknown command:", cmd
    parser.print_usage()
    sys.exit(1)