X-Git-Url: http://git.vrable.net/?a=blobdiff_plain;f=python%2Fcumulus%2F__init__.py;h=8dc4c9875e0f77ef95232f866f37a28e7ae9949e;hb=a5f66616b1ec0c38328ad5131bf1c889ccc43659;hp=ef353257cfcae6d00114c69a2ce09ea1243255c6;hpb=5949214bc01b2c762adfb724d1e63b7e130c91f4;p=cumulus.git

diff --git a/python/cumulus/__init__.py b/python/cumulus/__init__.py
index ef35325..8dc4c98 100644
--- a/python/cumulus/__init__.py
+++ b/python/cumulus/__init__.py
@@ -26,7 +26,7 @@ various parts of a Cumulus archive:
   - reading and maintaining the local object database
 """
 
-from __future__ import division
+
 import hashlib
 import itertools
 import os
@@ -34,7 +34,7 @@ import re
 import sqlite3
 import tarfile
 import tempfile
-import thread
+import _thread
 
 import cumulus.store
 import cumulus.store.file
@@ -219,7 +219,7 @@ class SearchPath(object):
             except cumulus.store.NotFoundError:
                 pass
         if not success:
-            raise cumulus.store.NotFoundError(basename)
+            raise cumulus.store.NotFoundError(backend)
 
 def _build_segments_searchpath(prefix):
     for (extension, filter) in SEGMENT_FILTERS:
@@ -231,6 +231,9 @@ SEARCH_PATHS = {
         [SearchPathEntry("meta", ".sha1sums"),
          SearchPathEntry("checksums", ".sha1sums"),
          SearchPathEntry("", ".sha1sums")]),
+    "meta": SearchPath(
+        r"^snapshot-(.*)\.meta(\.\S+)?$",
+        _build_segments_searchpath("meta")),
     "segments": SearchPath(
         (r"^([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"
          r"\.tar(\.\S+)?$"),
@@ -261,7 +264,7 @@ class BackendWrapper(object):
         store may either be a Store object or URL.
         """
 
-        if type(backend) in (str, unicode):
+        if type(backend) in (str, str):
             if backend.find(":") >= 0:
                 self._backend = cumulus.store.open(backend)
             else:
@@ -289,6 +292,15 @@ class BackendWrapper(object):
         return ((x[1].group(1), x[0])
                 for x in SEARCH_PATHS[filetype].list(self._backend))
 
+    def prefetch_generic(self):
+        """Calls scan on directories to prefetch file metadata."""
+        directories = set()
+        for typeinfo in list(SEARCH_PATHS.values()):
+            directories.update(typeinfo.directories())
+        for d in directories:
+            print("Prefetch", d)
+            self._backend.scan(d)
+
 class CumulusStore:
     def __init__(self, backend):
         if isinstance(backend, BackendWrapper):
@@ -362,7 +374,7 @@ class CumulusStore:
                 dst.write(block)
             src.close()
             dst.close()
-        thread.start_new_thread(copy_thread, (filehandle, input))
+        _thread.start_new_thread(copy_thread, (filehandle, input))
         return output
 
     def get_segment(self, segment):
@@ -427,6 +439,9 @@ class CumulusStore:
 
         return data
 
+    def prefetch(self):
+        self.backend.prefetch_generic()
+
 def parse(lines, terminate=None):
     """Generic parser for RFC822-style "Key: Value" data streams.
 
@@ -466,7 +481,7 @@ def parse(lines, terminate=None):
 
 def parse_full(lines):
     try:
-        return parse(lines).next()
+        return next(parse(lines))
     except StopIteration:
         return {}
 
@@ -551,7 +566,7 @@ class MetadataItem:
     @staticmethod
     def decode_device(s):
         """Decode a device major/minor number."""
-        (major, minor) = map(MetadataItem.decode_int, s.split("/"))
+        (major, minor) = list(map(MetadataItem.decode_int, s.split("/")))
         return (major, minor)
 
     class Items: pass
@@ -563,7 +578,7 @@ class MetadataItem:
         self.object_store = object_store
         self.keys = []
         self.items = self.Items()
-        for (k, v) in fields.items():
+        for (k, v) in list(fields.items()):
            if k in self.field_types:
                decoder = self.field_types[k]
                setattr(self.items, k, decoder(v))
@@ -721,7 +736,7 @@ class LocalDatabase:
                 can_delete = True
 
             if can_delete and not first:
-                print "Delete snapshot %d (%s)" % (id, name)
+                print("Delete snapshot %d (%s)" % (id, name))
                 cur.execute("delete from snapshots where snapshotid = ?",
                             (id,))
             first = False
@@ -927,11 +942,11 @@ class LocalDatabase:
         target_size = max(2 * segment_size_estimate,
                           total_bytes / target_buckets)
 
-        print "segment_size:", segment_size_estimate
-        print "distribution:", distribution
-        print "total_bytes:", total_bytes
-        print "target_buckets:", target_buckets
-        print "min, target size:", min_size, target_size
+        print("segment_size:", segment_size_estimate)
+        print("distribution:", distribution)
+        print("total_bytes:", total_bytes)
+        print("target_buckets:", target_buckets)
+        print("min, target size:", min_size, target_size)
 
         # Chosen cutoffs. Each bucket consists of objects with age greater
         # than one cutoff value, but not greater than the next largest cutoff.
@@ -961,7 +976,7 @@ class LocalDatabase:
         cutoffs.append(-1)
         cutoffs.append(-1)
 
-        print "cutoffs:", cutoffs
+        print("cutoffs:", cutoffs)
 
         # Update the database to assign each object to the appropriate bucket.
         cutoffs.reverse()
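
The only functional change in this diff, beyond the mechanical Python 2 to 3 rewrites, is the new prefetch path: CumulusStore.prefetch() delegates to BackendWrapper.prefetch_generic(), which collects every directory named by the SEARCH_PATHS table and calls scan() on each so the store can fetch file metadata in bulk. Below is a minimal, runnable sketch of that flow; FakeStore, FakeSearchPath, and the example directory names are illustrative stand-ins, not part of the cumulus API.

    # Sketch of the prefetch flow added in this commit.  The real logic lives
    # in BackendWrapper.prefetch_generic(); everything named Fake* here is a
    # stand-in used only for illustration.

    class FakeStore:
        def scan(self, directory):
            # A real cumulus.store backend would list the directory once and
            # cache the results so later lookups avoid extra round trips.
            print("scanning:", directory or "(top level)")

    class FakeSearchPath:
        def __init__(self, directories):
            self._directories = directories

        def directories(self):
            return set(self._directories)

    # Stand-in for cumulus.SEARCH_PATHS (the real table maps file types to
    # SearchPath objects; the directory names here are made up).
    SEARCH_PATHS = {
        "checksums": FakeSearchPath(["meta", "checksums", ""]),
        "meta": FakeSearchPath(["meta"]),
        "segments": FakeSearchPath(["segments0", "segments1", ""]),
    }

    def prefetch_generic(backend):
        # Same shape as BackendWrapper.prefetch_generic(): gather every
        # directory used by any search path, then scan each one once.
        directories = set()
        for typeinfo in SEARCH_PATHS.values():
            directories.update(typeinfo.directories())
        for d in sorted(directories):
            print("Prefetch", d)
            backend.scan(d)

    prefetch_generic(FakeStore())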