X-Git-Url: http://git.vrable.net/?a=blobdiff_plain;ds=sidebyside;f=contrib%2Flbs-store-s3;fp=contrib%2Flbs-store-s3;h=0000000000000000000000000000000000000000;hb=9d3cca72ea3c0f912c7250d84e12357346e59fe2;hp=340253ac1a928e09d7b37f4857c1d53db56d1943;hpb=d3f9ee42a4023631059f16cc2a8b96b9540750b2;p=cumulus.git

diff --git a/contrib/lbs-store-s3 b/contrib/lbs-store-s3
deleted file mode 100755
index 340253a..0000000
--- a/contrib/lbs-store-s3
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/python
-#
-# Storage hook for writing LBS backups directly to Amazon's Simple Storage
-# Service (S3).
-#
-# Command-line arguments:
-#
-# Most options are controlled by environment variables:
-#   AWS_ACCESS_KEY_ID       Amazon Web Services credentials
-#   AWS_SECRET_ACCESS_KEY   "                              "
-#   LBS_S3_BUCKET           S3 bucket in which data should be stored
-#   LBS_S3_PREFIX           Path prefix to add to pathnames (include trailing
-#                           slash)
-#
-# This script depends upon the boto Python library for interacting with Amazon
-# S3.
-
-import os, sys
-import boto
-from boto.s3.bucket import Bucket
-from boto.s3.key import Key
-
-prefix = os.environ.get('LBS_S3_PREFIX', "")
-bucket_name = os.environ['LBS_S3_BUCKET']
-(local_path, file_type, remote_path) = sys.argv[1:4]
-
-conn = boto.connect_s3()
-bucket = Bucket(conn, bucket_name)
-k = Key(bucket)
-k.key = prefix + file_type + "/" + remote_path
-k.set_contents_from_filename(local_path)
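
For reference, below is a minimal sketch of what the removed hook did, re-expressed with the newer boto3 library rather than the legacy boto API it used. It is not part of the cumulus repository; the environment variable names (LBS_S3_BUCKET, LBS_S3_PREFIX), the three positional arguments, and the <prefix><type>/<remote name> key layout are taken from the removed script, while the boto3 calls are an assumed modern equivalent.

#!/usr/bin/python
# Sketch only: the removed lbs-store-s3 hook rewritten against boto3.
# Credentials come from AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY, which
# boto3 reads from the environment automatically.

import os
import sys

import boto3  # pip install boto3

def main():
    # Same configuration as the removed hook.
    prefix = os.environ.get("LBS_S3_PREFIX", "")
    bucket_name = os.environ["LBS_S3_BUCKET"]
    local_path, file_type, remote_path = sys.argv[1:4]

    # Same key layout as the removed hook: <prefix><type>/<remote name>.
    key = prefix + file_type + "/" + remote_path

    s3 = boto3.client("s3")
    s3.upload_file(local_path, bucket_name, key)

if __name__ == "__main__":
    main()

Invocation would mirror the removed hook: export the AWS credentials, LBS_S3_BUCKET, and optionally LBS_S3_PREFIX (with a trailing slash), then pass the local file, file type, and remote name as the three command-line arguments.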