Uploading files to S3
A quick-and-dirty Python solution for uploading the non-hidden files in the current directory to an S3 bucket; it works well as a post-commit hook in Git 🙂
Note: you will need to set your AWS access key ID and secret access key as environment variables, and specify in the script the name of the S3 bucket you wish to upload to. This script works for small files only.
"""Upload every non-hidden file in the current directory to an S3 bucket.

Credentials are read from the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
environment variables; the bucket name must be filled in below.  Intended
for SMALL files only: each file is sent in a single PUT via boto's
Key.set_contents_from_filename (no multipart upload).
"""

import glob
import os
import sys

import boto
from boto.s3.key import Key

# Fail fast with KeyError if either credential variable is unset.
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ["AWS_SECRET_ACCESS_KEY"]

bucket_name = '<<YOUR_BUCKET_NAME_HERE>>'

# Module-level side effect: connecting to S3 happens at import time,
# exactly as in the original script.
conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucketobj = conn.get_bucket(bucket_name)
k = Key(bucketobj)


def main():
    """Upload each non-hidden entry in the current directory.

    NOTE(review): glob('*') also returns directories, which would make
    set_contents_from_filename fail -- same behavior as the original;
    confirm the working directory holds only regular files.
    """
    for path in listdir_nohidden('.'):
        # Use the bare file name as the S3 key (the original stripped the
        # leading './' with a fragile f[2:] slice; basename is robust for
        # any path returned by listdir_nohidden).
        upload(os.path.basename(path).strip(), path)


def listdir_nohidden(path):
    """Return the entries under *path*; glob's '*' skips dotfiles."""
    return glob.glob(os.path.join(path, '*'))


# ONLY SMALL FILES -- the whole file is uploaded in one request.
def upload(name, path):
    """Upload the file at *path* to the bucket under key *name*."""
    print("Uploading File: name = " + name + ", path = " + path)
    k.key = name
    # cb/num_cb: boto invokes percent_cb up to ~10 times with progress.
    k.set_contents_from_filename(path, cb=percent_cb, num_cb=10)
    print("Done\n")


def percent_cb(complete, total):
    """Progress callback: emit one dot per progress report from boto."""
    sys.stdout.write('.')
    sys.stdout.flush()


if __name__ == "__main__":
    main()