author     Elvis Pranskevichus <elvis@magic.io>    2018-03-30 19:47:52 -0400
committer  Elvis Pranskevichus <elvis@magic.io>    2018-03-30 20:50:17 -0400
commit     a1b19e1d10896f3ac2ce0b97c2bf24e0c1c4520f (patch)
tree       283ca8f24adc74d1092cf24b7bd66174a0014b51 /.ci/s3-upload.py
parent     0e715340a78863d973302981ab98b232d6f51735 (diff)
CI integration
Diffstat (limited to '.ci/s3-upload.py')
-rwxr-xr-x  .ci/s3-upload.py  62
1 file changed, 62 insertions, 0 deletions
diff --git a/.ci/s3-upload.py b/.ci/s3-upload.py
new file mode 100755
index 0000000..92479af
--- /dev/null
+++ b/.ci/s3-upload.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+
+
+import argparse
+import glob
+import os
+import os.path
+import sys
+
+import tinys3
+
+
+def main():
+    parser = argparse.ArgumentParser(description='S3 File Uploader')
+    parser.add_argument(
+        '--s3-bucket',
+        help=('S3 bucket name (defaults to $S3_UPLOAD_BUCKET)'),
+        default=os.environ.get('S3_UPLOAD_BUCKET'))
+    parser.add_argument(
+        '--s3-region',
+        help=('S3 region (defaults to $S3_UPLOAD_REGION)'),
+        default=os.environ.get('S3_UPLOAD_REGION'))
+    parser.add_argument(
+        '--s3-username',
+        help=('S3 username (defaults to $S3_UPLOAD_USERNAME)'),
+        default=os.environ.get('S3_UPLOAD_USERNAME'))
+    parser.add_argument(
+        '--s3-key',
+        help=('S3 access key (defaults to $S3_UPLOAD_ACCESSKEY)'),
+        default=os.environ.get('S3_UPLOAD_ACCESSKEY'))
+    parser.add_argument(
+        '--s3-secret',
+        help=('S3 secret (defaults to $S3_UPLOAD_SECRET)'),
+        default=os.environ.get('S3_UPLOAD_SECRET'))
+    parser.add_argument(
+        'files', nargs='+', metavar='FILE', help='Files to upload')
+
+    args = parser.parse_args()
+
+    if args.s3_region:
+        endpoint = 's3-{}.amazonaws.com'.format(args.s3_region.lower())
+    else:
+        endpoint = 's3.amazonaws.com'
+
+    conn = tinys3.Connection(
+        access_key=args.s3_key,
+        secret_key=args.s3_secret,
+        default_bucket=args.s3_bucket,
+        tls=True,
+        endpoint=endpoint,
+    )
+
+    for pattern in args.files:
+        for fn in glob.iglob(pattern):
+            with open(fn, 'rb') as f:
+                conn.upload(os.path.basename(fn), f)
+
+    return 0
+
+
+if __name__ == '__main__':
+    sys.exit(main())
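
For reference, a minimal standalone sketch of the tinys3 upload pattern the script relies on. The credentials, bucket name, region and file path below are hypothetical placeholders for illustration, not values taken from this repository; in the script above they come from the command-line flags or the S3_UPLOAD_* environment variables.

    import tinys3

    # Hypothetical credentials and bucket, for illustration only.
    conn = tinys3.Connection(
        access_key='AKIAEXAMPLE',
        secret_key='example-secret',
        default_bucket='example-bucket',
        tls=True,
        endpoint='s3-us-east-1.amazonaws.com',
    )

    # Upload a single file under its basename, as the script does for
    # every file matched by a glob pattern.
    with open('dist/example.tar.gz', 'rb') as f:
        conn.upload('example.tar.gz', f)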