-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy paths3push.py
More file actions
76 lines (63 loc) · 2.49 KB
/
s3push.py
File metadata and controls
76 lines (63 loc) · 2.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
#!/usr/bin/env python
# author: Aditya Patawari <aditya@adityapatawari.com>
import boto
import ConfigParser
import sys
from optparse import OptionParser
import os.path
from boto.s3.key import Key
from boto.s3.lifecycle import Lifecycle
import glob
from types import NoneType
import datetime
__author__ = "Aditya Patawari <aditya@adityapatawari.com>"

# Printed when ~/.aws.conf is missing, so the user knows how to create it.
Description = '''
The format for the file is :
[AWS]
consumer_key: <Consumer Key>
consumer_secret: <Consumer Secret>
[DEFAULTS]
s3_bucket: <bucket>
'''

# The config file is mandatory: it holds the AWS credentials and the
# default bucket name.
if not os.path.isfile(os.path.expanduser('~/.aws.conf')):
    print(Description)
    sys.exit(2)

config = ConfigParser.ConfigParser()
config.read(os.path.expanduser('~/.aws.conf'))
# raw=True: credentials may contain '%', which would otherwise trigger
# ConfigParser value interpolation.
access_key = config.get("AWS", "consumer_key", raw=True)   # renamed: 'id' shadowed the builtin
secret_key = config.get("AWS", "consumer_secret", raw=True)
def_bucket = config.get("DEFAULTS", "s3_bucket", raw=True)

parser = OptionParser()
parser.add_option("-f", "--file", dest="filename", help="Upload the FILE to AWS S3", metavar="FILE")
parser.add_option("-d", "--directory", dest="dir_name", help="Define a directory structure with a trailing /", metavar="DIRECTORY_NAME")
parser.add_option("-c", "--create-bucket", dest="new_bucket", help="Creates a bucket, if it doesn't exist", metavar="BUCKET_NAME")
parser.add_option("-e", "--expiration", dest="life", help="Expiration in number of days", metavar="LIFE", type="int")
parser.add_option("-l", "--list", dest="list_bucket", help="List the contents of the bucket", metavar="LIST")
(options, args) = parser.parse_args()

conn = boto.connect_s3(access_key, secret_key)

if options.new_bucket:
    # Create the bucket only when it does not already exist; an
    # expiration rule (-e) is attached only to a freshly created bucket.
    bucket = conn.lookup(options.new_bucket)
    if bucket is None:
        bucket = conn.create_bucket(options.new_bucket)
        if options.life:
            life = Lifecycle()
            life.add_rule('s3push_expiration_rule', '', 'Enabled', options.life)
            bucket.configure_lifecycle(life)
elif options.list_bucket:
    # Listing mode: print name, last-modified timestamp and size, then exit.
    bucket = conn.lookup(options.list_bucket)
    if bucket is None:
        print('Bucket not found: ' + options.list_bucket)
        sys.exit(1)
    # NOTE: 's3_key' (not 'key') so the loop does not shadow the AWS secret.
    for s3_key in bucket:
        last_modified = datetime.datetime.strptime(s3_key.last_modified, '%Y-%m-%dT%H:%M:%S.000Z')
        # '//' keeps integer division under both Python 2 and Python 3.
        print(s3_key.name.ljust(70) + '\t' + last_modified.strftime("%d %B %Y, %I:%M%p").ljust(24) + '\t' + str(s3_key.size // 1024) + 'KB')
    sys.exit(0)
else:
    bucket = conn.lookup(def_bucket)

# Upload mode from here on: -f is required, otherwise glob.glob(None)
# would raise a TypeError.
if not options.filename:
    parser.error("no file to upload; use -f/--file (or -l to list a bucket)")
if bucket is None:
    print('Bucket not found')
    sys.exit(1)

if options.dir_name is None:
    options.dir_name = ''

k = Key(bucket)
for file_name in glob.glob(options.filename):
    # Store under <dir_name><basename>; the glob pattern may match several files.
    k.key = options.dir_name + file_name.split('/')[-1]
    k.set_contents_from_filename(file_name)
    print(file_name + ' uploaded')