Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add admin args option #114

Merged
merged 2 commits on May 9, 2018
Merged
Changes from all commits
Commits
File filter...
Filter file types
Jump to…
Jump to file
Failed to load files.

Always

Just for now

@@ -30,6 +30,8 @@
help="s3 bucket name")
parser.add_option("--user", dest="user",
help="user")
parser.add_option("--admin", dest="admin",
help="admin user name")
parser.add_option("--sqsname", dest="sqsname",
help="sqsname")

@@ -40,6 +42,7 @@
acnumber = opts.acnumber
sqsname = opts.sqsname
user = opts.user
admin = opts.admin

if acnumber.isdigit()==False:
print "Please check your account number, it should only contain digits, no other characters."
@@ -51,12 +54,18 @@
secret_key=''
bucket=''

credentials_name = admin if admin else "default"

with open(os.environ['HOME'] + '/.aws/credentials') as f:
for line in f:
if "aws_access_key_id" in line:
access_key = line.split("=",1)[1].strip()
if "aws_secret_access_key" in line:
secret_key = line.split("=",1)[1].strip()
if credentials_name in line:
for line in f:
if "aws_access_key_id" in line:
access_key = line.split("=",1)[1].strip()
if "aws_secret_access_key" in line:
secret_key = line.split("=",1)[1].strip()
break


if not access_key:
print "Please check your ~/.aws/credentials file and make sure that access key and secret access key are set. Run aws configure to set them up"
@@ -74,7 +83,7 @@
else:
print e
sys.exit()



region = bucket.get_location()
@@ -96,27 +105,27 @@

if "None" not in str(queue_name):
#queue exists

queue_attr_raw = conn.get_queue_attributes(queue_name, attribute='All')
queue_attr = str(queue_attr_raw)
queue_attr = str(queue_attr_raw)

if s3bucket in queue_attr:
print 'Bucket already exists in queue\'s policy'

elif 'arn:aws:s3' in queue_attr:
# append the bucket to the existing policy
print "A bucket already exists in this queue's policy, appending this bucket to it"

start = '\"aws:SourceArn\":'
end = '}}}'
result = re.search('%s(.*)%s' % (start, end), queue_attr).group(1)


leftbracketremoved = result.replace('[','')
rightbracketremoved = leftbracketremoved.replace(']','')

addon = '[' + rightbracketremoved + ',' + '\"arn:aws:s3:*:*:' + s3bucket +'\"' +']'


text = """ {
"Version": "2008-10-17",
@@ -132,7 +141,7 @@
"Resource": "arn:aws:sqs:%s:%s:%s",
"Condition": {
"ArnLike": {
"aws:SourceArn": %s
"aws:SourceArn": %s
}
}
},
@@ -149,18 +158,18 @@
}
""" % (region, acnumber, queue_name, addon, acnumber, region, acnumber, queue_name)


parsed = json.loads(text)

conn.set_queue_attribute(queue_name, 'Policy', json.dumps(parsed))

# s3 bucket notification configuration
# s3 bucket notification configuration
client = boto3.client('s3', region)


response = client.put_bucket_notification_configuration(
Bucket=s3bucket,
NotificationConfiguration={
NotificationConfiguration={
"QueueConfigurations": [{
"Id": "Notification",
"Events": ["s3:ObjectCreated:*"],
@@ -169,7 +178,7 @@
}
)


else:
conn.set_queue_attribute(queue_name, 'Policy', json.dumps({
"Version": "2008-10-17",
@@ -201,13 +210,13 @@
]
}))

# s3 bucket notification configuration
# s3 bucket notification configuration
client = boto3.client('s3', region)


response = client.put_bucket_notification_configuration(
Bucket=s3bucket,
NotificationConfiguration={
NotificationConfiguration={
"QueueConfigurations": [{
"Id": "Notification",
"Events": ["s3:ObjectCreated:*"],
@@ -216,35 +225,35 @@
}
)

else:
else:
# queue does not exist and no sqs queue name is passed
if sqsname == None:
sqsname = 'loggly-s3-queue'

queue_name = conn.get_queue(sqsname)

# Default queue already exists
if queue_name!= None:
queue_attr_raw = conn.get_queue_attributes(queue_name, attribute='All')
queue_attr = str(queue_attr_raw)
queue_attr = str(queue_attr_raw)

if s3bucket in queue_attr:
print 'Bucket already exists in queue\'s policy'

else:
# append the bucket to the existing policy
print "A bucket already exists in this queue's policy, appending this bucket to it"

start = '\"aws:SourceArn\":'
end = '}}}'
result = re.search('%s(.*)%s' % (start, end), queue_attr).group(1)


leftbracketremoved = result.replace('[','')
rightbracketremoved = leftbracketremoved.replace(']','')

addon = '[' + rightbracketremoved + ',' + '\"arn:aws:s3:*:*:' + s3bucket +'\"' +']'


text = """ {
"Version": "2008-10-17",
@@ -260,7 +269,7 @@
"Resource": "arn:aws:sqs:%s:%s:%s",
"Condition": {
"ArnLike": {
"aws:SourceArn": %s
"aws:SourceArn": %s
}
}
},
@@ -277,18 +286,18 @@
}
""" % (region, acnumber, sqsname, addon, acnumber, region, acnumber, sqsname)


parsed = json.loads(text)

conn.set_queue_attribute(queue_name, 'Policy', json.dumps(parsed))

# s3 bucket notification configuration
# s3 bucket notification configuration
client = boto3.client('s3', region)


response = client.put_bucket_notification_configuration(
Bucket=s3bucket,
NotificationConfiguration={
NotificationConfiguration={
"QueueConfigurations": [{
"Id": "Notification",
"Events": ["s3:ObjectCreated:*"],
@@ -298,10 +307,10 @@
)

else:
# create the default queue or the queue passed as a parameter
# create the default queue or the queue passed as a parameter
q = conn.create_queue(sqsname)
queue_name = conn.get_queue(sqsname)

conn.set_queue_attribute(queue_name, 'Policy', json.dumps({
"Version": "2008-10-17",
"Id": "PolicyExample",
@@ -332,12 +341,12 @@
]
}))

# s3 bucket notification configuration
# s3 bucket notification configuration
client = boto3.client('s3', region)

response = client.put_bucket_notification_configuration(
Bucket=s3bucket,
NotificationConfiguration={
NotificationConfiguration={
"QueueConfigurations": [{
"Id": "Notification",
"Events": ["s3:ObjectCreated:*"],
@@ -354,37 +363,37 @@


if user != None and user != '':

try:
response = iam.get_user(user)
if 'get_user_response' in response:
print 'IAM user %s already exists, appending the sqs queue and s3 bucket to this IAM user\'s policy' % user

existing_policy = str(iam.get_user_policy(user, 'LogglyUserPolicy'))
existing_policy_decoded = urllib.unquote(existing_policy)

s3Buckets = []
sqsQueues = []
sqsQueues = []

response = iam.get_all_access_keys(user, max_items=1)

s = StringIO.StringIO(existing_policy_decoded)
for line in s:
if 'arn:aws:sqs' in line:
sqsQueues.append(line.strip().replace(",", ""))
if 'arn:aws:s3' in line:
s3Buckets.append(line.strip().replace(",", ""))

# append current s3bucket and sqs queue
sqsQueues.append('\"arn:aws:sqs:%s:%s:%s\"' % (region, acnumber, sqsname,))
s3Buckets.append('\"arn:aws:s3:::%s/*\"' % (s3bucket))
s3Buckets.append('\"arn:aws:s3:::%s\"' % (s3bucket))
s3Buckets.append('\"arn:aws:s3:::%s\"' % (s3bucket))

sqsQueueAddOn=""
for entry in sqsQueues:
sqsQueueAddOn = sqsQueueAddOn + entry + ",\n"


s3BucketAddOn=""
for entry in s3Buckets:
s3BucketAddOn = s3BucketAddOn + entry + ",\n"
@@ -421,10 +430,10 @@

print ""
print 'Appended! Please provide the access key and secret key for the IAM user %s in the form fields' % user

except BotoServerError, e:
if "The user with name" in e.message and "cannot be found" in e.message :

# create an IAM user
response = iam.create_user(user)

@@ -478,44 +487,44 @@
response = iam.put_user_policy(user,
'LogglyUserPolicy',
policy_json)
else:
print(e.message)
else:

print(e.message)

else:
else:
# create an IAM user
user = 'loggly-s3-user'

try:
response = iam.get_user(user)
if 'get_user_response' in response:
print 'The default IAM user \'loggly-s3-user\' which the script creates already exists, appending the sqs queue and s3 bucket to this IAM user\'s policy'

existing_policy = str(iam.get_user_policy(user, 'LogglyUserPolicy'))
existing_policy_decoded = urllib.unquote(existing_policy)

s3Buckets = []
sqsQueues = []
sqsQueues = []

response = iam.get_all_access_keys(user, max_items=1)

s = StringIO.StringIO(existing_policy_decoded)
for line in s:
if 'arn:aws:sqs' in line:
sqsQueues.append(line.strip().replace(",", ""))
if 'arn:aws:s3' in line:
s3Buckets.append(line.strip().replace(",", ""))

# append current s3bucket and sqs queue
sqsQueues.append('\"arn:aws:sqs:%s:%s:%s\"' % (region, acnumber, sqsname,))
s3Buckets.append('\"arn:aws:s3:::%s/*\"' % (s3bucket))
s3Buckets.append('\"arn:aws:s3:::%s\"' % (s3bucket))
s3Buckets.append('\"arn:aws:s3:::%s\"' % (s3bucket))

sqsQueueAddOn=""
for entry in sqsQueues:
sqsQueueAddOn = sqsQueueAddOn + entry + ",\n"


s3BucketAddOn=""
for entry in s3Buckets:
s3BucketAddOn = s3BucketAddOn + entry + ",\n"
@@ -546,13 +555,13 @@
}
]
}""" % (sqsQueueAddOn[:-2], s3BucketAddOn[:-2],)


try:

try:
response = iam.put_user_policy(user, 'LogglyUserPolicy', policy_json)
except Exception, e:
print e

print ""
print "Appended! Please provide the access key and secret key for the IAM user \'loggly-s3-user\' in the form fields"

ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.