
Commit 4076434

init repository
1 parent f4823ce commit 4076434

File tree

2 files changed: +42, -1 lines


README.md

Lines changed: 6 additions & 1 deletion
@@ -1 +1,6 @@
-# aws-lambda-functions
+AWS lambda functions
+======
+
+### Python 2.7:
+
+* kinesis_stream_put_partitioned_s3.py (process data from kinesis stream and put to s3, partitioning the data for Athena)
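A note on the Athena partitioning mentioned in that README entry: the handler added below writes each object under a Hive-style key prefix (year=/month=/day=/hour=/minute=), which Athena can treat as table partitions. A minimal sketch of that key layout, using a made-up timestamp and record id (both hypothetical, not from the commit):

    from datetime import datetime

    # hypothetical record in the shape the handler reads: d.sec is an epoch
    # timestamp, _id is a document id
    sample = {'d': {'sec': 1500000000}, '_id': 'abc123'}

    # fromtimestamp uses local time, so the rendered hour/minute depend on
    # the machine's timezone
    date = datetime.fromtimestamp(sample['d']['sec'])

    key = "year=%d/month=%d/day=%d/hour=%d/minute=%d/%d_%s.json" % (
        date.year, date.month, date.day, date.hour, date.minute,
        sample['d']['sec'], sample['_id'])

    print(key)
    # e.g. year=2017/month=7/day=14/hour=.../minute=.../1500000000_abc123.json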
kinesis_stream_put_partitioned_s3.py

Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
from __future__ import print_function

import base64
import json
import boto3
from datetime import datetime

S3_BUCKET = "bcc-s3"
S3_PATH = "test"  # defined but not used when building the object key below

print('Loading function')


def lambda_handler(event, context):

    # s3 client init
    s3 = boto3.client('s3')

    for record in event['Records']:
        # Kinesis data is base64 encoded so decode here
        str_payload = base64.b64decode(record['kinesis']['data'])

        json_payload = json.loads(str_payload)

        # build a Hive-style partitioned key from the record's epoch timestamp
        date = datetime.fromtimestamp(json_payload['d']['sec'])

        filePath = ("year=" + str(date.year) + "/month=" + str(date.month) +
                    "/day=" + str(date.day) + "/hour=" + str(date.hour) +
                    "/minute=" + str(date.minute) + "/" +
                    str(json_payload['d']['sec']) + "_" +
                    str(json_payload['_id']) + ".json")

        print("path: " + filePath)

        r = s3.put_object(ContentType="application/json", Bucket=S3_BUCKET,
                          Key=filePath, Body=str_payload)

    return 'Successfully processed {} records.'.format(len(event['Records']))
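For quick local testing, a hand-built event in the shape Kinesis hands to Lambda can be fed to lambda_handler directly. This is only a sketch: the payload values and record id are made up, and running it performs a real PutObject, so it assumes AWS credentials with write access to the bcc-s3 bucket.

    import base64
    import json

    # hypothetical record matching what the handler reads: an epoch timestamp
    # under d.sec and an _id field
    payload = json.dumps({'d': {'sec': 1500000000},
                          '_id': '59683c0a9d2a4e000130a6ea'})

    event = {
        'Records': [
            {
                'kinesis': {
                    # Kinesis delivers the data field base64-encoded
                    'data': base64.b64encode(payload)
                }
            }
        ]
    }

    # the handler ignores the context argument, so None is fine here
    print(lambda_handler(event, None))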
