This repository has been archived by the owner on Mar 30, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 199
/
S3Sample.properties
41 lines (35 loc) · 1.8 KB
/
S3Sample.properties
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
# KinesisConnector Application Settings
appName = kinesisToS3
# By specifying the region name, the connector will connect from the Amazon Kinesis stream in this region
# unless the endpoint for Amazon Kinesis is explicitly specified. The Amazon DynamoDB lease table and Amazon CloudWatch
# metrics for connector will be created in this region. All resources in outgoing destination will
# not be affected by this region name.
regionName = us-east-1
retryLimit = 3
# 1MB = 1024*1024 = 1048576
bufferByteSizeLimit = 1048576
bufferRecordCountLimit = 25
bufferMillisecondsLimit = 3600000
# Flush when the buffer exceeds 25 Amazon Kinesis records, the 1 MB size limit, or when time since the last flush exceeds 1 hour
# Amazon S3 parameters for KinesisConnector
# Please fill in the name of the Amazon S3 bucket you'd like to use.
s3Bucket = pfifer-connector-test
s3Endpoint = https\://s3.amazonaws.com
# Optional Amazon S3 parameters for automatically creating the bucket
createS3Bucket = false
# Amazon Kinesis parameters for KinesisConnector
# Uncomment the following property if you would like to explicitly configure the Amazon Kinesis endpoint.
# This property will configure the connector's Amazon Kinesis client to read from this specific endpoint,
# overwriting the regionName property for ONLY the Amazon Kinesis client. The lease table and Amazon CloudWatch
# metrics will still use the regionName property.
# kinesisEndpoint = https\://kinesis.us-west-2.amazonaws.com
kinesisInputStream = s3TestStream
# Optional Amazon Kinesis parameters for automatically creating the stream
createKinesisInputStream = false
createKinesisOutputStream = false
kinesisInputStreamShardCount = 2
kinesisOutputStreamShardCount = 2
# Specifies file the StreamSource will read records from
createStreamSource = true
inputStreamFile = users.txt
connectorDestination = s3