// ConfigureAmazonS3FileSystemUsingClient.java
package com.northconcepts.datapipeline.examples.amazons3;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.northconcepts.datapipeline.amazons3.AmazonS3FileSystem;
import com.northconcepts.datapipeline.core.DataReader;
import com.northconcepts.datapipeline.core.DataWriter;
import com.northconcepts.datapipeline.core.StreamWriter;
import com.northconcepts.datapipeline.csv.CSVReader;
import com.northconcepts.datapipeline.job.Job;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
/**
 * Example: reads a CSV file from an Amazon S3 bucket through DataPipeline's
 * {@code AmazonS3FileSystem}, using an explicitly configured AWS SDK
 * {@code AmazonS3} client, and streams every record to standard output.
 */
public class ConfigureAmazonS3FileSystemUsingClient {

    // Placeholder credentials -- replace with real values before running.
    private static final String ACCESS_KEY = "YOUR ACCESS KEY";
    private static final String SECRET_KEY = "YOUR SECRET KEY";

    public static void main(String[] args) throws Throwable {
        BasicAWSCredentials basicCredentials = new BasicAWSCredentials(ACCESS_KEY, SECRET_KEY);

        // Build the S3 client explicitly so credentials and region are under
        // our control instead of relying on the default provider chain.
        AmazonS3 s3Client = AmazonS3ClientBuilder
                .standard()
                .withCredentials(new AWSStaticCredentialsProvider(basicCredentials))
                .withRegion(Regions.US_EAST_2)
                .build();

        AmazonS3FileSystem s3 = new AmazonS3FileSystem();
        s3.setClient(s3Client);
        s3.open();
        try {
            // FIX: try-with-resources closes the S3 stream even if reader
            // construction or the job fails (the original leaked it on error),
            // and the reader decodes as UTF-8 instead of the platform default
            // charset, which varies by host.
            try (InputStream inputStream = s3.readFile("datapipeline-test-01", "output/orders-records.csv")) {
                DataReader reader = new CSVReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
                DataWriter writer = StreamWriter.newSystemOutWriter();
                Job.run(reader, writer);
                System.out.println("Records read: " + writer.getRecordCount());
            }
        } finally {
            // Always release the S3 file-system handle.
            s3.close();
        }
    }
}