forked from voldemort/voldemort
-
Notifications
You must be signed in to change notification settings - Fork 0
/
AvroStoreBuilderReducer.java
115 lines (91 loc) · 3.49 KB
/
AvroStoreBuilderReducer.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
package voldemort.store.readonly.mr;
/*
* Copyright 2008-2009 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroValue;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import voldemort.store.readonly.disk.HadoopStoreWriter;
import voldemort.store.readonly.disk.KeyValueWriter;
import azkaban.common.utils.Utils;
/**
* Take key md5s and value bytes and build a Avro read-only store from these
* values
*/
public class AvroStoreBuilderReducer implements
        Reducer<AvroKey<ByteBuffer>, AvroValue<ByteBuffer>, Text, Text>, JobConfigurable, Closeable {

    // Fully-qualified class name of the KeyValueWriter implementation.
    // This provides a pluggable mechanism for generating your own on-disk
    // format for the data and index files.
    String keyValueWriterClass;

    // Writer that persists the grouped key/value pairs to the store files;
    // instantiated in configure().
    @SuppressWarnings("rawtypes")
    KeyValueWriter writer;

    /**
     * Collects every value for the given key into a list and hands the pair
     * to the configured {@link KeyValueWriter}.
     *
     * @param keyAvro Avro-wrapped key bytes (key md5)
     * @param iterator all serialized value buffers for this key
     * @param collector unused; output goes through {@link #writer} instead
     * @param reporter progress reporter, passed through to the writer
     * @throws IOException if the underlying writer fails
     */
    @SuppressWarnings("unchecked")
    @Override
    public void reduce(AvroKey<ByteBuffer> keyAvro,
                       Iterator<AvroValue<ByteBuffer>> iterator,
                       OutputCollector<Text, Text> collector,
                       Reporter reporter) throws IOException {

        ByteBuffer keyBuffer = keyAvro.datum();
        keyBuffer.rewind();

        byte[] keyBytes = new byte[keyBuffer.remaining()];
        keyBuffer.get(keyBytes);
        BytesWritable key = new BytesWritable(keyBytes);

        // Materialize all values for this key before handing off to the
        // writer, which expects an iterator over BytesWritable values.
        ArrayList<BytesWritable> valueList = new ArrayList<>();
        while(iterator.hasNext()) {
            ByteBuffer valueBuffer = iterator.next().datum();
            valueBuffer.rewind();
            byte[] valueBytes = new byte[valueBuffer.remaining()];
            valueBuffer.get(valueBytes);
            valueList.add(new BytesWritable(valueBytes));
        }

        writer.write(key, valueList.iterator(), reporter);
    }

    /**
     * Instantiates the {@link KeyValueWriter} named by the "writer.class" job
     * property (defaulting to {@link HadoopStoreWriter}) and configures it.
     *
     * @param job the Hadoop job configuration
     * @throws RuntimeException if the writer cannot be created or configured.
     *         Previously the exception was swallowed with printStackTrace(),
     *         which left {@link #writer} null and produced an uninformative
     *         NullPointerException later in {@link #reduce}; fail fast with
     *         the root cause instead.
     */
    @Override
    public void configure(JobConf job) {
        try {
            keyValueWriterClass = job.get("writer.class");
            if(keyValueWriterClass != null)
                writer = (KeyValueWriter) Utils.callConstructor(keyValueWriterClass);
            else
                writer = new HadoopStoreWriter();
            writer.conf(job);
        } catch(Exception e) {
            throw new RuntimeException("Failed to instantiate/configure KeyValueWriter "
                                       + keyValueWriterClass, e);
        }
    }

    /**
     * Flushes and closes the underlying writer. Safe to call even if
     * {@link #configure(JobConf)} was never invoked.
     *
     * @throws IOException if closing the writer fails
     */
    @Override
    public void close() throws IOException {
        if(writer != null)
            writer.close();
    }
}