Commit
Final POM clean up.
larsgeorge committed May 4, 2015
1 parent 6cb9b7d commit 398fa42
Showing 6 changed files with 61 additions and 43 deletions.
41 changes: 24 additions & 17 deletions ch07/pom.xml
@@ -41,22 +41,10 @@
<artifactId>hbase-book-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.16</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>0.20-append-r1057313</version>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>2.6.0</version>
<scope>provided</scope>
</dependency>
<dependency>
@@ -68,12 +56,31 @@
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1</version>
<version>1.1.1</version>
<exclusions>
<exclusion>
<artifactId>hamcrest-core</artifactId>
<groupId>org.hamcrest</groupId>
</exclusion>
<exclusion>
<artifactId>junit</artifactId>
<groupId>junit</groupId>
</exclusion>
<exclusion>
<artifactId>junit</artifactId>
<groupId>junit</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.4</version>
<version>1.10</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
<version>1.7.10</version>
</dependency>
</dependencies>

@@ -98,7 +105,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2.1</version>
<version>2.5.3</version>
<configuration>
<tarLongFileMode>gnu</tarLongFileMode>
<descriptors>
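The ch07 POM above drops the direct commons-logging and log4j dependencies and adds the log4j-over-slf4j bridge, alongside swapping the old hadoop-core 0.20-append artifact for hadoop-mapreduce-client-core 2.6.0. A minimal sketch of what the bridge buys, assuming an SLF4J binding (for example slf4j-log4j12 or logback) is on the runtime classpath; the class name is hypothetical:

import org.apache.log4j.Logger;

public class Log4jBridgeSketch {

  public static void main(String[] args) {
    // With log4j-over-slf4j on the classpath this Logger is the bridge's
    // re-implementation of the log4j API, so examples that still import
    // org.apache.log4j.* keep compiling while their output is delegated to
    // the active SLF4J binding rather than a native log4j LogManager.
    Logger log = Logger.getLogger(Log4jBridgeSketch.class);
    if (log.isInfoEnabled()) {
      log.info("routed through SLF4J");
    }
  }
}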
1 change: 0 additions & 1 deletion ch07/src/main/java/mapreduce/ParseJson.java
@@ -23,7 +23,6 @@
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.log4j.Level;
1 change: 0 additions & 1 deletion ch07/src/main/java/mapreduce/ParseJson2.java
@@ -24,7 +24,6 @@
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.log4j.Level;
26 changes: 16 additions & 10 deletions ch09/src/main/java/client/DeleteTimestampExample.java
@@ -1,17 +1,21 @@
package client;

// cc DeleteTimestampExample Example application deleting with explicit timestamps
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import util.HBaseHelper;

import java.io.IOException;
import util.HBaseHelper;

// cc DeleteTimestampExample Example application deleting with explicit timestamps
public class DeleteTimestampExample {

private final static byte[] ROW1 = Bytes.toBytes("row1");
@@ -25,13 +29,15 @@ public static void main(String[] args) throws IOException, InterruptedException
helper.dropTable("testtable");
helper.createTable("testtable", "colfam1");

HTable table = new HTable(conf, "testtable");
HBaseAdmin admin = new HBaseAdmin(conf);
Connection connection = ConnectionFactory.createConnection(conf);

Table table = connection.getTable(TableName.valueOf("testtable"));
Admin admin = connection.getAdmin();

// vv DeleteTimestampExample
for (int count = 1; count <= 6; count++) { // co DeleteTimestampExample-1-Put Store the same column six times.
Put put = new Put(ROW1);
put.add(COLFAM1, QUAL1, count, Bytes.toBytes("val-" + count)); // co DeleteTimestampExample-2-Add The version is set to a specific value, using the loop variable.
put.addColumn(COLFAM1, QUAL1, count, Bytes.toBytes("val-" + count)); // co DeleteTimestampExample-2-Add The version is set to a specific value, using the loop variable.
table.put(put);
}
// ^^ DeleteTimestampExample
@@ -44,8 +50,8 @@ public static void main(String[] args) throws IOException, InterruptedException
// vv DeleteTimestampExample

Delete delete = new Delete(ROW1); // co DeleteTimestampExample-3-Delete Delete the newest two versions.
delete.deleteColumn(COLFAM1, QUAL1, 5);
delete.deleteColumn(COLFAM1, QUAL1, 6);
delete.addColumn(COLFAM1, QUAL1, 5);
delete.addColumn(COLFAM1, QUAL1, 6);
table.delete(delete);
// ^^ DeleteTimestampExample

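The DeleteTimestampExample diff above shows the HBase 1.0 client migration applied throughout this commit: HTable and HBaseAdmin are replaced by Table, Admin and a Connection obtained from ConnectionFactory, and the deprecated Put.add() and Delete.deleteColumn() calls become addColumn(). A condensed sketch of the new pattern, assuming the testtable/colfam1 table from the example already exists; the class name and values are placeholders:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class NewClientApiSketch {

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // ConnectionFactory replaces the old "new HTable(conf, name)" constructor.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("testtable"))) {
      Put put = new Put(Bytes.toBytes("row1"));
      // addColumn() replaces the deprecated add(); the long argument is the
      // explicit version (timestamp) being written.
      put.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"), 1L,
        Bytes.toBytes("val-1"));
      table.put(put);

      Delete delete = new Delete(Bytes.toBytes("row1"));
      // addColumn() replaces deleteColumn(); given a timestamp it removes only
      // that exact version of the cell.
      delete.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"), 1L);
      table.delete(delete);
    }
  }
}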
8 changes: 7 additions & 1 deletion ch11/pom.xml
@@ -21,10 +21,16 @@
<artifactId>hbase-book-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>2.6.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1</version>
<version>1.1.1</version>
</dependency>
</dependencies>
</project>
27 changes: 14 additions & 13 deletions ch11/src/main/java/bulkimport/BulkImportJobExample.java
@@ -1,5 +1,14 @@
package bulkimport;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -12,8 +21,6 @@
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.PutSortReducer;
import org.apache.hadoop.hbase.mapreduce.hadoopbackport.InputSampler;
import org.apache.hadoop.hbase.mapreduce.hadoopbackport.TotalOrderPartitioner;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.RawComparator;
@@ -32,20 +39,14 @@
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.partition.InputSampler;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.util.ReflectionUtils;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;

public class BulkImportJobExample {

private static Log LOG = LogFactory.getLog(BulkImportJobExample.class);
@@ -79,7 +80,7 @@ public VerboseInputSampler(Configuration conf) {
* @throws InterruptedException
*/
@SuppressWarnings("unchecked")
public static <K, V> void writePartitionFile(Job job, Sampler<K, V> sampler)
public static <K, V> void writePartitionFile(Job job, InputSampler.Sampler<K, V> sampler)
throws IOException, ClassNotFoundException, InterruptedException {
LinkedList<K> splits = new LinkedList<K>();
Configuration conf = job.getConfiguration();
@@ -180,7 +181,7 @@ public Object[] getSample(InputFormat inf, Job job) throws IOException, InterruptedException
// but we accept the possibility of sampling additional splits to hit
// the target sample keyset
for (int i = 0; i < splitsToSample || (i < splits.size() && samples.size() < numSamples); ++i) {
TaskAttemptContext samplingContext = new TaskAttemptContext(
TaskAttemptContext samplingContext = new TaskAttemptContextImpl(
job.getConfiguration(), new TaskAttemptID());
RecordReader<K, V> reader = inf.createRecordReader(splits.get(i), samplingContext);
reader.initialize(splits.get(i), samplingContext);
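The BulkImportJobExample hunks above swap the org.apache.hadoop.hbase.mapreduce.hadoopbackport copies of InputSampler and TotalOrderPartitioner for the stock Hadoop classes under org.apache.hadoop.mapreduce.lib.partition, and build sampling contexts with TaskAttemptContextImpl. A minimal sketch of wiring the stock sampler and partitioner into a job, assuming the Hadoop 2.6.0 dependency pulled in by the POM changes; the paths, key/value types, and class name below are placeholders, not values from the book's job:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.partition.InputSampler;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;

public class TotalOrderSketch {

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "total-order-sampling");
    job.setInputFormatClass(TextInputFormat.class);
    FileInputFormat.addInputPath(job, new Path("/tmp/input"));       // placeholder path
    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(Text.class);
    job.setNumReduceTasks(4);

    // Both classes now come from Hadoop's own MapReduce library instead of the
    // HBase hadoopbackport package removed in this commit.
    job.setPartitionerClass(TotalOrderPartitioner.class);
    TotalOrderPartitioner.setPartitionFile(job.getConfiguration(),
      new Path("/tmp/partitions.lst"));                              // placeholder path

    // Sample roughly 10% of the keys, capped at 10,000, over at most 10 splits,
    // then write the partition boundaries the partitioner will read back.
    InputSampler.Sampler<LongWritable, Text> sampler =
      new InputSampler.RandomSampler<LongWritable, Text>(0.1, 10000, 10);
    InputSampler.writePartitionFile(job, sampler);
  }
}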
