Permalink
Browse files

add HBase examples

  • Loading branch information...
1 parent 0703f6b commit 3d565b2ab3e1ae5d6a7631ff170b58bcee4d02b5 @leejianwei committed Mar 21, 2012
@@ -0,0 +1,34 @@
+// Gradle build script for the Spring Hadoop HBase CRUD sample.
+// Version properties ($hadoopVersion, $hbaseVersion, ...) come from gradle.properties.
+description = 'Spring Hadoop Samples - HBase CRUD with Java'
+
+apply plugin: 'base'
+apply plugin: 'java'
+apply plugin: 'idea'
+apply plugin: 'eclipse'
+
+repositories {
+ mavenCentral()
+ mavenLocal()
+ // Public Spring artefacts
+ // NOTE(review): plain-http repository URLs — prefer https if the Gradle/repo versions allow it.
+ mavenRepo name: "spring-release", urls: "http://repo.springsource.org/release"
+ mavenRepo name: "spring-milestone", urls: "http://repo.springsource.org/milestone"
+ mavenRepo name: "spring-snapshot", urls: "http://repo.springsource.org/snapshot"
+}
+
+dependencies {
+ // $version here is the project version (1.0.0.BUILD-SNAPSHOT from gradle.properties).
+ compile "org.springframework.data:spring-data-hadoop:$version"
+ compile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
+ compile "org.apache.hbase:hbase:$hbaseVersion"
+ // see HADOOP-7461
+ runtime "org.codehaus.jackson:jackson-mapper-asl:$jacksonVersion"
+
+ testCompile "junit:junit:$junitVersion"
+ testCompile "org.springframework:spring-test:$springVersion"
+}
+
+// Launches the demo main class on the runtime classpath; registered as the
+// default task below so a bare `gradle` invocation runs the sample.
+task run(type: JavaExec) {
+ description = 'Runs the application'
+ main = "org.springframework.data.hadoop.samples.hbase.HBaseMain"
+ classpath = sourceSets.main.runtimeClasspath
+}
+
+defaultTasks 'run'
@@ -0,0 +1,11 @@
+# Dependency versions referenced from build.gradle.
+junitVersion = 4.8.1
+springVersion = 3.0.7.RELEASE
+hadoopVersion = 1.0.0
+hbaseVersion = 0.92.0
+groovyVersion = 1.8.5
+jacksonVersion = 1.8.8
+
+# Project version (also used as the spring-data-hadoop dependency version).
+version = 1.0.0.BUILD-SNAPSHOT
+
+# NOTE(review): VMware-internal HTTP proxy settings — builds outside that
+# network should remove or override these.
+systemProp.http.proxyHost=proxy.vmware.com
+systemProp.http.proxyPort=3128
@@ -0,0 +1,39 @@
+==============================
+== HBase CRUD with Java API ==
+==============================
+
+1. MOTIVATION
+
+This demo shows how to use HBase with Spring Hadoop. It requires a running
+Hadoop instance (by default at localhost:9000) and a running HBase instance.
+The Hadoop settings can be configured through hadoop.properties (more info in the Spring Hadoop reference docs).
+
+The demo counts the number of occurrences of each link in an HBase table.
+
+2. BUILD AND DEPLOYMENT
+
+This directory contains the source files.
+For building, JDK 1.6+ is required.
+
+a) To build, test and run the sample, use the following command:
+
+*nix/BSD OS:
+$ ../gradlew
+
+Windows OS:
+$ ..\gradlew
+
+If you have Gradle installed and available in your classpath, you can simply type:
+$ gradle
+
+3. IDE IMPORT
+
+To import the code inside an IDE run the command
+
+For Eclipse
+$ ../gradlew eclipse
+
+For IDEA
+$ ../gradlew idea
+
+This will generate the IDE specific project files.
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2011 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.samples.hbase;
+
+/**
+ * Table and column names shared by the HBase CRUD demo (see HBaseMain).
+ *
+ * NOTE(review): this is the constant-interface antipattern (Effective Java);
+ * interface fields are implicitly public static final, so the modifiers below
+ * are redundant, and the conventional names would be UPPER_SNAKE_CASE — but
+ * renaming would break the references in HBaseMain, so left as-is.
+ *
+ * @author Jarred Li
+ *
+ */
+public interface Constant {
+
+ // Name of the demo table created (and dropped, if present) by HBaseMain.
+ public static String tableName = "likes";
+ // The single column family used for all cells.
+ public static String columnFamilyName = "cf";
+ // Qualifier holding the link URL written by putData().
+ public static String linkAddress = "link";
+ // Qualifier holding the long counter incremented by increment().
+ public static String likeNumber = "likeCount";
+ // Row-key prefix; rows are named row0 .. row999.
+ public static String rowName = "row";
+ // Base cell value; putData() appends i % 7 to it.
+ public static String cellValue = "http://blog.springsource.org/2012/02/29/introducing-spring-hadoop/";
+
+}
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2011 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.data.hadoop.samples.hbase;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+/**
+ * Standalone driver demonstrating basic HBase CRUD operations with the plain
+ * HBase client API: create table, put rows, increment a counter column, read
+ * one row back, and scan the table. Table/column names come from
+ * {@link Constant}.
+ *
+ * @author Jarred Li
+ *
+ */
+public class HBaseMain {
+
+ /**
+ * Runs the demo steps in order: create table, put, increment, get, scan.
+ *
+ * NOTE(review): the application context is created only for its side
+ * effects and is never closed; each step builds its own Configuration and
+ * client. Exceptions are printed via printStackTrace — fine for a demo,
+ * but prefer logging in real code.
+ *
+ * @param args ignored
+ */
+ public static void main(String[] args) {
+ // Initialize spring hadoop application context
+ new ClassPathXmlApplicationContext("META-INF/spring/context.xml");
+
+ try {
+ //1. create table
+ createTable();
+
+ //2. put data
+ putData();
+
+ //3. increment
+ increment();
+
+ //4. get data
+ getData();
+
+
+ //5. scan data
+ scanData();
+
+ } catch (MasterNotRunningException e) {
+ e.printStackTrace();
+ } catch (ZooKeeperConnectionException e) {
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+
+
+ }
+
+
+ /**
+ *
+ * create HBase table
+ *
+ * WARNING: destructive — if the table already exists it is disabled and
+ * deleted before being recreated with a single column family.
+ * NOTE(review): the HBaseAdmin instance is never closed (resource leak).
+ *
+ * @throws MasterNotRunningException
+ * @throws ZooKeeperConnectionException
+ * @throws IOException
+ */
+ private static void createTable() throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
+
+ Configuration config = HBaseConfiguration.create();
+ HBaseAdmin admin = new HBaseAdmin(config);
+
+ if (admin.tableExists(Constant.tableName)) {
+ admin.disableTable(Constant.tableName);
+ admin.deleteTable(Constant.tableName);
+ }
+
+ HTableDescriptor tableDes = new HTableDescriptor(Constant.tableName);
+
+ HColumnDescriptor cf1 = new HColumnDescriptor(Constant.columnFamilyName);
+ tableDes.addFamily(cf1);
+ admin.createTable(tableDes);
+
+ }
+
+
+ /**
+ * put data into table
+ *
+ * Writes 1000 rows (row0..row999); each row's cf:link cell holds the base
+ * URL with i % 7 appended, so only 7 distinct values occur.
+ * NOTE(review): the HTable instance is never closed (resource leak).
+ *
+ * @throws IOException
+ */
+ private static void putData() throws IOException {
+ Configuration config = HBaseConfiguration.create();
+ HTable table = new HTable(config, Constant.tableName);
+
+ for (int i = 0; i < 1000; i++) {
+ Put p = new Put(Bytes.toBytes(Constant.rowName + i));
+ p.add(Bytes.toBytes(Constant.columnFamilyName), Bytes.toBytes(Constant.linkAddress),
+ Bytes.toBytes(Constant.cellValue + i % 7));
+ table.put(p);
+ }
+ }
+
+
+
+ /**
+ * get data from table
+ *
+ * Reads row "row2" and prints its cf:link value (as a String) and its
+ * cf:likeCount value (as a long).
+ * NOTE(review): new String(byte[]) uses the platform-default charset —
+ * Bytes.toString(valueByte) would be the consistent choice. Also throws
+ * NPE if the row/columns are missing, and the HTable is never closed.
+ *
+ * @throws IOException
+ */
+ private static void getData() throws IOException {
+ Configuration config = HBaseConfiguration.create();
+ HTable table = new HTable(config, Constant.tableName);
+
+ Get get = new Get(Bytes.toBytes(Constant.rowName + "2"));
+ Result result = table.get(get);
+ byte[] valueByte = result.getValue(Bytes.toBytes(Constant.columnFamilyName),
+ Bytes.toBytes(Constant.linkAddress));
+ System.out.println("get value is:" + new String(valueByte));
+
+ byte[] valueByte2 = result.getValue(Bytes.toBytes(Constant.columnFamilyName),
+ Bytes.toBytes(Constant.likeNumber));
+ System.out.println("get value is:" + Bytes.toLong(valueByte2));
+
+ }
+
+ /**
+ * increment column value
+ *
+ * Server-side increments cf:likeCount by (i % 10 + 1) for each of the
+ * 1000 rows; creates the cell as an 8-byte long if it does not exist.
+ * NOTE(review): the HTable instance is never closed (resource leak).
+ *
+ * @throws IOException
+ */
+ private static void increment() throws IOException {
+ Configuration config = HBaseConfiguration.create();
+ HTable table = new HTable(config, Constant.tableName);
+
+ for (int i = 0; i < 1000; i++) {
+ Increment inc = new Increment(Bytes.toBytes(Constant.rowName + i));
+ inc.addColumn(Bytes.toBytes(Constant.columnFamilyName), Bytes.toBytes(Constant.likeNumber), i % 10 + 1);
+ table.increment(inc);
+ }
+ }
+
+
+ /**
+ * scan data in table
+ *
+ * Full-table scan over cf:link (all versions), printing the first cell of
+ * each row.
+ * NOTE(review): the ResultScanner is never closed — it should be closed in
+ * a finally block to release the server-side scanner; the HTable leaks too.
+ *
+ * @throws IOException
+ */
+
+ private static void scanData() throws IOException {
+ Configuration config = HBaseConfiguration.create();
+ HTable table = new HTable(config, Constant.tableName);
+
+ Scan scan = new Scan();
+ scan.setMaxVersions();
+ scan.addColumn(Bytes.toBytes(Constant.columnFamilyName), Bytes.toBytes(Constant.linkAddress));
+ //scan.addColumn(Bytes.toBytes(Constant.columnFamilyName), Bytes.toBytes(Constant.likeNumber));
+ ResultScanner scanner = table.getScanner(scan);
+ for (Result r : scanner) {
+ KeyValue[] values = r.raw();
+ System.out.println("scanned row:" + Bytes.toString(values[0].getValue()));
+ //System.out.println("scanned row:" + Bytes.toLong(values[1].getValue()));
+ }
+ }
+
+
+
+}
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:context="http://www.springframework.org/schema/context"
+ xmlns:hdp="http://www.springframework.org/schema/hadoop"
+ xmlns:p="http://www.springframework.org/schema/p"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
+ http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
+
+
+ <!-- Resolves ${hd.fs} (and friends) from hadoop.properties on the classpath -->
+ <context:property-placeholder location="hadoop.properties"/>
+
+ <!-- Spring Hadoop configuration: points the Hadoop client at the HDFS namenode -->
+ <hdp:configuration>
+ fs.default.name=${hd.fs}
+ </hdp:configuration>
+
+</beans>
@@ -0,0 +1,6 @@
+# HDFS namenode URI, injected into <hdp:configuration> via ${hd.fs} in context.xml.
+hd.fs=hdfs://localhost:9000
+
+# NOTE(review): inputTable/outputTable are not referenced by the code shown
+# here — verify they are used elsewhere before relying on them.
+inputTable=likes
+outputTable=numberOfLikes
+
+
@@ -0,0 +1,8 @@
+# Log4j configuration: INFO-level logging to the console with a timestamped pattern.
+log4j.rootCategory=INFO, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - <%m>%n
+
+# for debugging datasource initialization
+# log4j.category.test.jdbc=DEBUG
@@ -72,9 +72,9 @@ public static void main(String[] args) {
// runHBaseMR();
- JobRunner runner = ctx.getBean("&runner", JobRunner.class);
+ JobRunner runner = ctx.getBean("runner", JobRunner.class);
try {
- runner.runJob();
+ runner.runJobs();
} catch (Exception e1) {
e1.printStackTrace();
}
@@ -9,7 +9,7 @@
<hdp:configuration>
fs.default.name=hdfs://localhost:9000
- mapred.job.tracker=localhost:9001
+
</hdp:configuration>
Oops, something went wrong. Retry.

0 comments on commit 3d565b2

Please sign in to comment.