
See setupZookeeperHadoopHbaseTomcatSolrNutch for an advanced setup.

Stopping HBase

Sometimes the usual ./stop-mapred.sh && ./stop-dfs.sh won't stop all daemons, especially if you messed up the configuration or dependencies. In that case you can look up the PIDs of the remaining processes in the PID files the daemons write to /tmp by default, and kill them by hand:

$ cd /tmp
$ cat hadoop*.pid    # prints the PIDs of the remaining daemons
$ kill -9 <pid>      # repeat for every PID printed above
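
You can also do this in one step. The following sketch collects the PIDs directly from the files, assuming the default PID directory /tmp and the standard hadoop-<user>-<daemon>.pid file names:

$ kill -9 $(cat /tmp/hadoop-*.pid)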

HDFS interaction

The following minimal example writes a short message to a file in HDFS and reads it back:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;

public class HDFSHelloWorld {

  public static final String theFilename = "hello.txt";
  public static final String message = "Hello, world!\n";

  public static void main (String [] args) throws IOException {

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    Path filenamePath = new Path(theFilename);

    try {
      if (fs.exists(filenamePath)) {
        // remove any previous copy of the file first
        fs.delete(filenamePath, false);
      }

      // write the message to a new file in HDFS
      FSDataOutputStream out = fs.create(filenamePath);
      out.writeUTF(message);
      out.close();

      // read the message back and print it to stdout
      FSDataInputStream in = fs.open(filenamePath);
      String messageIn = in.readUTF();
      System.out.print(messageIn);
      in.close();
    } catch (IOException ioe) {
      System.err.println("IOException during operation: " + ioe.toString());
      System.exit(1);
    }
  }
}
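
One way to compile and run the example (a sketch, assuming the hadoop command is on your PATH so its classpath subcommand can supply the required jars):

$ javac -cp $(hadoop classpath) HDFSHelloWorld.java
$ java -cp $(hadoop classpath):. HDFSHelloWorld

This should print Hello, world!. Note that if no Hadoop configuration files are on the classpath, FileSystem.get(conf) will fall back to the local file system instead of HDFS.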