forked from forward/node-hdfs
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
11 changed files
with
798 additions
and
249 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,2 +1,6 @@ | ||
build/ | ||
.lock-wscript | ||
hdfs_bindings.node | ||
hadoop_cluster.xml | ||
core-site.xml | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,60 @@ | ||
<?xml version="1.0"?> | ||
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> | ||
|
||
<!-- Put site-specific property overrides in this file. --> | ||
|
||
<configuration> | ||
|
||
<property> | ||
<name>dfs.replication</name> | ||
<value>1</value> | ||
<description>Default block replication. | ||
The actual number of replications can be specified when the file is created. | ||
The default is used if replication is not specified in create time. | ||
</description> | ||
</property> | ||
|
||
<property> | ||
<name>dfs.support.append</name> | ||
<value>true</value> | ||
<description>Allow appends to files. | ||
</description> | ||
</property> | ||
|
||
<property> | ||
<name>dfs.datanode.address</name> | ||
<value>0.0.0.0:50012</value> | ||
<description> | ||
The address where the datanode server will listen to. | ||
If the port is 0 then the server will start on a free port. | ||
</description> | ||
</property> | ||
|
||
<property> | ||
<name>dfs.datanode.http.address</name> | ||
<value>0.0.0.0:50079</value> | ||
<description> | ||
The datanode http server address and port. | ||
If the port is 0 then the server will start on a free port. | ||
</description> | ||
</property> | ||
|
||
<property> | ||
<name>dfs.datanode.ipc.address</name> | ||
<value>0.0.0.0:50022</value> | ||
<description> | ||
The datanode ipc server address and port. | ||
If the port is 0 then the server will start on a free port. | ||
</description> | ||
</property> | ||
|
||
<property> | ||
<name>dfs.http.address</name> | ||
<value>0.0.0.0:50072</value> | ||
<description> | ||
The address and the base port where the dfs namenode web ui will listen on. | ||
If the port is 0 then the server will start on a free port. | ||
</description> | ||
</property> | ||
|
||
</configuration> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
<?xml version="1.0"?> | ||
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> | ||
|
||
<!-- Put site-specific property overrides in this file. --> | ||
|
||
<configuration> | ||
|
||
</configuration> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
localhost |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,25 +1,39 @@ | ||
// Demo of the node-hdfs API: stat a file, stream-read it, and copy it
// to the local filesystem. Assumes /tmp/test.txt already exists in HDFS.
var sys = require('sys')
  , HDFS = require('../node-hdfs')

// host:"default"/port:0 lets libhdfs pick the configured default namenode.
var hdfs = new HDFS({host:"default", port:0});

var hdfs_file_path = "/tmp/test.txt";
var local_out_path = "/tmp/test.out";

// Stat file: asynchronously fetch file metadata.
hdfs.stat(hdfs_file_path, function(err, data) {
  if(!err) {
    console.log("File stat of '" + data.path + "' is: " + JSON.stringify(data));
    // => {"type":"file","path":"/tmp/test","size":183,"replication":1,"block_size":33554432,"owner":"horaci","group":"wheel","permissions":420,"last_mod":1315326370,"last_access":0}
  }
})

// Read file in 1MB chunks; the callback receives an event emitter
// that fires "open", "data" (one Buffer per chunk) and "end".
hdfs.read(hdfs_file_path, 1024*1024, function(reader) {
  var bytesRead = 0;
  reader.on("open", function(handle) {
    console.log("File " + hdfs_file_path + " opened.")
  });
  reader.on("data", function(data) {
    bytesRead += data.length;
    console.log("read " + data.length + " bytes (" + bytesRead + ")");
  });
  reader.on("end", function(err) {
    if(!err) {
      console.log("Finished reading data - Total read: " + bytesRead);
    }
  });
})

// Copy file to local fs (runs in parallel with the read above).
hdfs.copyToLocalPath(hdfs_file_path, local_out_path, function(err, bytesCopied) {
  if(!err) {
    console.log(bytesCopied + " bytes copied from " + hdfs_file_path + " to " + local_out_path);
  }
})
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,129 @@ | ||
// node-hdfs: thin JavaScript wrapper around the libhdfs native bindings.
var sys = require('sys')
  , fs = require('fs')
  , EventEmitter = require('events').EventEmitter
  , HDFSBindings = require('./hdfs_bindings')

// Single shared binding instance; all wrapper methods funnel through it.
var HDFS = new HDFSBindings.Hdfs();

// POSIX-style open(2) flag values understood by the native open().
var modes = {
  O_RDONLY : 0x0000,  // open for reading only
  O_WRONLY : 0x0001,  // open for writing only
  O_RDWR   : 0x0002,  // open for reading and writing
  O_APPEND : 0x0008,  // append on each write
  O_CREAT  : 0x0200,  // create file if it does not exist
  O_TRUNC  : 0x0400   // truncate size to 0
}
|
||
module.exports = function(options) { | ||
this.host = options.host || "default"; | ||
this.port = options.port || 0; | ||
this.connected = false; | ||
|
||
var self = this; | ||
|
||
this.connect = function() { | ||
if(!this.connected) { | ||
HDFS.connect(self.host, self.port); | ||
this.connected = true; | ||
} | ||
} | ||
|
||
this.stat = function(path, cb) { | ||
self.connect(); | ||
HDFS.stat(path, cb); | ||
} | ||
|
||
this.open = function(path, mode, cb) { | ||
self.connect(); | ||
HDFS.open(path, mode, cb); | ||
} | ||
|
||
this.close = function(handle, cb) { | ||
self.connect(); | ||
HDFS.close(handle, cb); | ||
} | ||
|
||
this.read = function(path, bufferSize, cb) { | ||
if (!cb || typeof cb != "function") { | ||
cb = bufferSize; | ||
bufferSize = 1024*1024; | ||
} | ||
|
||
self.connect(); | ||
var reader = new HDFSReader(path, bufferSize); | ||
if(cb) { | ||
cb(reader); | ||
} else { | ||
return reader; | ||
} | ||
} | ||
|
||
this.copyToLocalPath = function(srcPath, dstPath, options, cb) { | ||
if (!cb || typeof cb != "function") { | ||
cb = options; | ||
options = {encoding: null, mode:0666, flags: 'w'}; | ||
} | ||
var stream = fs.createWriteStream(dstPath, options); | ||
var readed = 0; | ||
var bufferSize = options.bufferSize || 1024*1024; // 1mb chunks by default | ||
|
||
stream.once('open', function(fd) { | ||
self.read(srcPath, bufferSize, function(rh) { | ||
rh.on('data', function(data) { | ||
stream.write(data); | ||
readed += data.length; | ||
}); | ||
rh.once('end', function(err) { | ||
stream.end(); | ||
cb(err, readed); | ||
}); | ||
}) | ||
}); | ||
} | ||
} | ||
|
||
|
||
// Streams an HDFS file: opens `path` read-only, then emits "open" with the
// native handle, "data" with each chunk, and finally "end" (with an error
// argument on failure, undefined on success).
var HDFSReader = function(path, bufferSize) {
  // Initialize emitter state *before* any callback can emit — the original
  // called this last, after HDFS.open was already in flight.
  EventEmitter.call(this);

  var self = this;

  this.handle = null;                          // native handle, set once open succeeds
  this.offset = 0;                             // next byte offset to read
  this.length = 0;
  this.bufferSize = bufferSize || 1024*1024;   // 1mb chunks by default

  // Pull the next chunk; emits "data" and recurses until EOF.
  this.read = function() {
    HDFS.read(self.handle, self.offset, self.bufferSize, function(data) {
      if(!data || data.length == 0) {
        self.end();
      } else {
        self.emit("data", data);
        self.offset += data.length;
        // A short read means EOF; otherwise keep pulling.
        data.length < self.bufferSize ? self.end() : self.read();
      }
    });
  };

  // Close the handle (if we have one) and emit "end" with `err` passed through.
  this.end = function(err) {
    if(self.handle) {
      HDFS.close(self.handle, function() {
        self.emit("end", err);
      })
    } else {
      self.emit("end", err);
    }
  }

  HDFS.open(path, modes.O_RDONLY, function(err, handle) {
    if(err) {
      self.end(err);
    } else {
      // Store the handle before emitting so "open" listeners can see it
      // (the original emitted first, so listeners observed handle === null).
      self.handle = handle;
      self.emit("open", handle);
      self.read();
    }
  });
}

sys.inherits(HDFSReader, EventEmitter);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
{ "name": "node-hdfs", | ||
"version": "0.0.1", | ||
"author": "Forward", | ||
"contributors": [ | ||
{ "name": "Paul Ingles", "email": "paul@forward.co.uk"}, | ||
{ "name": "Horaci Cuevas", "email": "horaci@forward.co.uk" } | ||
], | ||
"description": "A node module for accessing Hadoop's file system (HDFS)", | ||
"scripts": { "preinstall": "node-waf configure build" }, | ||
"main": "./node-hdfs", | ||
"engines": { "node": ">0.4.11" }, | ||
"keywords": [ "hdfs", "hadoop", "fs", "libhdfs" ], | ||
"repository": { "type": "git", "url": "http://github.com/forward/node-hdfs.git" } | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,11 @@ | ||
# Build the native bindings and run the demo against a local Hadoop install.
export HADOOP_HOME=/usr/local/hadoop

# Path to hadoop libs
export CLASSPATH=$HADOOP_HOME/hadoop-core-0.20.2-cdh3u0.jar:$HADOOP_HOME/lib/commons-logging-1.0.4.jar

# Add conf path where core-site.xml is
# (fixed "::" typo, which injected an empty CLASSPATH entry)
export CLASSPATH=$CLASSPATH:./conf

# Rebuild the bindings from scratch, then run the demo.
node-waf clean
node-waf configure
node-waf build
node demo/demo.js
Oops, something went wrong.