hadoop support and bug fixes

Commit dbc7df4872ef73b81f30d49997f9a69a365d2902 (1 parent: e4c7ff9), committed by Anthony Giardullo on Jun 6, 2009
README
@@ -58,6 +58,7 @@ Requirements
[thrift] Thrift framework
[fb303] Facebook Bassline (included in thrift/contrib/fb303/)
fb303 r697294 or later is required.
+[hadoop] Optional. Version 0.19.1 or higher (http://hadoop.apache.org)
These libraries are open source and may be freely obtained, but they are not
provided as a part of this distribution.
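A quick way to confirm the optional Hadoop dependency meets the version requirement (a minimal sketch; it assumes the hadoop launcher script is already on your PATH):

# Print the installed Hadoop release; 0.19.1 or higher is needed for the HDFS store
hadoop version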
@@ -122,6 +123,9 @@ Examples:
# To disable static libraries and enable shared libraries. [ default has been set to static]
./configure --disable-static
+# To build scribe with Hadoop support
+./configure --enable-hdfs
+
# To set thrift home to a non-default location
./configure --with-thriftpath=/myhome/local/thrift
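The switches above can be combined. A hypothetical invocation for an HDFS-enabled build against non-default library locations might look like the following; the paths are placeholders, and --with-hadooppath comes from the FB_WITH_PATH addition to configure.ac shown further down:

# Hypothetical combined invocation (placeholder paths)
./configure --enable-hdfs \
            --with-hadooppath=/usr/local/hadoop \
            --with-thriftpath=/myhome/local/thrift \
            --with-fb303path=/myhome/local/fb303
make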
@@ -147,5 +151,5 @@ Acknowledgements
================
The build process for Scribe uses autoconf macros to compile/link with Boost.
These macros were written by Thomas Porschberg, Michael Tindal, and
-Daniel Casimiro. See ax_boost_base.m4, ax_boost_filesystem.m4, and
-ax_boost_system.m4 in the aclocal subdirectory for more information.
+Daniel Casimiro. See the m4 files in the aclocal subdirectory for more
+information.
configure.ac
@@ -46,16 +46,17 @@ FB_ENABLE_DEFAULT_STATIC
# Example: Macro supplies -DFACEBOOK at compile time and "if FACEBOOK endif" capabilities.
FB_ENABLE_FEATURE([FACEBOOK], [facebook])
+FB_ENABLE_FEATURE([USE_SCRIBE_HDFS], [hdfs])
# Personalized path generator Sets default paths. Provides --with-xx=DIR options.
# FB_WITH_PATH([<var>_home], [<var>path], [<default location>]
# Example: sets $(thrift_home) variable with default path set to /usr/local.
FB_WITH_PATH([thrift_home], [thriftpath], [/usr/local])
-FB_WITH_PATH([jvm_lib], [jvmpath], [/usr/local/java/jre/lib/amd64/server])
FB_WITH_PATH([fb303_home], [fb303path], [/usr/local])
FB_WITH_PATH([smc_home], [smcpath], [${EXTERNAL_PATH}/services/trunk/src])
FB_WITH_PATH([fb_home], [fbpath], [${EXTERNAL_PATH}/libfacebook])
+FB_WITH_PATH([hadoop_home], [hadooppath], [/usr/local])
# Require boost 1.36 with system and filesystem libraries
AX_BOOST_BASE([1.36])
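Since FB_ENABLE_FEATURE and FB_WITH_PATH appear to wrap the usual autoconf argument macros (an assumption based on the comments above), the new switches should surface in the regenerated configure script's help text:

# After regenerating configure from this configure.ac, the new options show up
# alongside the existing ones
./configure --help | grep -E 'hdfs|hadooppath'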
New file: sample Scribe configuration (HDFS primary, local-disk secondary)
@@ -0,0 +1,67 @@
+## Copyright (c) 2007-2009 Facebook
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+## http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+## See accompanying file LICENSE or visit the Scribe site at:
+## http://developers.facebook.com/scribe/
+
+
+##
+## Sample Scribe configuration
+##
+
+# This file configures Scribe to listen for messages on port 1463 and write
+# all messages to Hadoop. If unable to write to Hadoop, Scribe will buffer to
+# /tmp and keep retrying.
+
+port=1463
+max_msg_per_second=2000000
+check_interval=1
+max_queue_size=100000000
+num_thrift_server_threads=2
+
+
+# DEFAULT - write all messages to hadoop
+<store>
+category=default
+type=buffer
+
+target_write_size=20480
+max_write_interval=1
+buffer_send_rate=1
+retry_interval=30
+retry_interval_range=10
+
+<primary>
+type=file
+fs_type=hdfs
+file_path=hdfs://myhadoopserver:9000/scribedata
+create_symlink=no
+use_hostname_sub_directory=yes
+base_filename=thisisoverwritten
+max_size=1000000000
+rotate_period=daily
+rotate_hour=0
+rotate_minute=5
+add_newlines=1
+</primary>
+
+
+<secondary>
+type=file
+fs_type=std
+file_path=/tmp/scribetest
+base_filename=thisisoverwritten
+max_size=3000000
+</secondary>
+</store>
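A hypothetical smoke test for the configuration above (the scribed invocation and config filename are assumptions; the HDFS URI and /tmp/scribetest path come from the store definitions):

# Start the daemon with this config (filename is a placeholder), then check
# where messages land
scribed my_hdfs.conf &
# With use_hostname_sub_directory=yes, files appear under a per-host
# subdirectory of the configured path
hadoop fs -ls hdfs://myhadoopserver:9000/scribedata
# If HDFS is unreachable, the secondary store buffers to local disk instead
ls /tmp/scribetest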
New file: sample Scribe configuration (HDFS primary, HDFS backup, local-disk fallback)
@@ -0,0 +1,98 @@
+## Copyright (c) 2007-2009 Facebook
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+## http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+## See accompanying file LICENSE or visit the Scribe site at:
+## http://developers.facebook.com/scribe/
+
+
+##
+## Sample Scribe configuration
+##
+
+# This file configures Scribe to first attempt to write to a hadoop instance.
+# If this fails, scribe will then attempt to write to a backup hadoop
+# instance. If this fails, scribe will buffer files to local disk. This is
+# accomplished by nesting a buffer store inside another buffer store.
+
+# Note that since replay_buffer=no in the inner buffer store, messages written
+# to the backup hadoop instance will remain on the backup hadoop instance even
+# if the primary hadoop instance comes back online. But since replay_buffer is
+# not turned off in the outer buffer store, messages logged to /tmp will
+# eventually get logged to hadoop when either the primary or backup hadoop
+# instance comes back online.
+
+port=1463
+max_msg_per_second=1000000
+check_interval=1
+max_queue_size=100000000
+num_thrift_server_threads=3
+
+# DEFAULT
+<store>
+ category=default
+ type=buffer
+
+ target_write_size=20480
+ max_write_interval=1
+ retry_interval=120
+ retry_interval_range=60
+ buffer_send_rate=5
+
+ <primary>
+ type=buffer
+
+ target_write_size=20480
+ max_write_interval=1
+ retry_interval=600
+ retry_interval_range=60
+ replay_buffer=no
+
+ <primary>
+ type=file
+ fs_type=hdfs
+ file_path=hdfs://hadoopserver:9000/scribedata
+ create_symlink=no
+ use_hostname_sub_directory=yes
+ base_filename=thisisoverwritten
+ max_size=1000000000
+ rotate_period=daily
+ rotate_hour=0
+ rotate_minute=5
+ add_newlines=1
+ </primary>
+
+ <secondary>
+ type=file
+ fs_type=hdfs
+ file_path=hdfs://backuphadoopserver:9000/scribedata
+ create_symlink=no
+ use_hostname_sub_directory=yes
+ base_filename=thisisoverwritten
+ max_size=1000000000
+ rotate_period=daily
+ rotate_hour=0
+ rotate_minute=5
+ add_newlines=1
+ </secondary>
+ </primary>
+
+ <secondary>
+ type=file
+ fs_type=std
+ file_path=/tmp
+ base_filename=thisisoverwritten
+ max_size=4000000
+ </secondary>
+</store>
+
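A short sketch of how the failover described in the comments above plays out (hostnames are the placeholders from the config; the hadoop fs commands assume a working Hadoop client on the scribe host):

# Messages normally land on the primary instance
hadoop fs -ls hdfs://hadoopserver:9000/scribedata
# Messages written during a primary outage stay on the backup instance,
# because the inner buffer store sets replay_buffer=no
hadoop fs -ls hdfs://backuphadoopserver:9000/scribedata
# Local buffer files in /tmp appear only while both instances are down; the
# outer buffer store replays them to HDFS once either instance returns
ls /tmp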
scribe.thrift
@@ -20,7 +20,6 @@
include "fb303/if/fb303.thrift"
namespace cpp scribe.thrift
-namespace perl Scribe.Thrift
enum ResultCode
{