Skip to content

Commit

Permalink
HADOOP-11498. Bump the version of HTrace to 3.1.0-incubating (Masatake Iwasaki via Colin P. McCabe)
Browse files Browse the repository at this point in the history
  • Loading branch information
Colin Patrick Mccabe committed Jan 31, 2015
1 parent 8dc59cb commit 09ad9a8
Show file tree
Hide file tree
Showing 34 changed files with 219 additions and 202 deletions.
3 changes: 3 additions & 0 deletions hadoop-common-project/hadoop-common/CHANGES.txt
Expand Up @@ -521,6 +521,9 @@ Release 2.7.0 - UNRELEASED

HADOOP-9137. Support connection limiting in IPC server (kihwal)

HADOOP-11498. Bump the version of HTrace to 3.1.0-incubating (Masatake
Iwasaki via Colin P. McCabe)

OPTIMIZATIONS

HADOOP-11323. WritableComparator#compare keeps reference to byte array.
Expand Down
2 changes: 1 addition & 1 deletion hadoop-common-project/hadoop-common/pom.xml
Expand Up @@ -240,7 +240,7 @@
</dependency>

<dependency>
<groupId>org.htrace</groupId>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
<dependency>
Expand Down
Expand Up @@ -88,7 +88,7 @@
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.htrace.Trace;
import org.apache.htrace.Trace;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
Expand Down
Expand Up @@ -49,9 +49,8 @@
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.ProtoUtil;
import org.apache.hadoop.util.Time;
import org.htrace.Sampler;
import org.htrace.Trace;
import org.htrace.TraceScope;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.BlockingService;
Expand Down
Expand Up @@ -117,10 +117,10 @@
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.htrace.Span;
import org.htrace.Trace;
import org.htrace.TraceInfo;
import org.htrace.TraceScope;
import org.apache.htrace.Span;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceInfo;
import org.apache.htrace.TraceScope;

import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ByteString;
Expand Down
Expand Up @@ -42,8 +42,8 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.*;
import org.htrace.Trace;
import org.htrace.TraceScope;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

/** An RpcEngine implementation for Writable data. */
@InterfaceStability.Evolving
Expand Down
Expand Up @@ -25,7 +25,6 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
Expand All @@ -38,11 +37,12 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
import org.apache.hadoop.tracing.TraceUtils;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ShutdownHookManager;
import org.htrace.HTraceConfiguration;
import org.htrace.SpanReceiver;
import org.htrace.Trace;
import org.apache.htrace.SpanReceiver;
import org.apache.htrace.SpanReceiverBuilder;
import org.apache.htrace.Trace;

/**
* This class provides functions for reading the names of SpanReceivers from
Expand Down Expand Up @@ -156,60 +156,13 @@ public synchronized void loadSpanReceivers(Configuration conf) {

private synchronized SpanReceiver loadInstance(String className,
List<ConfigurationPair> extraConfig) throws IOException {
className = className.trim();
if (!className.contains(".")) {
className = "org.htrace.impl." + className;
}
Class<?> implClass = null;
SpanReceiver impl;
try {
implClass = Class.forName(className);
Object o = ReflectionUtils.newInstance(implClass, config);
impl = (SpanReceiver)o;
impl.configure(wrapHadoopConf(config, extraConfig));
} catch (ClassCastException e) {
throw new IOException("Class " + className +
" does not implement SpanReceiver.");
} catch (ClassNotFoundException e) {
throw new IOException("Class " + className + " cannot be found.");
} catch (SecurityException e) {
throw new IOException("Got SecurityException while loading " +
"SpanReceiver " + className);
} catch (IllegalArgumentException e) {
throw new IOException("Got IllegalArgumentException while loading " +
"SpanReceiver " + className, e);
} catch (RuntimeException e) {
throw new IOException("Got RuntimeException while loading " +
"SpanReceiver " + className, e);
}
return impl;
}

private static HTraceConfiguration wrapHadoopConf(final Configuration conf,
List<ConfigurationPair> extraConfig) {
final HashMap<String, String> extraMap = new HashMap<String, String>();
for (ConfigurationPair pair : extraConfig) {
extraMap.put(pair.getKey(), pair.getValue());
SpanReceiverBuilder builder =
new SpanReceiverBuilder(TraceUtils.wrapHadoopConf(config, extraConfig));
SpanReceiver rcvr = builder.spanReceiverClass(className.trim()).build();
if (rcvr == null) {
throw new IOException("Failed to load SpanReceiver " + className);
}
return new HTraceConfiguration() {
public static final String HTRACE_CONF_PREFIX = "hadoop.htrace.";

@Override
public String get(String key) {
if (extraMap.containsKey(key)) {
return extraMap.get(key);
}
return conf.get(HTRACE_CONF_PREFIX + key);
}

@Override
public String get(String key, String defaultValue) {
if (extraMap.containsKey(key)) {
return extraMap.get(key);
}
return conf.get(HTRACE_CONF_PREFIX + key, defaultValue);
}
};
return rcvr;
}

/**
Expand Down

This file was deleted.

@@ -0,0 +1,65 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tracing;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
import org.apache.htrace.HTraceConfiguration;

/**
* This class provides utility functions for tracing.
*/
@InterfaceAudience.Private
public class TraceUtils {
  public static final String HTRACE_CONF_PREFIX = "hadoop.htrace.";
  private static List<ConfigurationPair> NO_EXTRA_CONFIG =
      Collections.emptyList();

  /**
   * Wraps a Hadoop Configuration as an HTraceConfiguration, with no extra
   * key/value overrides.
   *
   * @param conf the Hadoop configuration to expose to HTrace
   * @return an HTraceConfiguration view over {@code conf}
   */
  public static HTraceConfiguration wrapHadoopConf(final Configuration conf) {
    return wrapHadoopConf(conf, NO_EXTRA_CONFIG);
  }

  /**
   * Wraps a Hadoop Configuration as an HTraceConfiguration. Keys present in
   * {@code extraConfig} take precedence; all other lookups fall through to
   * {@code conf} with the {@link #HTRACE_CONF_PREFIX} prefix prepended.
   *
   * @param conf        the Hadoop configuration to expose to HTrace
   * @param extraConfig overriding key/value pairs
   * @return an HTraceConfiguration view over {@code conf} plus overrides
   */
  public static HTraceConfiguration wrapHadoopConf(final Configuration conf,
      List<ConfigurationPair> extraConfig) {
    final Map<String, String> overrides = new HashMap<String, String>();
    for (ConfigurationPair pair : extraConfig) {
      overrides.put(pair.getKey(), pair.getValue());
    }
    return new HTraceConfiguration() {
      @Override
      public String get(String key) {
        // Same contract as the two-argument form, with "" as the default.
        return get(key, "");
      }

      @Override
      public String get(String key, String defaultValue) {
        if (overrides.containsKey(key)) {
          return overrides.get(key);
        }
        return conf.get(HTRACE_CONF_PREFIX + key, defaultValue);
      }
    };
  }
}
Expand Up @@ -27,8 +27,8 @@
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.UserGroupInformation;
import org.htrace.Span;
import org.htrace.Trace;
import org.apache.htrace.Span;
import org.apache.htrace.Trace;

import com.google.protobuf.ByteString;

Expand Down
20 changes: 10 additions & 10 deletions hadoop-common-project/hadoop-common/src/site/apt/Tracing.apt.vm
Expand Up @@ -60,7 +60,7 @@ public void receiveSpan(Span span);
+----
<property>
<name>hadoop.htrace.spanreceiver.classes</name>
<value>org.htrace.impl.LocalFileSpanReceiver</value>
<value>org.apache.htrace.impl.LocalFileSpanReceiver</value>
</property>
<property>
<name>hadoop.htrace.local-file-span-receiver.path</name>
Expand Down Expand Up @@ -131,11 +131,11 @@ public void receiveSpan(Span span);
+----
$ hadoop trace -list -host 192.168.56.2:9000
ID CLASS
1 org.htrace.impl.LocalFileSpanReceiver
1 org.apache.htrace.impl.LocalFileSpanReceiver

$ hadoop trace -list -host 192.168.56.2:50020
ID CLASS
1 org.htrace.impl.LocalFileSpanReceiver
1 org.apache.htrace.impl.LocalFileSpanReceiver
+----

<<<hadoop trace -remove>>> removes span receiver from server.
Expand All @@ -156,7 +156,7 @@ public void receiveSpan(Span span);

$ hadoop trace -list -host 192.168.56.2:9000
ID CLASS
2 org.htrace.impl.LocalFileSpanReceiver
2 org.apache.htrace.impl.LocalFileSpanReceiver
+----


Expand All @@ -172,9 +172,9 @@ public void receiveSpan(Span span);
+----
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.tracing.SpanReceiverHost;
import org.htrace.Sampler;
import org.htrace.Trace;
import org.htrace.TraceScope;
import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

...

Expand All @@ -200,9 +200,9 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.tracing.SpanReceiverHost;
import org.apache.hadoop.util.ToolRunner;
import org.htrace.Sampler;
import org.htrace.Trace;
import org.htrace.TraceScope;
import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

public class TracingFsShell {
public static void main(String argv[]) throws Exception {
Expand Down
@@ -0,0 +1,51 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tracing;

import static org.junit.Assert.assertEquals;
import java.util.LinkedList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
import org.apache.htrace.HTraceConfiguration;
import org.junit.Test;

public class TestTraceUtils {
  /**
   * A key set on the Hadoop configuration with the HTrace prefix must be
   * visible through the wrapped configuration without the prefix.
   */
  @Test
  public void testWrappedHadoopConf() {
    final String key = "sampler";
    final String expected = "ProbabilitySampler";
    Configuration hadoopConf = new Configuration();
    hadoopConf.set(TraceUtils.HTRACE_CONF_PREFIX + key, expected);
    HTraceConfiguration htraceConf = TraceUtils.wrapHadoopConf(hadoopConf);
    assertEquals(expected, htraceConf.get(key));
  }

  /**
   * Extra configuration pairs must override values coming from the
   * underlying Hadoop configuration.
   */
  @Test
  public void testExtraConfig() {
    final String key = "test.extra.config";
    final String staleValue = "old value";
    final String freshValue = "new value";
    Configuration hadoopConf = new Configuration();
    hadoopConf.set(TraceUtils.HTRACE_CONF_PREFIX + key, staleValue);
    LinkedList<ConfigurationPair> overrides =
        new LinkedList<ConfigurationPair>();
    overrides.add(new ConfigurationPair(key, freshValue));
    HTraceConfiguration htraceConf =
        TraceUtils.wrapHadoopConf(hadoopConf, overrides);
    assertEquals(freshValue, htraceConf.get(key));
  }
}

0 comments on commit 09ad9a8

Please sign in to comment.