Commit

s/SECURE/NEWRPC/
ekoontz committed Apr 2, 2012
1 parent 2816d42 commit d4050e4
Showing 3 changed files with 19 additions and 19 deletions.
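
The markers renamed by this commit are not ordinary comments: they are conditional-compilation directives that Giraph's build appears to feed through a munge-style preprocessor, so one source tree can compile against several Hadoop versions. Below is a minimal, self-contained sketch of the pattern (the GuardExample class and RPC_MODE constant are illustrative, not Giraph code):

// As checked in, plain javac sees only the default branch: the guarded
// branch sits inside the block comment opened by "/*if[...]" and closed
// by "else[...]*/".
public class GuardExample {
  /*if[HADOOP_NEWRPC]
  // Live only after preprocessing with HADOOP_NEWRPC defined.
  private static final String RPC_MODE = "new";
  else[HADOOP_NEWRPC]*/
  // Live by default, i.e. when HADOOP_NEWRPC is undefined.
  private static final String RPC_MODE = "old";
  /*end[HADOOP_NEWRPC]*/

  public static void main(String[] args) {
    System.out.println("Compiled against RPC mode: " + RPC_MODE);
  }
}

A preprocessing pass with the symbol defined flips the comment markers, uncommenting the first branch and commenting out the second, so exactly one definition of RPC_MODE is live in any given build.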
8 changes: 4 additions & 4 deletions src/main/java/org/apache/giraph/comm/BasicRPCCommunications.java
@@ -61,9 +61,9 @@

import com.google.common.collect.Iterables;

-/*if[HADOOP_SECURE]
+/*if[HADOOP_NEWRPC]
import org.apache.hadoop.ipc.ProtocolSignature;
-end[HADOOP_SECURE]*/
+end[HADOOP_NEWRPC]*/

/**
* Basic RPC communications object that implements the lower level operations
@@ -693,14 +693,14 @@ public final long getProtocolVersion(String protocol, long clientVersion)
return VERSION_ID;
}

-/*if[HADOOP_SECURE]
+/*if[HADOOP_NEWRPC]
public ProtocolSignature getProtocolSignature(
String protocol,
long clientVersion,
int clientMethodsHash) throws IOException {
return new ProtocolSignature(VERSION_ID, null);
}
-end[HADOOP_SECURE]*/
+end[HADOOP_NEWRPC]*/

@Override
public void closeConnections() throws IOException {
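
The block guarded just above imports org.apache.hadoop.ipc.ProtocolSignature and adds a getProtocolSignature method, both of which come with Hadoop's reworked RPC engine rather than with its security layer; that mismatch is presumably what motivates renaming the guard from HADOOP_SECURE to HADOOP_NEWRPC. A hedged, standalone sketch of the guarded method (SignatureExample and its VERSION_ID are illustrative stand-ins for Giraph's class and constant):

import java.io.IOException;
import org.apache.hadoop.ipc.ProtocolSignature;

public class SignatureExample {
  // Illustrative protocol version; Giraph defines its own VERSION_ID.
  private static final long VERSION_ID = 1L;

  // Under the newer RPC engine a server can describe its protocol so that
  // clients verify compatibility. A null method-hash array means "no
  // per-method hashes", exactly as the change above does.
  public ProtocolSignature getProtocolSignature(
      String protocol, long clientVersion, int clientMethodsHash)
      throws IOException {
    return new ProtocolSignature(VERSION_ID, null);
  }
}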
18 changes: 9 additions & 9 deletions src/main/java/org/apache/giraph/comm/RPCCommunications.java
@@ -65,11 +65,11 @@
@SuppressWarnings("rawtypes")
public class RPCCommunications<I extends WritableComparable,
V extends Writable, E extends Writable, M extends Writable>
-/*if[HADOOP_SECURE]
+/*if[HADOOP_NEWRPC]
extends BasicRPCCommunications<I, V, E, M, Token<JobTokenIdentifier>> {
-else[HADOOP_SECURE]*/
+else[HADOOP_NEWRPC]*/
extends BasicRPCCommunications<I, V, E, M, Object> {
-/*end[HADOOP_SECURE]*/
+/*end[HADOOP_NEWRPC]*/

/** Class logger */
public static final Logger LOG = Logger.getLogger(RPCCommunications.class);
@@ -129,7 +129,7 @@ Object createJobToken() throws IOException {
@Override
protected Server getRPCServer(
InetSocketAddress myAddress, int numHandlers, String jobId,
-/*if[HADOOP_SECURE]
+/*if[HADOOP_NEWRPC]
// needs facebook/trunk distinction.
Token<JobTokenIdentifier> jt) throws IOException {
@SuppressWarnings("deprecation")
@@ -150,11 +150,11 @@ protected Server getRPCServer(
server.refreshServiceAcl(conf, new BspPolicyProvider());
}
return server;
-else[HADOOP_SECURE]*/
+else[HADOOP_NEWRPC]*/
Object jt) throws IOException {
return RPC.getServer(this, myAddress.getHostName(), myAddress.getPort(),
numHandlers, false, conf);
-/*end[HADOOP_SECURE]*/
+/*end[HADOOP_NEWRPC]*/
}


@@ -170,7 +170,7 @@ protected Server getRPCServer(
/*if[HADOOP_NEWRPC]
else[HADOOP_NEWRPC]*/
/*end[HADOOP_NEWRPC]*/
-/*if[HADOOP_SECURE]
+/*if[HADOOP_NEWRPC]
CommunicationsInterface<I, V, E, M> getRPCProxy(
final InetSocketAddress addr,
String jobId,
@@ -207,7 +207,7 @@ public CommunicationsInterface<I, V, E, M> run()
});
return proxy;
}
-else[HADOOP_SECURE]*/
+else[HADOOP_NEWRPC]*/
CommunicationsInterface<I, V, E, M> getRPCProxy(
final InetSocketAddress addr,
String jobId,
@@ -219,5 +219,5 @@ CommunicationsInterface<I, V, E, M> getRPCProxy(
CommunicationsInterface.class, VERSION_ID, addr, config);
return proxy;
}
-/*end[HADOOP_SECURE]*/
+/*end[HADOOP_NEWRPC]*/
}
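
RPCCommunications thus keeps two proxy-creation paths: under HADOOP_NEWRPC the proxy is created inside a doAs block carrying the job token, while the fallback branch calls the old static RPC.getProxy factory directly, as the last hunk shows. A minimal sketch of that fallback call (MyProtocol and ProxyExample are illustrative; Giraph's real interface is CommunicationsInterface, and the exact RPC.getProxy overload set varies across Hadoop releases):

import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.VersionedProtocol;

public class ProxyExample {
  // Stand-in for Giraph's CommunicationsInterface.
  public interface MyProtocol extends VersionedProtocol {
    long VERSION_ID = 1L;
  }

  public static MyProtocol getProxy(InetSocketAddress addr, Configuration conf)
      throws IOException {
    // Old-style Hadoop RPC: a static factory keyed by interface class and
    // protocol version, mirroring the four-argument call in the diff.
    return (MyProtocol) RPC.getProxy(
        MyProtocol.class, MyProtocol.VERSION_ID, addr, conf);
  }
}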
12 changes: 6 additions & 6 deletions src/test/java/org/apache/giraph/TestBspBasic.java
@@ -52,10 +52,10 @@
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
-/*if[HADOOP_SECURE]
+/*if[HADOOP_NEWRPC]
import org.apache.hadoop.mapreduce.task.JobContextImpl;
-else[HADOOP_SECURE]*/
-/*end[HADOOP_SECURE]*/
+else[HADOOP_NEWRPC]*/
+/*end[HADOOP_NEWRPC]*/
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
@@ -115,15 +115,15 @@ public void testInstantiateVertex()
", graphState" + gs);
VertexInputFormat<LongWritable, IntWritable, FloatWritable, IntWritable>
inputFormat = BspUtils.createVertexInputFormat(job.getConfiguration());
-/*if[HADOOP_SECURE]
+/*if[HADOOP_NEWRPC]
List<InputSplit> splitArray =
inputFormat.getSplits(
new JobContextImpl(new Configuration(), new JobID()), 1);
-else[HADOOP_SECURE]*/
+else[HADOOP_NEWRPC]*/
List<InputSplit> splitArray =
inputFormat.getSplits(
new JobContext(new Configuration(), new JobID()), 1);
-/*end[HADOOP_SECURE]*/
+/*end[HADOOP_NEWRPC]*/
ByteArrayOutputStream byteArrayOutputStream =
new ByteArrayOutputStream();
DataOutputStream outputStream =
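
The test change tracks an API break between Hadoop lines: in 0.20.x, org.apache.hadoop.mapreduce.JobContext is a concrete class with a (Configuration, JobID) constructor, whereas the newer line turned it into an interface whose concrete implementation is org.apache.hadoop.mapreduce.task.JobContextImpl. A short sketch of the newer-line branch (ContextExample is illustrative and compiles only against a Hadoop release that ships JobContextImpl):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class ContextExample {
  public static JobContext newJobContext() {
    // JobContextImpl(Configuration, JobID) mirrors the old concrete
    // JobContext(Configuration, JobID) constructor one-for-one.
    return new JobContextImpl(new Configuration(), new JobID());
  }
}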
