Skip to content

Commit d50e8f0

Browse files
committed
MAPREDUCE-6238. MR2 can't run local jobs with -libjars command options, which is a regression from MR1 (zxu via rkanter)
1 parent f967fd2 commit d50e8f0

File tree

4 files changed

+96
-7
lines changed

4 files changed

+96
-7
lines changed

hadoop-mapreduce-project/CHANGES.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -334,6 +334,9 @@ Release 2.8.0 - UNRELEASED
334334
MAPREDUCE-6266. Job#getTrackingURL should consistently return a proper URL
335335
(rchiang via rkanter)
336336

337+
MAPREDUCE-6238. MR2 can't run local jobs with -libjars command options
338+
which is a regression from MR1 (zxu via rkanter)
339+
337340
Release 2.7.1 - UNRELEASED
338341

339342
INCOMPATIBLE CHANGES

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -100,18 +100,12 @@ public void setup(JobConf conf) throws IOException {
100100
Path[] archiveClassPaths = DistributedCache.getArchiveClassPaths(conf);
101101
if (archiveClassPaths != null) {
102102
for (Path p : archiveClassPaths) {
103-
FileSystem remoteFS = p.getFileSystem(conf);
104-
p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(),
105-
remoteFS.getWorkingDirectory()));
106103
classpaths.put(p.toUri().getPath().toString(), p);
107104
}
108105
}
109106
Path[] fileClassPaths = DistributedCache.getFileClassPaths(conf);
110107
if (fileClassPaths != null) {
111108
for (Path p : fileClassPaths) {
112-
FileSystem remoteFS = p.getFileSystem(conf);
113-
p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(),
114-
remoteFS.getWorkingDirectory()));
115109
classpaths.put(p.toUri().getPath().toString(), p);
116110
}
117111
}

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ public void uploadFiles(Job job, Path submitJobDir) throws IOException {
127127
Path tmp = new Path(tmpjars);
128128
Path newPath = copyRemoteFiles(libjarsDir, tmp, conf, replication);
129129
DistributedCache.addFileToClassPath(
130-
new Path(newPath.toUri().getPath()), conf);
130+
new Path(newPath.toUri().getPath()), conf, jtFs);
131131
}
132132
}
133133

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.SleepJob;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Checks the job submission options {@code -jt local -libjars}.
 *
 * The regression covered here (MAPREDUCE-6238) only manifested when the
 * default filesystem was remote (HDFS) while the job ran locally, so the
 * test configures an hdfs:// default FS and then submits a local job.
 */
public class TestLocalJobSubmission {

  /** Scratch directory for the generated jar; defaults to /tmp. */
  private static final Path TEST_ROOT_DIR =
      new Path(System.getProperty("test.build.data", "/tmp"));

  /**
   * Test the local job submission options of {@code -jt local -libjars}.
   *
   * @throws IOException if the test jar cannot be created
   */
  @Test
  public void testLocalJobLibjarsOption() throws IOException {
    Path jarPath = makeJar(new Path(TEST_ROOT_DIR, "test.jar"));

    Configuration conf = new Configuration();
    // A remote default FS is the key ingredient: the bug appeared only
    // when -libjars paths had to be resolved against a non-local FS.
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "hdfs://testcluster");
    final String[] args = {
        "-jt", "local", "-libjars", jarPath.toString(),
        "-m", "1", "-r", "1", "-mt", "1", "-rt", "1"
    };
    int res = -1;
    try {
      res = ToolRunner.run(conf, new SleepJob(), args);
    } catch (Exception e) {
      System.out.println("Job failed with " + e.getLocalizedMessage());
      e.printStackTrace(System.out);
      fail("Job failed");
    }
    // Was "dist job res is not 0:" — copy-paste slip; this is a local job.
    assertEquals("local job res is not 0:", 0, res);
  }

  /**
   * Creates a small jar at the given path containing a single entry.
   *
   * @param p where to write the jar
   * @return the same path {@code p}, for convenient chaining
   * @throws IOException if the jar cannot be written
   */
  private Path makeJar(Path p) throws IOException {
    // try-with-resources guarantees both streams close even if the
    // JarOutputStream constructor or a write fails (the original
    // leaked the FileOutputStream on those paths).
    try (FileOutputStream fos = new FileOutputStream(new File(p.toString()));
         JarOutputStream jos = new JarOutputStream(fos)) {
      ZipEntry ze = new ZipEntry("test.jar.inside");
      jos.putNextEntry(ze);
      // Pin UTF-8 instead of the platform default charset.
      jos.write("inside the jar!".getBytes(StandardCharsets.UTF_8));
      jos.closeEntry();
    }
    return p;
  }
}

0 commit comments

Comments
 (0)