Skip to content

Commit

Permalink
NUTCH-2633 Fix deprecation warnings when building Nutch master branch…
Browse files Browse the repository at this point in the history
… under JDK 10.0.2+13 (#374)
  • Loading branch information
lewismc committed Aug 11, 2018
1 parent 01c5d6e commit f02110f
Show file tree
Hide file tree
Showing 95 changed files with 143 additions and 252 deletions.
Empty file modified src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java
100755 → 100644
Empty file.
Empty file modified src/java/org/apache/nutch/crawl/AdaptiveFetchSchedule.java
100755 → 100644
Empty file.
2 changes: 1 addition & 1 deletion src/java/org/apache/nutch/crawl/CrawlDatum.java
Original file line number Diff line number Diff line change
Expand Up @@ -545,7 +545,7 @@ public boolean evaluate(Expression expr, String url) {
jcontext.set("fetchTime", (long)(getFetchTime()));
jcontext.set("modifiedTime", (long)(getModifiedTime()));
jcontext.set("retries", getRetriesSinceFetch());
jcontext.set("interval", new Integer(getFetchInterval()));
jcontext.set("interval", Integer.valueOf(getFetchInterval()));
jcontext.set("score", getScore());
jcontext.set("signature", StringUtil.toHexString(getSignature()));

Expand Down
1 change: 0 additions & 1 deletion src/java/org/apache/nutch/crawl/CrawlDbMerger.java
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,6 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.nutch.util.LockUtil;
import org.apache.nutch.util.NutchConfiguration;
import org.apache.nutch.util.NutchJob;
import org.apache.nutch.util.TimingUtil;
Expand Down
4 changes: 0 additions & 4 deletions src/java/org/apache/nutch/crawl/CrawlDbReader.java
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
Expand All @@ -43,7 +42,6 @@
import com.tdunning.math.stats.MergingDigest;
import com.tdunning.math.stats.TDigest;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
Expand All @@ -64,10 +62,8 @@
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.StringUtils;
import org.apache.nutch.util.AbstractChecker;
Expand Down
Empty file modified src/java/org/apache/nutch/crawl/DefaultFetchSchedule.java
100755 → 100644
Empty file.
Empty file modified src/java/org/apache/nutch/crawl/FetchSchedule.java
100755 → 100644
Empty file.
2 changes: 1 addition & 1 deletion src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ public synchronized static FetchSchedule getFetchSchedule(Configuration conf) {
try {
LOG.info("Using FetchSchedule impl: " + clazz);
Class<?> implClass = Class.forName(clazz);
impl = (FetchSchedule) implClass.newInstance();
impl = (FetchSchedule) implClass.getConstructor().newInstance();
impl.setConf(conf);
objectCache.setObject(clazz, impl);
} catch (Exception e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ private void readMimeFile(Reader mimeFile) throws IOException {
if (splits.length == 3) {
// Add a lower cased MIME-type and the factor to the map
mimeMap.put(StringUtils.lowerCase(splits[0]), new AdaptiveRate(
new Float(splits[1]), new Float(splits[2])));
Float.valueOf(splits[1]), Float.valueOf(splits[2])));
} else {
LOG.warn("Invalid configuration line in: " + line);
}
Expand Down
2 changes: 1 addition & 1 deletion src/java/org/apache/nutch/crawl/SignatureFactory.java
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ public synchronized static Signature getSignature(Configuration conf) {
LOG.info("Using Signature impl: " + clazz);
}
Class<?> implClass = Class.forName(clazz);
impl = (Signature) implClass.newInstance();
impl = (Signature) implClass.getConstructor().newInstance();
impl.setConf(conf);
objectCache.setObject(clazz, impl);
} catch (Exception e) {
Expand Down
2 changes: 1 addition & 1 deletion src/java/org/apache/nutch/fetcher/Fetcher.java
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ private AtomicInteger getActiveThreads() {
private void reportStatus(Context context, FetchItemQueues fetchQueues, int pagesLastSec, int bytesLastSec)
throws IOException {
StringBuilder status = new StringBuilder();
Long elapsed = new Long((System.currentTimeMillis() - start) / 1000);
Long elapsed = Long.valueOf((System.currentTimeMillis() - start) / 1000);

float avgPagesSec = (float) pages.get() / elapsed.floatValue();
long avgBytesSec = (bytes.get() / 128l) / elapsed.longValue();
Expand Down
4 changes: 2 additions & 2 deletions src/java/org/apache/nutch/hostdb/ReadHostDb.java
Original file line number Diff line number Diff line change
Expand Up @@ -224,8 +224,8 @@ private void getHostDbRecord(Path hostDb, String host) throws Exception {
if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
throw new IOException("Incompatible key (" + keyClass.getName() + ")");

Text key = (Text) keyClass.newInstance();
HostDatum value = (HostDatum) valueClass.newInstance();
Text key = (Text) keyClass.getConstructor().newInstance();
HostDatum value = (HostDatum) valueClass.getConstructor().newInstance();

for (int i = 0; i < readers.length; i++) {
while (readers[i].next(key, value)) {
Expand Down
1 change: 1 addition & 0 deletions src/java/org/apache/nutch/hostdb/ResolverThread.java
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ public void run() {
// Resolve the host and act appropriately
try {
// Throws an exception if host is not found
@SuppressWarnings("unused")
InetAddress inetAddr = InetAddress.getByName(host);

if (datum.isEmpty()) {
Expand Down
2 changes: 2 additions & 0 deletions src/java/org/apache/nutch/indexer/CleaningJob.java
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,9 @@ public void map(Text key, CrawlDatum value,

public static class DeleterReducer extends
Reducer<ByteWritable, Text, Text, ByteWritable> {
@SuppressWarnings("unused")
private static final int NUM_MAX_DELETE_REQUEST = 1000;
@SuppressWarnings("unused")
private int numDeletes = 0;
private int totalDeleted = 0;

Expand Down
3 changes: 3 additions & 0 deletions src/java/org/apache/nutch/indexer/IndexWriter.java
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ public interface IndexWriter extends Pluggable, Configurable {
*/
final static String X_POINT_ID = IndexWriter.class.getName();

/**
 * @deprecated use {@link #open(IndexWriterParams)} instead.
*/
@Deprecated
public void open(Configuration conf, String name) throws IOException;

Expand Down
8 changes: 0 additions & 8 deletions src/java/org/apache/nutch/indexer/IndexingFilters.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,26 +17,18 @@

package org.apache.nutch.indexer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.nutch.plugin.PluginRepository;
import org.apache.nutch.parse.Parse;
import org.apache.hadoop.conf.Configuration;
import org.apache.nutch.crawl.CrawlDatum;
import org.apache.nutch.crawl.Inlinks;
import org.apache.hadoop.io.Text;

import java.lang.invoke.MethodHandles;

/** Creates and caches {@link IndexingFilter} implementing plugins. */
public class IndexingFilters {

public static final String INDEXINGFILTER_ORDER = "indexingfilter.order";

private static final Logger LOG = LoggerFactory
.getLogger(MethodHandles.lookup().lookupClass());

private IndexingFilter[] indexingFilters;

public IndexingFilters(Configuration conf) {
Expand Down
10 changes: 8 additions & 2 deletions src/java/org/apache/nutch/plugin/Extension.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
*/
package org.apache.nutch.plugin;

import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;

import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -158,8 +159,13 @@ public Object getExtensionInstance() throws PluginRuntimeException {
// lazy loading of Plugin in case there is no instance of the plugin
// already.
pluginRepository.getPluginInstance(getDescriptor());
Object object = extensionClazz.newInstance();
if (object instanceof Configurable) {
Object object = null;
try {
object = extensionClazz.getConstructor().newInstance();
} catch (IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
e.printStackTrace();
}
if (object != null && object instanceof Configurable) {
((Configurable) object).setConf(this.conf);
}
return object;
Expand Down
3 changes: 2 additions & 1 deletion src/java/org/apache/nutch/plugin/Plugin.java
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
 * instances are used as the point of life cycle management of plugin related
* functionality.
*
* The <code>Plugin</code> will be startuped and shutdown by the nutch plugin
* The <code>Plugin</code> will be started up and shutdown by the nutch plugin
* management system.
*
A possible use case of the <code>Plugin</code> implementation is to create or
Expand Down Expand Up @@ -88,6 +88,7 @@ private void setDescriptor(PluginDescriptor descriptor) {
fDescriptor = descriptor;
}

@SuppressWarnings("deprecation")
protected void finalize() throws Throwable {
super.finalize();
shutDown();
Expand Down
Empty file modified src/java/org/apache/nutch/protocol/Content.java
100755 → 100644
Empty file.
Empty file modified src/java/org/apache/nutch/protocol/Protocol.java
100755 → 100644
Empty file.
Empty file modified src/java/org/apache/nutch/protocol/ProtocolException.java
100755 → 100644
Empty file.
6 changes: 0 additions & 6 deletions src/java/org/apache/nutch/protocol/ProtocolFactory.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,9 @@

package org.apache.nutch.protocol;

import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.net.MalformedURLException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.nutch.plugin.Extension;
import org.apache.nutch.plugin.ExtensionPoint;
import org.apache.nutch.plugin.PluginRepository;
Expand All @@ -40,9 +37,6 @@
*/
public class ProtocolFactory {

private static final Logger LOG = LoggerFactory
.getLogger(MethodHandles.lookup().lookupClass());

private ExtensionPoint extensionPoint;

private Configuration conf;
Expand Down
34 changes: 17 additions & 17 deletions src/java/org/apache/nutch/protocol/ProtocolStatus.java
Original file line number Diff line number Diff line change
Expand Up @@ -101,22 +101,22 @@ public class ProtocolStatus implements Writable {

private static final HashMap<Integer, String> codeToName = new HashMap<>();
static {
codeToName.put(new Integer(SUCCESS), "success");
codeToName.put(new Integer(FAILED), "failed");
codeToName.put(new Integer(PROTO_NOT_FOUND), "proto_not_found");
codeToName.put(new Integer(GONE), "gone");
codeToName.put(new Integer(MOVED), "moved");
codeToName.put(new Integer(TEMP_MOVED), "temp_moved");
codeToName.put(new Integer(NOTFOUND), "notfound");
codeToName.put(new Integer(RETRY), "retry");
codeToName.put(new Integer(EXCEPTION), "exception");
codeToName.put(new Integer(ACCESS_DENIED), "access_denied");
codeToName.put(new Integer(ROBOTS_DENIED), "robots_denied");
codeToName.put(new Integer(REDIR_EXCEEDED), "redir_exceeded");
codeToName.put(new Integer(NOTFETCHING), "notfetching");
codeToName.put(new Integer(NOTMODIFIED), "notmodified");
codeToName.put(new Integer(WOULDBLOCK), "wouldblock");
codeToName.put(new Integer(BLOCKED), "blocked");
codeToName.put(Integer.valueOf(SUCCESS), "success");
codeToName.put(Integer.valueOf(FAILED), "failed");
codeToName.put(Integer.valueOf(PROTO_NOT_FOUND), "proto_not_found");
codeToName.put(Integer.valueOf(GONE), "gone");
codeToName.put(Integer.valueOf(MOVED), "moved");
codeToName.put(Integer.valueOf(TEMP_MOVED), "temp_moved");
codeToName.put(Integer.valueOf(NOTFOUND), "notfound");
codeToName.put(Integer.valueOf(RETRY), "retry");
codeToName.put(Integer.valueOf(EXCEPTION), "exception");
codeToName.put(Integer.valueOf(ACCESS_DENIED), "access_denied");
codeToName.put(Integer.valueOf(ROBOTS_DENIED), "robots_denied");
codeToName.put(Integer.valueOf(REDIR_EXCEEDED), "redir_exceeded");
codeToName.put(Integer.valueOf(NOTFETCHING), "notfetching");
codeToName.put(Integer.valueOf(NOTMODIFIED), "notmodified");
codeToName.put(Integer.valueOf(WOULDBLOCK), "wouldblock");
codeToName.put(Integer.valueOf(BLOCKED), "blocked");
}

public ProtocolStatus() {
Expand Down Expand Up @@ -280,7 +280,7 @@ public boolean equals(Object o) {

public String toString() {
StringBuffer res = new StringBuffer();
res.append(codeToName.get(new Integer(code)) + "(" + code
res.append(codeToName.get(Integer.valueOf(code)) + "(" + code
+ "), lastModified=" + lastModified);
if (args != null) {
if (args.length == 1) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ public void initialize(InputSplit split, TaskAttemptContext context){

}

@SuppressWarnings("unused")
public synchronized boolean next(Text key, Text value)
throws IOException, InterruptedException {

Expand Down
14 changes: 7 additions & 7 deletions src/java/org/apache/nutch/segment/SegmentReader.java
Original file line number Diff line number Diff line change
Expand Up @@ -421,16 +421,16 @@ private List<Writable> getMapRecords(Path dir, Text key) throws Exception {
Class<?> valueClass = readers[0].getValueClass();
if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
throw new IOException("Incompatible key (" + keyClass.getName() + ")");
Writable value = (Writable) valueClass.newInstance();
Writable value = (Writable) valueClass.getConstructor().newInstance();
// we don't know the partitioning schema
for (int i = 0; i < readers.length; i++) {
if (readers[i].get(key, value) != null) {
res.add(value);
value = (Writable) valueClass.newInstance();
Text aKey = (Text) keyClass.newInstance();
value = (Writable) valueClass.getConstructor().newInstance();
Text aKey = (Text) keyClass.getConstructor().newInstance();
while (readers[i].next(aKey, value) && aKey.equals(key)) {
res.add(value);
value = (Writable) valueClass.newInstance();
value = (Writable) valueClass.getConstructor().newInstance();
}
}
readers[i].close();
Expand All @@ -446,13 +446,13 @@ private List<Writable> getSeqRecords(Path dir, Text key) throws Exception {
Class<?> valueClass = readers[0].getValueClass();
if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
throw new IOException("Incompatible key (" + keyClass.getName() + ")");
WritableComparable<?> aKey = (WritableComparable<?>) keyClass.newInstance();
Writable value = (Writable) valueClass.newInstance();
WritableComparable<?> aKey = (WritableComparable<?>) keyClass.getConstructor().newInstance();
Writable value = (Writable) valueClass.getConstructor().newInstance();
for (int i = 0; i < readers.length; i++) {
while (readers[i].next(aKey, value)) {
if (aKey.equals(key)) {
res.add(value);
value = (Writable) valueClass.newInstance();
value = (Writable) valueClass.getConstructor().newInstance();
}
}
readers[i].close();
Expand Down
22 changes: 9 additions & 13 deletions src/java/org/apache/nutch/service/impl/LinkReader.java
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@ public List read(String path) throws FileNotFoundException {
throw new FileNotFoundException();

}catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
throw new WebApplicationException();
Expand Down Expand Up @@ -93,7 +92,6 @@ public List head(String path, int nrows) throws FileNotFoundException {
throw new FileNotFoundException();

}catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
throw new WebApplicationException();
Expand Down Expand Up @@ -128,7 +126,6 @@ public List slice(String path, int start, int end)
throw new FileNotFoundException();

}catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
throw new WebApplicationException();
Expand All @@ -154,22 +151,21 @@ public int count(String path) throws FileNotFoundException {
} catch(FileNotFoundException fne){
throw new FileNotFoundException();
}catch (IOException e) {
// TODO Auto-generated catch block
LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
throw new WebApplicationException();
}
return i;
}

private HashMap<String, String> getLinksRow(Writable key, LinkDatum value) {
HashMap<String, String> t_row = new HashMap<>();
t_row.put("key_url", key.toString());
t_row.put("url", value.getUrl());
t_row.put("anchor", value.getAnchor());
t_row.put("score", String.valueOf(value.getScore()));
t_row.put("timestamp", String.valueOf(value.getTimestamp()));
t_row.put("linktype", String.valueOf(value.getLinkType()));

return t_row;
HashMap<String, String> tRow = new HashMap<>();
tRow.put("key_url", key.toString());
tRow.put("url", value.getUrl());
tRow.put("anchor", value.getAnchor());
tRow.put("score", String.valueOf(value.getScore()));
tRow.put("timestamp", String.valueOf(value.getTimestamp()));
tRow.put("linktype", String.valueOf(value.getLinkType()));

return tRow;
}
}
Loading

0 comments on commit f02110f

Please sign in to comment.