HIVE-23687: Fix Spotbugs issues in hive-standalone-metastore-common #1107

Status: Closed · wants to merge 1 commit
3 changes: 2 additions & 1 deletion Jenkinsfile
@@ -171,7 +171,8 @@ jobWrappers {
     stage('Prechecks') {
       def spotbugsProjects = [
           ":hive-shims",
-          ":hive-storage-api"
+          ":hive-storage-api",
+          ":hive-standalone-metastore-common"
      ]
      buildHive("-Pspotbugs -pl " + spotbugsProjects.join(",") + " -am compile com.github.spotbugs:spotbugs-maven-plugin:4.0.0:check")
    }
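With the module added to spotbugsProjects, the Prechecks stage now runs the SpotBugs check goal against hive-standalone-metastore-common (and its upstream modules, via -am) as part of the precommit build, so new violations in this module should fail the build.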
@@ -70,7 +70,7 @@ private void init(){
   }

   @Override
-  public void Authenticate(String user, String password) throws AuthenticationException {
+  public void authenticate(String user, String password) throws AuthenticationException {

     if(!userMap.containsKey(user)) {
       throw new AuthenticationException("Invalid user : "+user);
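This rename recurs throughout the patch: SpotBugs' NM_METHOD_NAMING_CONVENTION detector flags methods that begin with an upper-case letter, so Authenticate becomes authenticate on the MetaStorePasswdAuthenticationProvider interface and in every implementation and caller shown below.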
4 changes: 2 additions & 2 deletions standalone-metastore/metastore-common/pom.xml
@@ -385,7 +385,7 @@
             <fork>true</fork>
             <maxHeap>2048</maxHeap>
             <jvmArgs>-Djava.awt.headless=true -Xmx2048m -Xms512m</jvmArgs>
-            <excludeFilterFile>${basedir}/spotbugs/spotbugs-exclude.xml</excludeFilterFile>
+            <excludeFilterFile>${basedir}/${standalone.metastore.path.to.root}/spotbugs/spotbugs-exclude.xml</excludeFilterFile>
           </configuration>
         </plugin>
       </plugins>
@@ -400,7 +400,7 @@
             <fork>true</fork>
             <maxHeap>2048</maxHeap>
             <jvmArgs>-Djava.awt.headless=true -Xmx2048m -Xms512m</jvmArgs>
-            <excludeFilterFile>${basedir}/spotbugs/spotbugs-exclude.xml</excludeFilterFile>
+            <excludeFilterFile>${basedir}/${standalone.metastore.path.to.root}/spotbugs/spotbugs-exclude.xml</excludeFilterFile>
           </configuration>
         </plugin>
       </plugins>
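Both SpotBugs profiles now resolve the shared exclude file through the ${standalone.metastore.path.to.root} property instead of assuming a spotbugs/ directory sits directly under the module's ${basedir}, so submodules at different depths all point at the same spotbugs-exclude.xml.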
@@ -21,6 +21,7 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.utils.StringUtils;

+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -133,16 +134,18 @@ public class ColumnType {
   );

   // This map defines the progression of up casts in numeric types.
-  public static final Map<String, Integer> NumericCastOrder = new HashMap<>();
+  public static final Map<String, Integer> NumericCastOrder;

   static {
-    NumericCastOrder.put(TINYINT_TYPE_NAME, 1);
-    NumericCastOrder.put(SMALLINT_TYPE_NAME, 2);
-    NumericCastOrder.put(INT_TYPE_NAME, 3);
-    NumericCastOrder.put(BIGINT_TYPE_NAME, 4);
-    NumericCastOrder.put(DECIMAL_TYPE_NAME, 5);
-    NumericCastOrder.put(FLOAT_TYPE_NAME, 6);
-    NumericCastOrder.put(DOUBLE_TYPE_NAME, 7);
+    Map<String, Integer> map = new HashMap<>();
+    map.put(TINYINT_TYPE_NAME, 1);
+    map.put(SMALLINT_TYPE_NAME, 2);
+    map.put(INT_TYPE_NAME, 3);
+    map.put(BIGINT_TYPE_NAME, 4);
+    map.put(DECIMAL_TYPE_NAME, 5);
+    map.put(FLOAT_TYPE_NAME, 6);
+    map.put(DOUBLE_TYPE_NAME, 7);
+    NumericCastOrder = Collections.unmodifiableMap(map);
   }

   private static final Set<String> decoratedTypeNames = new HashSet<>();
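This is the standard remedy for SpotBugs' mutable-static-collection warnings (MS_MUTABLE_COLLECTION and friends): populate a local map, then publish only an unmodifiable view through the public static final field. A minimal, self-contained sketch of the idiom — the class name and keys here are illustrative, not from the Hive source:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class CastOrder {
      // The field is publicly readable, but the map behind it can no longer be mutated.
      public static final Map<String, Integer> ORDER;

      static {
        Map<String, Integer> map = new HashMap<>();
        map.put("tinyint", 1);
        map.put("smallint", 2);
        map.put("int", 3);
        ORDER = Collections.unmodifiableMap(map);
      }

      public static void main(String[] args) {
        System.out.println(CastOrder.ORDER.get("int"));  // 3
        // CastOrder.ORDER.put("bigint", 4);             // would throw UnsupportedOperationException
      }
    }

Any caller that previously wrote through NumericCastOrder now fails fast with UnsupportedOperationException instead of silently mutating shared state.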
@@ -795,15 +795,15 @@ public void close() {
   }

   public static void setProcessorCapabilities(final String[] capabilities) {
-    processorCapabilities = capabilities;
+    processorCapabilities = capabilities != null ? Arrays.copyOf(capabilities, capabilities.length) : null;
   }

   public static void setProcessorIdentifier(final String id) {
     processorIdentifier = id;
   }

   public static String[] getProcessorCapabilities() {
-    return processorCapabilities;
+    return processorCapabilities != null ? Arrays.copyOf(processorCapabilities, processorCapabilities.length) : null;
   }

   public static String getProcessorIdentifier() {
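Storing or returning a caller-supplied array directly lets external code mutate internal state, which SpotBugs flags as EI_EXPOSE_REP/EI_EXPOSE_REP2 (or the MS_EXPOSE_REP variant for statics). The fix, as above, is a defensive copy on both the setter and the getter. A small sketch of the pattern, with illustrative names:

    import java.util.Arrays;

    public class Capabilities {
      private static String[] processorCapabilities;

      public static void setProcessorCapabilities(final String[] capabilities) {
        // Copy on the way in so later changes to the caller's array don't leak in.
        processorCapabilities = capabilities != null
            ? Arrays.copyOf(capabilities, capabilities.length) : null;
      }

      public static String[] getProcessorCapabilities() {
        // Copy on the way out so callers can't mutate the stored array.
        return processorCapabilities != null
            ? Arrays.copyOf(processorCapabilities, processorCapabilities.length) : null;
      }
    }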
@@ -885,7 +885,7 @@ public int add_partitions(List<Partition> new_parts) throws TException {
     if (new_parts == null || new_parts.contains(null)) {
       throw new MetaException("Partitions cannot be null.");
     }
-    if (new_parts != null && !new_parts.isEmpty() && !new_parts.get(0).isSetCatName()) {
+    if (!new_parts.isEmpty() && !new_parts.get(0).isSetCatName()) {
       final String defaultCat = getDefaultCatalog(conf);
       new_parts.forEach(p -> p.setCatName(defaultCat));
     }
@@ -1754,8 +1754,8 @@ public Map<String, Type> getTypeAll(String name) throws MetaException,
     Map<String, Type> fromClient = client.get_type_all(name);
     if (fromClient != null) {
       result = new LinkedHashMap<>();
-      for (String key : fromClient.keySet()) {
-        result.put(key, deepCopy(fromClient.get(key)));
+      for (Map.Entry<String, Type> entry : fromClient.entrySet()) {
+        result.put(entry.getKey(), deepCopy(entry.getValue()));
       }
     }
     return result;
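Iterating keySet() and calling get(key) inside the loop performs a second hash lookup per entry, which SpotBugs reports as WMI_WRONG_MAP_ITERATOR; iterating entrySet() retrieves key and value in one pass. A minimal illustration, with placeholder types:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class EntrySetDemo {
      public static void main(String[] args) {
        Map<String, String> source = Map.of("a", "1", "b", "2");

        Map<String, String> copy = new LinkedHashMap<>();
        // One traversal, no extra get() lookup per key.
        for (Map.Entry<String, String> entry : source.entrySet()) {
          copy.put(entry.getKey(), entry.getValue());
        }
        System.out.println(copy);
      }
    }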
@@ -2013,7 +2013,7 @@ public Database getDatabase(String catalogName, String databaseName) throws TExc
     if (catalogName != null)
       request.setCatalogName(catalogName);
     if (processorCapabilities != null) {
-      request.setProcessorCapabilities(Arrays.asList(processorCapabilities));
+      request.setProcessorCapabilities(new ArrayList<>(Arrays.asList(processorCapabilities)));
     }
     if (processorIdentifier != null) {
       request.setProcessorIdentifier(processorIdentifier);
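Arrays.asList returns a fixed-size list backed by the original array: it cannot be resized, and writes to the array show through to the list. Wrapping it in new ArrayList<>(...) hands the Thrift request an independent, fully mutable copy. A short sketch of the difference, with illustrative values:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class AsListDemo {
      public static void main(String[] args) {
        String[] caps = {"read", "write"};

        List<String> view = Arrays.asList(caps);   // fixed-size view backed by caps
        // view.add("admin");                      // would throw UnsupportedOperationException

        List<String> copy = new ArrayList<>(Arrays.asList(caps));
        copy.add("admin");                         // fine: independent copy

        caps[0] = "changed";
        System.out.println(view.get(0));           // "changed" - the view follows the array
        System.out.println(copy.get(0));           // "read"    - the copy does not
      }
    }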
@@ -3539,7 +3539,7 @@ public NotificationEventResponse getNextNotification(long lastEventId, int maxEv
     NotificationEventRequest rqst = new NotificationEventRequest(lastEventId);
     rqst.setMaxEvents(maxEvents);
     NotificationEventResponse rsp = client.get_next_notification(rqst);
-    LOG.debug("Got back {} events", rsp.getEventsSize());
+    LOG.debug("Got back {} events", rsp != null ? rsp.getEventsSize() : 0);
     NotificationEventResponse filtered = new NotificationEventResponse();
     if (rsp != null && rsp.getEvents() != null) {
       long nextEventId = lastEventId + 1;
@@ -4327,7 +4327,7 @@ public static class TableCapabilityBuilder {
     public static final String KEY_CAPABILITIES = "OBJCAPABILITIES";

     public TableCapabilityBuilder() {
-      capabilitiesString = new String();
+      capabilitiesString = "";
     }

     public TableCapabilityBuilder add(String skill) {
@@ -26,7 +26,7 @@
 public class MetaStoreAnonymousAuthenticationProviderImpl implements MetaStorePasswdAuthenticationProvider {

   @Override
-  public void Authenticate(String user, String password) throws AuthenticationException {
+  public void authenticate(String user, String password) throws AuthenticationException {
     // no-op authentication
   }
@@ -53,7 +53,7 @@ public class MetaStoreConfigAuthenticationProviderImpl implements MetaStorePassw
   }

   @Override
-  public void Authenticate(String authUser, String authPassword) throws AuthenticationException {
+  public void authenticate(String authUser, String authPassword) throws AuthenticationException {
     if (!userName.equals(authUser)) {
       LOG.debug("Invalid user " + authUser);
       throw new AuthenticationException("Invalid credentials");
@@ -63,8 +63,8 @@ public class MetaStoreCustomAuthenticationProviderImpl implements MetaStorePassw
   }

   @Override
-  public void Authenticate(String user, String password) throws AuthenticationException {
-    customProvider.Authenticate(user, password);
+  public void authenticate(String user, String password) throws AuthenticationException {
+    customProvider.authenticate(user, password);
   }

 }
@@ -22,6 +22,8 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import java.io.IOException;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.List;
 import org.apache.commons.lang3.StringUtils;
@@ -70,7 +72,7 @@ public MetaStoreLdapAuthenticationProviderImpl(Configuration conf) {
   }

   @Override
-  public void Authenticate(String user, String password) throws AuthenticationException {
+  public void authenticate(String user, String password) throws AuthenticationException {
     DirSearch search = null;
     String bindUser = MetastoreConf.getVar(this.conf,
         MetastoreConf.ConfVars.METASTORE_PLAIN_LDAP_BIND_USER);
@@ -117,7 +119,7 @@ private DirSearch createDirSearch(String user, String password) throws Authentic
       throw new AuthenticationException("Error validating LDAP user:"
           + " a null or blank user name has been provided");
     }
-    if (StringUtils.isBlank(password) || password.getBytes()[0] == 0) {
+    if (StringUtils.isBlank(password) || password.getBytes(StandardCharsets.UTF_8)[0] == 0) {
       throw new AuthenticationException("Error validating LDAP user:"
           + " a null or blank password has been provided");
     }
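String.getBytes() with no argument uses the JVM's default charset, so the same password could produce different bytes on differently configured hosts; SpotBugs reports this as DM_DEFAULT_ENCODING. Passing StandardCharsets.UTF_8 makes the encoding explicit and platform-independent. A tiny sketch:

    import java.nio.charset.StandardCharsets;

    public class CharsetDemo {
      public static void main(String[] args) {
        String password = "s3cret";
        // Platform-dependent: byte[] bad = password.getBytes();
        byte[] good = password.getBytes(StandardCharsets.UTF_8);  // same bytes everywhere
        System.out.println(good.length);
      }
    }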
@@ -35,5 +35,5 @@ public interface MetaStorePasswdAuthenticationProvider {
    * @throws AuthenticationException When a user is found to be
    *           invalid by the implementation
    */
-  void Authenticate(String user, String password) throws AuthenticationException;
+  void authenticate(String user, String password) throws AuthenticationException;
 }
@@ -115,7 +115,7 @@ public void handle(Callback[] callbacks) throws IOException, UnsupportedCallback
       }
       MetaStorePasswdAuthenticationProvider provider =
           MetaStoreAuthenticationProviderFactory.getAuthenticationProvider(conf, authMethod);
-      provider.Authenticate(username, password);
+      provider.authenticate(username, password);
       if (ac != null) {
         ac.setAuthorized(true);
       }
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.metastore;

 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.Executors;
@@ -293,7 +294,7 @@ public Boolean execute() throws IOException {
         // xattr has limited capacity. We shall revisit and store all original
         // locations if orig-loc becomes important
         try {
-          fs.setXAttr(cmPath, ORIG_LOC_TAG, path.toString().getBytes());
+          fs.setXAttr(cmPath, ORIG_LOC_TAG, path.toString().getBytes(StandardCharsets.UTF_8));
         } catch (UnsupportedOperationException e) {
           LOG.warn("Error setting xattr for {}", path.toString());
         }
@@ -778,15 +778,15 @@ public List<FileStatus> getFileStatusesForUnpartitionedTable(Database db, Table
   public static String makePartName(List<FieldSchema> partCols,
       List<String> vals, String defaultStr) throws MetaException {
     if ((partCols.size() != vals.size()) || (partCols.size() == 0)) {
-      String errorStr = "Invalid partition key & values; keys [";
+      StringBuilder errorStrBuilder = new StringBuilder("Invalid partition key & values; keys [");
       for (FieldSchema fs : partCols) {
-        errorStr += (fs.getName() + ", ");
+        errorStrBuilder.append(fs.getName()).append(", ");
       }
-      errorStr += "], values [";
+      errorStrBuilder.append("], values [");
       for (String val : vals) {
-        errorStr += (val + ", ");
+        errorStrBuilder.append(val).append(", ");
       }
-      throw new MetaException(errorStr + "]");
+      throw new MetaException(errorStrBuilder.append("]").toString());
     }
     List<String> colNames = new ArrayList<>();
     for (FieldSchema col: partCols) {
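String concatenation with += inside a loop allocates a new String on every iteration (SpotBugs' SBSC_USE_STRINGBUFFER_CONCATENATION pattern); StringBuilder.append reuses one buffer. A compact sketch of the same error-message construction, with illustrative names:

    import java.util.List;

    public class PartNameError {
      static String buildError(List<String> keys, List<String> vals) {
        StringBuilder sb = new StringBuilder("Invalid partition key & values; keys [");
        for (String k : keys) {
          sb.append(k).append(", ");  // appends into one buffer, no per-iteration copies
        }
        sb.append("], values [");
        for (String v : vals) {
          sb.append(v).append(", ");
        }
        return sb.append("]").toString();
      }

      public static void main(String[] args) {
        System.out.println(buildError(List.of("year", "month"), List.of("2020")));
      }
    }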
@@ -33,6 +33,7 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.ZooKeeperHiveHelper;
 import org.apache.hadoop.hive.metastore.utils.StringUtils;
@@ -99,14 +100,22 @@ public class MetastoreConf {
       "metastore.authentication.ldap.userMembershipKey";

   private static final Map<String, ConfVars> metaConfs = new HashMap<>();
+  private static volatile URL hiveSiteURL = null;
   private static URL hiveDefaultURL = null;
-  private static URL hiveSiteURL = null;
   private static URL hiveMetastoreSiteURL = null;
   private static URL metastoreSiteURL = null;
   private static AtomicBoolean beenDumped = new AtomicBoolean();

   private static Map<String, ConfVars> keyToVars;

+  static {
+    keyToVars = new HashMap<>(ConfVars.values().length * 2);
+    for (ConfVars var : ConfVars.values()) {
+      keyToVars.put(var.varname, var);
+      keyToVars.put(var.hiveName, var);
+    }
+  }
+
   @VisibleForTesting
   static final String TEST_ENV_WORKAROUND = "metastore.testing.env.workaround.dont.ever.set.this.";
@@ -145,6 +154,7 @@ public String toString() {
    * TODO - I suspect the vast majority of these don't need to be here. But it requires testing
    * before just pulling them out.
    */
+  @SuppressFBWarnings(value = "MS_MUTABLE_ARRAY")
   public static final MetastoreConf.ConfVars[] metaVars = {
       ConfVars.WAREHOUSE,
       ConfVars.REPLDIR,
@@ -1341,7 +1351,7 @@ public enum ConfVars {
     STR_LIST_ENTRY("test.str.list", "hive.test.str.list", "a,b,c",
         "no comment"),
     LONG_TEST_ENTRY("test.long", "hive.test.long", 42, "comment"),
-    DOUBLE_TEST_ENTRY("test.double", "hive.test.double", 3.141592654, "comment"),
+    DOUBLE_TEST_ENTRY("test.double", "hive.test.double", Math.PI, "comment"),
     TIME_TEST_ENTRY("test.time", "hive.test.time", 1, TimeUnit.SECONDS, "comment"),
     TIME_VALIDATOR_ENTRY_INCLUSIVE("test.time.validator.inclusive", "hive.test.time.validator.inclusive", 1,
         TimeUnit.SECONDS,
@@ -1619,28 +1629,30 @@ private static URL findConfigFile(ClassLoader classLoader, String name) {
     if (result == null) {
       // Nope, so look to see if our conf dir has been explicitly set
       result = seeIfConfAtThisLocation("METASTORE_CONF_DIR", name, false);
-      if (result == null) {
-        // Nope, so look to see if our home dir has been explicitly set
-        result = seeIfConfAtThisLocation("METASTORE_HOME", name, true);
-        if (result == null) {
-          // Nope, so look to see if Hive's conf dir has been explicitly set
-          result = seeIfConfAtThisLocation("HIVE_CONF_DIR", name, false);
-          if (result == null) {
-            // Nope, so look to see if Hive's home dir has been explicitly set
-            result = seeIfConfAtThisLocation("HIVE_HOME", name, true);
-            if (result == null) {
-              // Nope, so look to see if we can find a conf file by finding our jar, going up one
-              // directory, and looking for a conf directory.
-              URI jarUri = null;
-              try {
-                jarUri = MetastoreConf.class.getProtectionDomain().getCodeSource().getLocation().toURI();
-              } catch (Throwable e) {
-                LOG.warn("Cannot get jar URI", e);
-              }
-              result = seeIfConfAtThisLocation(new File(jarUri).getParent(), name, true);
-            }
-          }
-        }
-      }
+      if (result == null) {
+        // Nope, so look to see if our home dir has been explicitly set
+        result = seeIfConfAtThisLocation("METASTORE_HOME", name, true);
+      }
+      if (result == null) {
+        // Nope, so look to see if Hive's conf dir has been explicitly set
+        result = seeIfConfAtThisLocation("HIVE_CONF_DIR", name, false);
+      }
+      if (result == null) {
+        // Nope, so look to see if Hive's home dir has been explicitly set
+        result = seeIfConfAtThisLocation("HIVE_HOME", name, true);
+      }
+      if (result == null) {
+        // Nope, so look to see if we can find a conf file by finding our jar, going up one
+        // directory, and looking for a conf directory.
+        URI jarUri = null;
+        try {
+          jarUri = MetastoreConf.class.getProtectionDomain().getCodeSource().getLocation().toURI();
+        } catch (Throwable e) {
+          LOG.warn("Cannot get jar URI", e);
+        }
+        if (jarUri != null) {
+          result = seeIfConfAtThisLocation(new File(jarUri).getParent(), name, true);
+        }
+      }
     }
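Two distinct fixes are folded into this rewrite: the deeply nested if-cascade is flattened into sequential guard checks, and jarUri is now tested for null before new File(jarUri) is constructed, closing the null-dereference path SpotBugs flagged when the code-source location cannot be determined.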
@@ -1754,7 +1766,7 @@ public static int getIntVar(Configuration conf, ConfVars var) {
   public static long getLongVar(Configuration conf, ConfVars var) {
     assert var.defaultVal.getClass() == Long.class;
     String val = conf.get(var.varname);
-    return val == null ? conf.getLong(var.hiveName, (Long)var.defaultVal) : Long.valueOf(val);
+    return val == null ? conf.getLong(var.hiveName, (Long)var.defaultVal) : Long.parseLong(val);
   }
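Long.parseLong returns the primitive directly, whereas Long.valueOf allocates a boxed Long that is immediately unboxed here; SpotBugs flags the detour as DM_BOXED_PRIMITIVE_FOR_PARSING. Behavior for valid input is identical.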
@@ -1987,18 +1999,6 @@ public static String getPassword(Configuration conf, ConfVars var) throws IOExce
   /**
    * @return the value set
    */
   public static String get(Configuration conf, String key) {
-    // Map this key back to the ConfVars it is associated with.
-    if (keyToVars == null) {
-      synchronized (MetastoreConf.class) {
-        if (keyToVars == null) {
-          keyToVars = new HashMap<>(ConfVars.values().length * 2);
-          for (ConfVars var : ConfVars.values()) {
-            keyToVars.put(var.varname, var);
-            keyToVars.put(var.hiveName, var);
-          }
-        }
-      }
-    }
     ConfVars var = keyToVars.get(key);
     if (var == null) {
       // Ok, this isn't one we track. Just return whatever matches the string
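This deletion is the other half of the keyToVars change above: the double-checked locking here was broken because keyToVars was not volatile, so a thread could observe a partially constructed map. With the table now built in a static initializer, get() can read it without any synchronization.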
@@ -2030,9 +2030,8 @@ public static String getAsString(Configuration conf, ConfVars var) {
     } else if (var.defaultVal.getClass() == Double.class) {
       return Double.toString(getDoubleVar(conf, var));
     } else if (var.defaultVal.getClass() == TimeValue.class) {
-      TimeUnit timeUnit = (var.defaultVal.getClass() == TimeValue.class) ?
-          ((TimeValue)var.defaultVal).unit : null;
-      return Long.toString(getTimeVar(conf, var, timeUnit)) + timeAbbreviationFor(timeUnit);
+      TimeUnit timeUnit = ((TimeValue)var.defaultVal).unit;
+      return getTimeVar(conf, var, timeUnit) + timeAbbreviationFor(timeUnit);
     } else {
       throw new RuntimeException("Unknown type for getObject " + var.defaultVal.getClass().getName());
     }