HIVE-28121: (2.3) Use direct SQL for transactional altering table parameter (#5204)
  • Loading branch information
pan3793 authored Apr 30, 2024
1 parent 35ef022 commit 10b3b03
Showing 11 changed files with 100 additions and 11 deletions.
4 changes: 4 additions & 0 deletions common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -786,6 +786,10 @@ public static enum ConfVars {
"Name of the identifier factory to use when generating table/column names etc. \n" +
"'datanucleus1' is used for backward compatibility with DataNucleus v1"),
METASTORE_USE_LEGACY_VALUE_STRATEGY("datanucleus.rdbms.useLegacyNativeValueStrategy", true, ""),
METASTORE_QUERY_SQL_ALLOWALL("datanucleus.query.sql.allowAll", true,
"In strict JDO all SQL queries must begin with \"SELECT ...\", and consequently it " +
"is not possible to execute queries that change data. This DataNucleus property when set to true allows " +
"insert, update and delete operations from JDO SQL. Default value is true."),
METASTORE_PLUGIN_REGISTRY_BUNDLE_CHECK("datanucleus.plugin.pluginRegistryBundleCheck", "LOG",
"Defines what happens when plugin bundles are found and are duplicated [EXCEPTION|LOG|NONE]"),
METASTORE_BATCH_RETRIEVE_MAX("hive.metastore.batch.retrieve.max", 300,
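For reference, the datanucleus.query.sql.allowAll property added above is what permits the metastore to run a data-changing statement through the JDO SQL query API; without it, strict JDO rejects anything that is not a SELECT. A minimal, hypothetical sketch of what the property enables (the helper name and parameters are invented; pm stands for an open javax.jdo.PersistenceManager, and reading the result as a Long mirrors how the patch obtains the affected-row count):

import javax.jdo.PersistenceManager;
import javax.jdo.Query;

public class JdoSqlUpdateSketch {
  // Illustrative only: with datanucleus.query.sql.allowAll=true the JDO query API accepts
  // this UPDATE; executeWithArray then returns the number of affected rows.
  static long updateParamValue(PersistenceManager pm, String key, String newValue) {
    Query query = pm.newQuery("javax.jdo.query.SQL",
        "UPDATE \"TABLE_PARAMS\" SET \"PARAM_VALUE\" = ? WHERE \"PARAM_KEY\" = ?");
    return (long) query.executeWithArray(newValue, key);
  }
}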
@@ -928,4 +928,10 @@ public void addPrimaryKeys(List<SQLPrimaryKey> pks)
public void addForeignKeys(List<SQLForeignKey> fks)
throws InvalidObjectException, MetaException {
}

@Override
public long updateParameterWithExpectedValue(Table table, String key, String expectedValue, String newValue)
throws MetaException, NoSuchObjectException {
throw new UnsupportedOperationException("This Store doesn't support updating table parameter with expected value");
}
}
@@ -51,7 +51,6 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hive.common.util.HiveStringUtils;

import javax.jdo.Constants;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
@@ -132,12 +131,7 @@ public void alterTable(RawStore msdb, Warehouse wh, String dbname,
String expectedValue = environmentContext != null && environmentContext.getProperties() != null ?
environmentContext.getProperties().get(hive_metastoreConstants.EXPECTED_PARAMETER_VALUE) : null;

if (expectedKey != null) {
// If we have to check the expected state of the table we have to prevent nonrepeatable reads.
msdb.openTransaction(Constants.TX_REPEATABLE_READ);
} else {
msdb.openTransaction();
}
msdb.openTransaction();

name = name.toLowerCase();
dbname = dbname.toLowerCase();
@@ -158,10 +152,20 @@
throw new InvalidOperationException("table " + dbname + "." + name + " doesn't exist");
}

if (expectedKey != null && expectedValue != null
&& !expectedValue.equals(oldt.getParameters().get(expectedKey))) {
throw new MetaException("The table has been modified. The parameter value for key '" + expectedKey + "' is '"
+ oldt.getParameters().get(expectedKey) + "'. The expected value was '" + expectedValue + "'");
if (expectedKey != null && expectedValue != null) {
String newValue = newt.getParameters().get(expectedKey);
if (newValue == null) {
throw new MetaException(String.format("New value for expected key %s is not set", expectedKey));
}
if (!expectedValue.equals(oldt.getParameters().get(expectedKey))) {
throw new MetaException("The table has been modified. The parameter value for key '" + expectedKey + "' is '"
+ oldt.getParameters().get(expectedKey) + "'. The expected value was '" + expectedValue + "'");
}
long affectedRows = msdb.updateParameterWithExpectedValue(oldt, expectedKey, expectedValue, newValue);
if (affectedRows != 1) {
// make sure concurrent modification exception messages have the same prefix
throw new MetaException("The table has been modified. The parameter value for key '" + expectedKey + "' is different");
}
}

// Views derive the column type from the base table definition. So the view definition
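For context, the check above gives alter_table compare-and-swap semantics: the client names a parameter and its expected value in the EnvironmentContext, and the alteration is applied only if that parameter is unchanged. A hedged caller-side sketch (the helper and the "snapshot" parameter are invented for illustration; EXPECTED_PARAMETER_KEY is assumed to be the companion of the EXPECTED_PARAMETER_VALUE constant read above):

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;

public class ConditionalAlterSketch {
  // Bumps the "snapshot" parameter from "0" to "1" only if no other writer changed it first;
  // otherwise the metastore throws MetaException ("The table has been modified. ...").
  static void bumpSnapshot(IMetaStoreClient client, Table originalTable) throws Exception {
    EnvironmentContext context = new EnvironmentContext();
    context.putToProperties(hive_metastoreConstants.EXPECTED_PARAMETER_KEY, "snapshot");
    context.putToProperties(hive_metastoreConstants.EXPECTED_PARAMETER_VALUE, "0");
    Table newTable = originalTable.deepCopy();
    newTable.getParameters().put("snapshot", "1");
    client.alter_table_with_environmentContext(
        originalTable.getDbName(), originalTable.getTableName(), newTable, context);
  }
}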
@@ -1964,4 +1964,13 @@ public List<SQLPrimaryKey> getPrimaryKeys(String db_name, String tbl_name) throw
}
return ret;
}

long updateTableParam(Table table, String key, String expectedValue, String newValue) {
String queryText = String.format("UPDATE \"TABLE_PARAMS\" SET \"PARAM_VALUE\" = ? " +
"WHERE \"PARAM_KEY\" = ? AND \"PARAM_VALUE\" = ? AND \"TBL_ID\" IN " +
"(SELECT \"TBL_ID\" FROM \"TBLS\" JOIN \"DBS\" ON \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" WHERE \"TBL_NAME\" = '%s' AND \"NAME\" = '%s')",
table.getTableName(), table.getDbName());
Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
return (long) query.executeWithArray(newValue, key, expectedValue);
}
}
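The WHERE clause above makes the UPDATE conditional on the current PARAM_VALUE, so the affected-row count is the success signal: one row means the expected value still held, zero means a concurrent writer got there first. A plain-JDBC illustration of the same compare-and-swap (simplified to key on TBL_ID directly rather than joining TBLS and DBS; not the code path used by the patch):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class ParamCasSketch {
  // Returns true iff exactly one TABLE_PARAMS row was flipped from expectedValue to newValue.
  static boolean casTableParam(Connection conn, long tblId, String key,
      String expectedValue, String newValue) throws SQLException {
    String sql = "UPDATE \"TABLE_PARAMS\" SET \"PARAM_VALUE\" = ? "
        + "WHERE \"TBL_ID\" = ? AND \"PARAM_KEY\" = ? AND \"PARAM_VALUE\" = ?";
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, newValue);
      ps.setLong(2, tblId);
      ps.setString(3, key);
      ps.setString(4, expectedValue);
      return ps.executeUpdate() == 1;
    }
  }
}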
@@ -608,6 +608,33 @@ public boolean openTransaction(String isolationLevel) {
return result;
}

@Override
public long updateParameterWithExpectedValue(Table table, String key, String expectedValue, String newValue)
throws MetaException, NoSuchObjectException {
final Table _table = table;
final String _key = key;
final String _expectedValue = expectedValue;
final String _newValue = newValue;
return new GetHelper<Long>(table.getDbName(), table.getTableName(), true, false) {

@Override
protected String describeResult() {
return "Affected rows";
}

@Override
protected Long getSqlResult(GetHelper<Long> ctx) throws MetaException {
return directSql.updateTableParam(_table, _key, _expectedValue, _newValue);
}

@Override
protected Long getJdoResult(GetHelper<Long> ctx) throws MetaException, NoSuchObjectException {
throw new UnsupportedOperationException(
"Cannot update parameter with JDO, make sure direct SQL is enabled");
}
}.run(false);
}

/**
* if this is the commit of the first open call then an actual commit is
* called.
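The anonymous GetHelper above deliberately has no JDO fallback: a conditional UPDATE on TABLE_PARAMS cannot be expressed through plain JDO, so when direct SQL is disabled the operation fails fast instead of silently degrading. A stand-alone, simplified sketch of that decision (not ObjectStore's actual GetHelper):

import java.util.function.Supplier;

public class DirectSqlOnlySketch {
  // Runs the direct-SQL branch when available; otherwise surfaces the same error the
  // ObjectStore implementation throws from its getJdoResult branch.
  static <T> T runDirectSqlOnly(boolean directSqlEnabled, Supplier<T> directSqlBranch) {
    if (!directSqlEnabled) {
      throw new UnsupportedOperationException(
          "Cannot update parameter with JDO, make sure direct SQL is enabled");
    }
    return directSqlBranch.get();
  }
}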
@@ -711,4 +711,12 @@ void createTableWithConstraints(Table tbl, List<SQLPrimaryKey> primaryKeys,
void addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException, MetaException;

void addForeignKeys(List<SQLForeignKey> fks) throws InvalidObjectException, MetaException;

/**
* Updates the given table parameter to the new value, but only if the parameter currently
* holds the expected value (a compare-and-swap on the table parameter).
*
* @return the number of rows updated
*/
long updateParameterWithExpectedValue(Table table, String key, String expectedValue, String newValue)
throws MetaException, NoSuchObjectException;
}
@@ -2851,4 +2851,10 @@ public void addForeignKeys(List<SQLForeignKey> fks) throws InvalidObjectExceptio
commitOrRoleBack(commit);
}
}

@Override
public long updateParameterWithExpectedValue(Table table, String key, String expectedValue, String newValue)
throws MetaException, NoSuchObjectException {
throw new UnsupportedOperationException("This Store doesn't support updating table parameter with expected value");
}
}
@@ -885,4 +885,10 @@ public void addForeignKeys(List<SQLForeignKey> fks)
throws InvalidObjectException, MetaException {
// TODO Auto-generated method stub
}

@Override
public long updateParameterWithExpectedValue(Table table, String key, String expectedValue, String newValue)
throws MetaException, NoSuchObjectException {
throw new UnsupportedOperationException("This Store doesn't support updating table parameter with expected value");
}
}
@@ -902,6 +902,12 @@ public void addForeignKeys(List<SQLForeignKey> fks)
throws InvalidObjectException, MetaException {
// TODO Auto-generated method stub
}

@Override
public long updateParameterWithExpectedValue(Table table, String key, String expectedValue, String newValue)
throws MetaException, NoSuchObjectException {
throw new UnsupportedOperationException("This Store doesn't support updating table parameter with expected value");
}
}


@@ -18,6 +18,7 @@

package org.apache.hadoop.hive.metastore.client;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.Warehouse;
@@ -29,6 +30,7 @@
import org.apache.hadoop.hive.metastore.minihms.AbstractMetaStoreService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -136,6 +138,8 @@ public void tearDown() throws Exception {

@Test
public void testAlterTableExpectedPropertyMatch() throws Exception {
Assume.assumeTrue(HiveConf.getBoolVar(metaStore.getConf(), HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL));
Assume.assumeTrue(HiveConf.getBoolVar(metaStore.getConf(), HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL_DDL));
Table originalTable = testTables[0];

EnvironmentContext context = new EnvironmentContext();
@@ -149,6 +153,8 @@ public void testAlterTableExpectedPropertyMatch() throws Exception {

@Test(expected = MetaException.class)
public void testAlterTableExpectedPropertyDifferent() throws Exception {
Assume.assumeTrue(HiveConf.getBoolVar(metaStore.getConf(), HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL));
Assume.assumeTrue(HiveConf.getBoolVar(metaStore.getConf(), HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL_DDL));
Table originalTable = testTables[0];

EnvironmentContext context = new EnvironmentContext();
@@ -168,6 +174,8 @@ public void testAlterTableExpectedPropertyDifferent() throws Exception {
*/
@Test
public void testAlterTableExpectedPropertyConcurrent() throws Exception {
Assume.assumeTrue(HiveConf.getBoolVar(metaStore.getConf(), HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL));
Assume.assumeTrue(HiveConf.getBoolVar(metaStore.getConf(), HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL_DDL));
Table originalTable = testTables[0];

originalTable.getParameters().put("snapshot", "0");
@@ -18,6 +18,7 @@

package org.apache.hadoop.hive.metastore.minihms;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -150,4 +151,8 @@ public void cleanWarehouseDirs() throws MetaException {
*/
public void stop() {
}

public Configuration getConf() {
return configuration;
}
}
