Skip to content

Commit

Permalink
Categorize partition name creation error
Browse files Browse the repository at this point in the history
  • Loading branch information
electrum committed Jun 16, 2015
1 parent df67507 commit 52939fb
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 4 deletions.
Expand Up @@ -46,7 +46,6 @@
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.metastore.ProtectMode;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
Expand Down Expand Up @@ -86,7 +85,6 @@
import static java.lang.Math.min;
import static java.lang.String.format;
import static org.apache.hadoop.hive.metastore.ProtectMode.getProtectModeFromString;
import static org.apache.hadoop.hive.metastore.Warehouse.makePartName;

public class HiveSplitManager
implements ConnectorSplitManager
Expand Down Expand Up @@ -429,7 +427,7 @@ public List<HivePartitionMetadata> apply(List<HivePartition> partitionBatch)

// verify all partitions are online
String protectMode = partition.getParameters().get(ProtectMode.PARAMETER_NAME);
String partName = makePartName(table.getPartitionKeys(), partition.getValues());
String partName = HiveUtil.createPartitionName(partition, table);
if (protectMode != null && getProtectModeFromString(protectMode).offline) {
throw new PartitionOfflineException(tableName, partName);
}
Expand Down Expand Up @@ -473,7 +471,7 @@ public List<HivePartitionMetadata> apply(List<HivePartition> partitionBatch)
catch (PrestoException | NoSuchObjectException | NullPointerException | IllegalStateException | IllegalArgumentException e) {
throw Throwables.propagate(e);
}
catch (MetaException | RuntimeException e) {
catch (RuntimeException e) {
exception = e;
log.debug("getPartitions attempt %s failed, will retry. Exception: %s", attempt, e.getMessage());
}
Expand Down
13 changes: 13 additions & 0 deletions presto-hive/src/main/java/com/facebook/presto/hive/HiveUtil.java
Expand Up @@ -31,6 +31,8 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat;
import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
Expand Down Expand Up @@ -95,6 +97,7 @@
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.hive.metastore.MetaStoreUtils.getTableMetadata;
import static org.apache.hadoop.hive.metastore.Warehouse.makePartName;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT;
import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS;
Expand Down Expand Up @@ -541,6 +544,16 @@ public static List<HiveColumnHandle> getPartitionKeyColumnHandles(String connect
return columns.build();
}

/**
 * Builds the canonical Hive partition name (e.g. {@code ds=2015-06-16/hr=00})
 * for the given partition, using the partition key schema from its table.
 *
 * @param partition the partition whose values form the name
 * @param table the table supplying the partition key columns
 * @return the slash-separated {@code key=value} partition name
 * @throws PrestoException with {@code HIVE_INVALID_METADATA} if the metastore
 *         reports the partition values as inconsistent with the table's keys
 */
public static String createPartitionName(Partition partition, Table table)
{
    String partitionName;
    try {
        partitionName = makePartName(table.getPartitionKeys(), partition.getValues());
    }
    catch (MetaException e) {
        // makePartName throws a checked MetaException on malformed metadata;
        // surface it as an unchecked Presto error so callers categorize it uniformly
        throw new PrestoException(HIVE_INVALID_METADATA, e);
    }
    return partitionName;
}

public static Slice base64Decode(byte[] bytes)
{
return Slices.wrappedBuffer(Base64.getDecoder().decode(bytes));
Expand Down

0 comments on commit 52939fb

Please sign in to comment.