DRILL-3535: Add support for Drop Table
this closes #140
mehant authored and adeneche committed Sep 8, 2015
1 parent 41fc9ca commit 2a19184
Showing 14 changed files with 558 additions and 8 deletions.
3 changes: 2 additions & 1 deletion exec/java-exec/src/main/codegen/data/Parser.tdd
@@ -46,7 +46,8 @@
"SqlCreateOrReplaceView()",
"SqlDropView()",
"SqlShowFiles()",
"SqlCreateTable()"
"SqlCreateTable()",
"SqlDropTable()"
]

# List of methods for parsing custom literals.
17 changes: 17 additions & 0 deletions exec/java-exec/src/main/codegen/includes/parserImpls.ftl
@@ -240,3 +240,20 @@ SqlNode SqlCreateTable() :
return new SqlCreateTable(pos, tblName, fieldList, partitionFieldList, query);
}
}

/**
* Parses a drop table statement.
* DROP TABLE table_name;
*/
SqlNode SqlDropTable() :
{
SqlParserPos pos;
}
{
<DROP> { pos = getPos(); }
<TABLE>
{
return new SqlDropTable(pos, CompoundIdentifier());
}
}
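
With this rule in place, the generated parser accepts statements such as DROP TABLE my_table and DROP TABLE dfs.tmp.my_table (the table names here are illustrative, not from the commit); the CompoundIdentifier() call is what allows the schema-qualified form, and the resulting SqlDropTable node carries the full identifier to the handler added below.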

77 changes: 77 additions & 0 deletions exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropTableHandler.java
@@ -0,0 +1,77 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.sql.handlers;

import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.physical.PhysicalPlan;
import org.apache.drill.exec.planner.sql.DirectPlan;
import org.apache.drill.exec.planner.sql.SchemaUtilites;
import org.apache.drill.exec.planner.sql.parser.SqlDropTable;
import org.apache.drill.exec.store.AbstractSchema;

import java.io.IOException;

// SqlHandler for dropping a table.
public class DropTableHandler extends DefaultSqlHandler {

private static org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DropTableHandler.class);

public DropTableHandler(SqlHandlerConfig config) {
super(config);
}

/**
* Resolves the schema and invokes its drop method. Raises an exception if the schema is
* immutable or cannot be resolved.
* @param sqlNode - SqlDropTable node carrying the table name identifier
* @return - Single-row direct plan indicating the drop succeeded; raises an exception otherwise
* @throws ValidationException
* @throws RelConversionException
* @throws IOException
*/
@Override
public PhysicalPlan getPlan(SqlNode sqlNode) throws ValidationException, RelConversionException, IOException {

SqlDropTable dropTableNode = ((SqlDropTable) sqlNode);
SqlIdentifier tableIdentifier = dropTableNode.getTableIdentifier();

SchemaPlus defaultSchema = context.getNewDefaultSchema();
AbstractSchema drillSchema = null;

if (tableIdentifier != null) {
drillSchema = SchemaUtilites.resolveToMutableDrillSchema(defaultSchema, dropTableNode.getSchema());
}

String tableName = dropTableNode.getName();
if (drillSchema == null) {
throw UserException.validationError()
.message("Invalid table_name [%s]", tableName)
.build(logger);
}

drillSchema.dropTable(tableName);

return DirectPlan.createDirectPlan(context, true,
String.format("Table [%s] %s", tableName, "dropped"));
}
}
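
On a mutable schema the handler drops the table and returns a single-row result built by DirectPlan; on an immutable or unresolvable schema it raises a UserException instead. As a hedged illustration only (the table name and the ok/summary column labels are assumed for the example, not taken from this commit), a successful drop would look roughly like:

DROP TABLE dfs.tmp.orders;
+------+------------------------+
|  ok  |        summary         |
+------+------------------------+
| true | Table [orders] dropped |
+------+------------------------+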
@@ -160,6 +160,7 @@ RewriteType[] should be R(D, E, D, D).
rules.put(SqlUseSchema.class, R(D));
rules.put(SqlJoin.class, R(D, D, D, D, D, E));
rules.put(SqlOrderBy.class, R(D, E, D, D));
rules.put(SqlDropTable.class, R(D));
REWRITE_RULES = ImmutableMap.copyOf(rules);
}

95 changes: 95 additions & 0 deletions exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropTable.java
@@ -0,0 +1,95 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.sql.parser;

import java.util.Collections;
import java.util.List;

import org.apache.drill.exec.planner.sql.handlers.AbstractSqlHandler;
import org.apache.drill.exec.planner.sql.handlers.DropTableHandler;
import org.apache.drill.exec.planner.sql.handlers.SqlHandlerConfig;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlSpecialOperator;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.parser.SqlParserPos;

import com.google.common.collect.ImmutableList;

public class SqlDropTable extends DrillSqlCall {
public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("DROP_TABLE", SqlKind.OTHER) {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new SqlDropTable(pos, (SqlIdentifier) operands[0]);
}
};

private SqlIdentifier tableName;

public SqlDropTable(SqlParserPos pos, SqlIdentifier tableName) {
super(pos);
this.tableName = tableName;
}

@Override
public SqlOperator getOperator() {
return OPERATOR;
}

@Override
public List<SqlNode> getOperandList() {
return Collections.singletonList((SqlNode) tableName);
}

@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("DROP");
writer.keyword("TABLE");
tableName.unparse(writer, leftPrec, rightPrec);
}

@Override
public AbstractSqlHandler getSqlHandler(SqlHandlerConfig config) {
return new DropTableHandler(config);
}

public List<String> getSchema() {
if (tableName.isSimple()) {
return ImmutableList.of();
}

return tableName.names.subList(0, tableName.names.size()-1);
}

public String getName() {
if (tableName.isSimple()) {
return tableName.getSimple();
}

return tableName.names.get(tableName.names.size() - 1);
}

public SqlIdentifier getTableIdentifier() {
return tableName;
}

}
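
getSchema() and getName() simply split the compound identifier: everything before the last component is treated as the schema path, and the last component is the table name. A standalone sketch of that split (plain Java, not Drill code; the identifier dfs.tmp.orders is illustrative):

import java.util.Arrays;
import java.util.List;

public class IdentifierSplitExample {
  public static void main(String[] args) {
    // Components of a compound identifier such as dfs.tmp.orders.
    List<String> names = Arrays.asList("dfs", "tmp", "orders");
    // Mirrors SqlDropTable.getSchema(): all but the last component.
    List<String> schemaPath = names.subList(0, names.size() - 1);
    // Mirrors SqlDropTable.getName(): the last component.
    String tableName = names.get(names.size() - 1);
    System.out.println(schemaPath + " -> " + tableName); // prints [dfs, tmp] -> orders
  }
}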
@@ -190,4 +190,10 @@ public boolean contentsHaveChangedSince(long lastCheck, long now) {
public void close() throws Exception {
// no-op: default implementation for most implementations.
}

public void dropTable(String tableName) {
throw UserException.unsupportedError()
.message("Dropping tables is not supported in schema [%s]", getSchemaPath())
.build(logger);
}
}
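
The default implementation deliberately refuses the operation; only schemas that are actually writable are expected to override dropTable. A minimal sketch of that pattern with stand-in classes (ExampleSchema and ExampleMutableSchema are illustrative, not Drill types):

// Stand-in base schema: rejects DROP TABLE by default, mirroring the new
// AbstractSchema.dropTable() behaviour.
abstract class ExampleSchema {
  void dropTable(String tableName) {
    throw new UnsupportedOperationException(
        "Dropping tables is not supported in schema [" + getSchemaPath() + "]");
  }
  abstract String getSchemaPath();
}

// Stand-in mutable schema: the only kind expected to override dropTable().
class ExampleMutableSchema extends ExampleSchema {
  private final java.util.Set<String> tables =
      new java.util.HashSet<>(java.util.Arrays.asList("orders", "customers"));

  @Override
  void dropTable(String tableName) {
    tables.remove(tableName);
  }

  @Override
  String getSchemaPath() {
    return "dfs.tmp";
  }

  public static void main(String[] args) {
    ExampleSchema schema = new ExampleMutableSchema();
    schema.dropTable("orders"); // succeeds because this schema is mutable
  }
}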
@@ -72,7 +72,7 @@ public boolean supportDirectoryReads() {

@Override
public FormatSelection isReadable(DrillFileSystem fs, FileSelection selection) throws IOException {
if (isReadable(fs, selection.getFirstPath(fs))) {
if (isFileReadable(fs, selection.getFirstPath(fs))) {
if (plugin.getName() != null) {
NamedFormatPluginConfig namedConfig = new NamedFormatPluginConfig();
namedConfig.name = plugin.getName();
@@ -84,8 +84,12 @@ public FormatSelection isReadable(DrillFileSystem fs, FileSelection selection) throws IOException {
return null;
}

protected final boolean isReadable(DrillFileSystem fs, FileStatus status) throws IOException {
CompressionCodec codec = null;
/**
* Returns true if the file's extension matches this format's pattern.
*/
@Override
public boolean isFileReadable(DrillFileSystem fs, FileStatus status) throws IOException {
CompressionCodec codec = null;
if (compressible) {
codec = codecFactory.getCodec(status.getPath());
}
@@ -111,7 +115,6 @@ protected final boolean isReadable(DrillFileSystem fs, FileStatus status) throws IOException {
return false;
}


@Override
@JsonIgnore
public FormatPlugin getFormatPlugin() {
@@ -71,6 +71,10 @@
public class DrillFileSystem extends FileSystem implements OpenFileTracker {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillFileSystem.class);
private final static boolean TRACKING_ENABLED = AssertionUtil.isAssertionsEnabled();

public static final String HIDDEN_FILE_PREFIX = "_";
public static final String DOT_FILE_PREFIX = ".";

private final ConcurrentMap<DrillFSDataInputStream, DebugStackTrace> openedFiles = Maps.newConcurrentMap();

private final FileSystem underlyingFs;
@@ -23,7 +23,10 @@
public class DrillPathFilter extends Utils.OutputFileUtils.OutputFilesFilter {
@Override
public boolean accept(Path path) {
if (path.toString().contains("_metadata")) {
if (path.getName().startsWith(DrillFileSystem.HIDDEN_FILE_PREFIX)) {
return false;
}
if (path.getName().startsWith(DrillFileSystem.DOT_FILE_PREFIX)) {
return false;
}
return super.accept(path);
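
The filter now rejects any file whose name starts with the hidden-file prefix "_" or the dot-file prefix "." instead of only paths containing "_metadata", so Drill-internal files stay out of table listings. A small standalone illustration of the new check (the file names are made up for the example):

public class HiddenFileFilterExample {
  // Mirrors the prefixes added to DrillFileSystem in this commit.
  static final String HIDDEN_FILE_PREFIX = "_";
  static final String DOT_FILE_PREFIX = ".";

  static boolean accept(String fileName) {
    return !fileName.startsWith(HIDDEN_FILE_PREFIX) && !fileName.startsWith(DOT_FILE_PREFIX);
  }

  public static void main(String[] args) {
    for (String name : new String[] {"_metadata", ".staging", "part-00000.parquet"}) {
      System.out.println(name + " accepted: " + accept(name)); // only the last is accepted
    }
  }
}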
@@ -17,12 +17,15 @@
*/
package org.apache.drill.exec.store.dfs;

import org.apache.hadoop.fs.FileStatus;

import java.io.IOException;

public abstract class FormatMatcher {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FormatMatcher.class);

public abstract boolean supportDirectoryReads();
public abstract FormatSelection isReadable(DrillFileSystem fs, FileSelection selection) throws IOException;
public abstract boolean isFileReadable(DrillFileSystem fs, FileStatus status) throws IOException;
public abstract FormatPlugin getFormatPlugin();
}
