This repository has been archived by the owner on Feb 8, 2019. It is now read-only.

[PIRK-44]: Tachyon FS Error #50

Closed
wants to merge 6 commits into from
115 changes: 58 additions & 57 deletions pom.xml
@@ -1,20 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with ~
this work for additional information regarding copyright ownership. ~ The
ASF licenses this file to You under the Apache License, Version 2.0 ~ (the
"License"); you may not use this file except in compliance with ~ the License.
You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
~ ~ Unless required by applicable law or agreed to in writing, software ~
distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the
License for the specific language governing permissions and ~ limitations
under the License. -->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -199,9 +194,9 @@
<groupId>commons-net</groupId>
</exclusion>
<exclusion>
<groupId>org.apache.hive</groupId>
<artifactId>hive-service</artifactId>
</exclusion>
<groupId>org.apache.hive</groupId>
<artifactId>hive-service</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
@@ -406,13 +401,19 @@
<minimizeJar>true</minimizeJar>
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
</transformer>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
</transformer>
</transformers>
<filters>
<filter>
<artifact>org.tachyonproject:*</artifact>
<includes>
<include>org/tachyonproject/**</include>
</includes>
</filter>
<filter>
<artifact>*:*</artifact>
<excludes>
@@ -425,13 +426,13 @@
</configuration>
</execution>

<!-- in the version with benchmarks (pom-with-benchmarks.xml), this
<!-- in the version with benchmarks (pom-with-benchmarks.xml), this
is where that <execution></execution> lives -->

</executions>
</plugin>

<!--This plugin's configuration is used to store Eclipse m2e settings
<!--This plugin's configuration is used to store Eclipse m2e settings
only. It has no influence on the Maven build itself. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
@@ -456,27 +457,27 @@
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
<ignore />
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>
org.apache.rat
</groupId>
<artifactId>
apache-rat-plugin
</artifactId>
<versionRange>
[0.11,)
</versionRange>
<goals>
<goal>check</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
<pluginExecutionFilter>
<groupId>
org.apache.rat
</groupId>
<artifactId>
apache-rat-plugin
</artifactId>
<versionRange>
[0.11,)
</versionRange>
<goals>
<goal>check</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore />
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
@@ -526,12 +527,13 @@

<profiles>
<profile>
<!-- Performs execution of Integration Tests using the Maven FailSafe Plugin. The view of integration tests in this context
are those tests interfacing with external sources and services requiring additional resources or credentials that cannot
be explicitly provided. -->
<!-- Performs execution of Integration Tests using the Maven FailSafe
Plugin. The view of integration tests in this context are those tests interfacing
with external sources and services requiring additional resources or credentials
that cannot be explicitly provided. -->
<id>integration-tests</id>
<build>
<plugins>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
@@ -548,12 +550,12 @@
</build>
</profile>
<profile>
<!-- Checks style and licensing requirements. This is a good idea to run
for contributions and for the release process. While it would be nice to
run always these plugins can considerably slow the build and have proven
to create unstable builds in our multi-module project and when building using
multiple threads. The stability issues seen with Checkstyle in multi-module
builds include false-positives and false negatives. -->
<!-- Checks style and licensing requirements. This is a good idea to run
for contributions and for the release process. While it would be nice to
run always these plugins can considerably slow the build and have proven
to create unstable builds in our multi-module project and when building using
multiple threads. The stability issues seen with Checkstyle in multi-module
builds include false-positives and false negatives. -->
<id>contrib-check</id>
<build>
<plugins>
@@ -585,12 +587,11 @@
</build>
</profile>
<profile>
<!-- This profile will disable DocLint which performs strict
JavaDoc processing which was introduced in JDK 8. These are technically errors
in the JavaDoc which we need to eventually address. However, if a release
is performed using JDK 8, the JavaDoc generation would fail. By activating
this profile when running on JDK 8 we can ensure the JavaDocs continue to
generate successfully -->
<!-- This profile will disable DocLint which performs strict JavaDoc processing
which was introduced in JDK 8. These are technically errors in the JavaDoc
which we need to eventually address. However, if a release is performed using
JDK 8, the JavaDoc generation would fail. By activating this profile when
running on JDK 8 we can ensure the JavaDocs continue to generate successfully -->
<id>disable-doclint</id>
<activation>
<jdk>1.8</jdk>
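Note on the pom.xml change above: the new <filter> keeping org/tachyonproject/** looks like the substance of the fix for the Tachyon FS error named in the PR title. With <minimizeJar>true</minimizeJar>, the shade plugin drops classes it cannot reach through static references, and Hadoop resolves FileSystem implementations by class name at runtime, so the Tachyon client classes can be stripped from the shaded jar even though they are needed. The following is a minimal Java sketch of that failure mode; the property name, implementation class, URI, and path are illustrative assumptions, not details taken from this diff.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class TachyonFsAccessSketch
{
  public static void main(String[] args) throws Exception
  {
    Configuration conf = new Configuration();

    // Hadoop looks up the FileSystem implementation for the "tachyon://" scheme
    // by class name at runtime; nothing here references the class statically,
    // so minimizeJar's reachability analysis never sees it.
    conf.set("fs.tachyon.impl", "tachyon.hadoop.TFS"); // assumed property and class names

    // Without the org/tachyonproject/** include filter, the shaded jar may no
    // longer contain the Tachyon client classes, and this call fails at runtime
    // with a class-not-found style "Tachyon FS" error.
    FileSystem fs = FileSystem.get(new URI("tachyon://localhost:19998/"), conf);
    System.out.println(fs.exists(new Path("/pirk/input"))); // hypothetical path
  }
}

The ServicesResourceTransformer configured above merges META-INF/services entries across dependencies, but it does not by itself stop minimizeJar from removing the classes those entries point to, which is presumably why the explicit include filter was added.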
31 changes: 14 additions & 17 deletions src/main/java/org/apache/pirk/schema/data/DataSchemaLoader.java
@@ -48,7 +48,8 @@
* <p>
* Schemas should be specified as follows:
*
* <pre>{@code
* <pre>
* {@code
* <schema>
* <schemaName> name of the schema </schemaName>
* <element>
@@ -88,12 +89,10 @@ public class DataSchemaLoader
}
}



/* Kept for compatibility */
/**
* Initializes the static {@link DataSchemaRegistry} with a list of
* available data schema names.
* Initializes the static {@link DataSchemaRegistry} with a list of available data schema names.
*
* @throws Exception
*/
public static void initialize() throws Exception
@@ -103,14 +102,12 @@ public static void initialize() throws Exception

/* Kept for compatibility */
/**
* Initializes the static {@link DataSchemaRegistry} with a list of
* available data schema names.
* Initializes the static {@link DataSchemaRegistry} with a list of available data schema names.
*
* @param hdfs
* If true, specifies that the data schema is an hdfs file; if
* false, that it is a regular file.
* If true, specifies that the data schema is an hdfs file; if false, that it is a regular file.
* @param fs
* Used only when {@code hdfs} is true; the {@link FileSystem}
* handle for the hdfs in which the data schema exists
* Used only when {@code hdfs} is true; the {@link FileSystem} handle for the hdfs in which the data schema exists
* @throws Exception
*/
public static void initialize(boolean hdfs, FileSystem fs) throws Exception
@@ -173,9 +170,9 @@ public DataSchemaLoader()
* The source of the XML data schema description.
* @return The data schema.
* @throws IOException
* A problem occurred reading from the given stream.
* A problem occurred reading from the given stream.
* @throws PIRException
* The schema description is invalid.
* The schema description is invalid.
*/
public DataSchema loadSchema(InputStream stream) throws IOException, PIRException
{
@@ -210,10 +207,10 @@ public DataSchema loadSchema(InputStream stream) throws IOException, PIRException

/**
* Parses and normalizes the XML document available on the given stream.
*
* @param stream
* The input stream.
* @return
* A {@link Document} representing the XML document.
* @return A {@link Document} representing the XML document.
* @throws IOException
* @throws PIRException
*/
@@ -236,6 +233,7 @@ private Document parseXMLDocument(InputStream stream) throws IOException, PIRException

/**
* Extracts a data schema element node's contents
*
* @param eElement
* A data schema element node.
* @param schema
@@ -312,8 +310,7 @@ void validateIsPrimitiveType(String typeName) throws PIRException
*
* @param partitionerTypeName
* The name of the {@link DataPartitioner} subclass to instantiate.
* @return
* An instance of the named {@link DataPartitioner} subclass.
* @return An instance of the named {@link DataPartitioner} subclass.
* @throws PIRException
*/
DataPartitioner instantiatePartitioner(String partitionerTypeName) throws PIRException
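For context on the DataSchemaLoader API touched in the diff above, here is a minimal usage sketch; the file name, the null FileSystem argument, and the getSchemaName() accessor are assumptions rather than details confirmed by this diff.

import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.pirk.schema.data.DataSchema;
import org.apache.pirk.schema.data.DataSchemaLoader;

public class DataSchemaLoaderSketch
{
  public static void main(String[] args) throws Exception
  {
    // Load a single schema description directly from an XML file.
    try (InputStream stream = new FileInputStream("dataSchema.xml")) // hypothetical file
    {
      DataSchema schema = new DataSchemaLoader().loadSchema(stream);
      System.out.println("Loaded data schema: " + schema.getSchemaName()); // assumes a getSchemaName() accessor
    }

    // Or populate the static registry from locally configured schema files;
    // per the Javadoc, the FileSystem handle is only consulted when hdfs is true.
    DataSchemaLoader.initialize(false, null);
  }
}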
23 changes: 12 additions & 11 deletions src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
@@ -52,7 +52,8 @@
* <p>
* Schemas should be specified as follows:
*
* <pre>{@code
* <pre>
* {@code
* <schema>
* <schemaName> name of the schema </schemaName>
* <dataSchemaName> name of the data schema over which this query is run </dataSchemaName>
@@ -93,8 +94,8 @@ public class QuerySchemaLoader

/* Kept for compatibility */
/**
* Initializes the static {@link QuerySchemaRegistry} with a list of
* query schema names.
* Initializes the static {@link QuerySchemaRegistry} with a list of query schema names.
*
* @throws Exception
*/
public static void initialize() throws Exception
@@ -104,14 +105,12 @@ public static void initialize() throws Exception

/* Kept for compatibility */
/**
* Initializes the static {@link QuerySchemaRegistry} with a list of
* available query schema names.
* Initializes the static {@link QuerySchemaRegistry} with a list of available query schema names.
*
* @param hdfs
* If true, specifies that the query schema is an hdfs file; if false,
* that it is a regular file.
* If true, specifies that the query schema is an hdfs file; if false, that it is a regular file.
* @param fs
* Used only when {@code hdfs} is true; the {@link FileSystem} handle
* for the hdfs in which the query schema exists
* Used only when {@code hdfs} is true; the {@link FileSystem} handle for the hdfs in which the query schema exists
* @throws Exception
*/
public static void initialize(boolean hdfs, FileSystem fs) throws Exception
@@ -175,9 +174,9 @@ public QuerySchemaLoader()
* The source of the XML query schema description.
* @return The query schema.
* @throws IOException
* A problem occurred reading from the given stream.
* A problem occurred reading from the given stream.
* @throws PIRException
* The schema description is invalid.
* The schema description is invalid.
*/
public QuerySchema loadSchema(InputStream stream) throws IOException, PIRException
{
@@ -267,6 +266,7 @@ public QuerySchema loadSchema(InputStream stream) throws IOException, PIRException

/**
* Parses and normalizes the XML document available on the given stream.
*
* @param stream
* The input stream.
* @return A Document representing the XML document.
@@ -355,6 +355,7 @@ private String extractValue(Document doc, String tagName) throws PIRException
* Instantiate the specified filter.
*
* Exceptions derive from call to the {@code getFilter} method of {@link FilterFactory}
*
* @param filterTypeName
* The name of the filter class we are instantiating
* @param filteredElementNames
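Similarly, a minimal sketch for the QuerySchemaLoader changes above, under the same caveats (the file name and the getSchemaName() accessor are assumptions):

import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.pirk.schema.query.QuerySchema;
import org.apache.pirk.schema.query.QuerySchemaLoader;

public class QuerySchemaLoaderSketch
{
  public static void main(String[] args) throws Exception
  {
    // Load a single query schema description directly from an XML file.
    try (InputStream stream = new FileInputStream("querySchema.xml")) // hypothetical file
    {
      QuerySchema querySchema = new QuerySchemaLoader().loadSchema(stream);
      System.out.println("Loaded query schema: " + querySchema.getSchemaName()); // assumes a getSchemaName() accessor
    }
  }
}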