[CARBONDATA-3002] Fix some spell error
add

add

add
xubo245 committed Nov 1, 2018
1 parent 6258447 commit 9a11df9
Showing 58 changed files with 151 additions and 151 deletions.
@@ -56,7 +56,7 @@ ByteBuffer readByteBuffer(String filePath, long offset, int length)

/**
* This method will be used to read int from file from postion(offset), here
* length will be always 4 bacause int byte size if 4
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -66,7 +66,7 @@ ByteBuffer readByteBuffer(String filePath, long offset, int length)

/**
* This method will be used to read long from file from postion(offset), here
* length will be always 8 bacause int byte size is 8
* length will be always 8 because int byte size is 8
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -76,7 +76,7 @@ ByteBuffer readByteBuffer(String filePath, long offset, int length)

/**
* This method will be used to read int from file from postion(offset), here
* length will be always 4 bacause int byte size if 4
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @return read int
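The javadoc hunks above all describe the same contract: the read length is fixed because Java primitives have fixed widths, 4 bytes for an int and 8 for a long. As a rough illustration only — this is not the CarbonData FileReader implementation, and the class and method names below are made up — such fixed-length reads can be sketched with plain NIO:

```java
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

// Illustrative sketch, not CarbonData code.
public class FixedLengthReadSketch {

  // Read a 4-byte int starting at the given offset; length is always 4 because an int is 4 bytes.
  static int readInt(String filePath, long offset) throws IOException {
    return readBuffer(filePath, offset, 4).getInt();
  }

  // Read an 8-byte long starting at the given offset; length is always 8 because a long is 8 bytes.
  static long readLong(String filePath, long offset) throws IOException {
    return readBuffer(filePath, offset, 8).getLong();
  }

  private static ByteBuffer readBuffer(String filePath, long offset, int length) throws IOException {
    try (FileChannel channel = FileChannel.open(Paths.get(filePath), StandardOpenOption.READ)) {
      ByteBuffer buffer = ByteBuffer.allocate(length);
      channel.read(buffer, offset);
      buffer.flip();
      return buffer;
    }
  }
}
```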
@@ -54,7 +54,7 @@ public abstract class AbstractChunkReader implements DimensionColumnChunkReader
* this will be used to uncompress the
* row id and rle chunk
*/
protected NumberCompressor numberComressor;
protected NumberCompressor numberCompressor;

/**
* number of element in each chunk
@@ -80,7 +80,7 @@ public AbstractChunkReader(final int[] eachColumnValueSize, final String filePat
} catch (NumberFormatException exception) {
numberOfElement = Integer.parseInt(CarbonCommonConstants.BLOCKLET_SIZE_DEFAULT_VAL);
}
this.numberComressor = new NumberCompressor(numberOfElement);
this.numberCompressor = new NumberCompressor(numberOfElement);
this.numberOfRows = numberOfRows;
}

@@ -126,7 +126,7 @@ public CompressedDimensionChunkFileBasedReaderV1(final BlockletInfo blockletInfo
}
invertedIndexes = CarbonUtil
.getUnCompressColumnIndex(dataChunk.getRowIdPageLength(),
columnIndexData, numberComressor, 0);
columnIndexData, numberCompressor, 0);
// get the reverse index
invertedIndexesReverse = CarbonUtil.getInvertedReverseIndex(invertedIndexes);
}
@@ -141,7 +141,7 @@ public CompressedDimensionChunkFileBasedReaderV1(final BlockletInfo blockletInfo
.readByteArray(filePath, dataChunk.getRlePageOffset(),
dataChunk.getRlePageLength());
}
rlePage = numberComressor
rlePage = numberCompressor
.unCompress(key, 0, dataChunk.getRlePageLength());
// uncompress the data with rle indexes
dataPage = UnBlockIndexer.uncompressData(dataPage, rlePage, eachColumnValueSize[blockIndex]);
@@ -156,7 +156,7 @@ public DimensionColumnPage decodeColumnPage(
rawData.get(dataInv);
invertedIndexes = CarbonUtil
.getUnCompressColumnIndex(dimensionColumnChunk.rowid_page_length, dataInv,
numberComressor, 0);
numberCompressor, 0);
copySourcePoint += dimensionColumnChunk.rowid_page_length;
// get the reverse index
invertedIndexesReverse = CarbonUtil.getInvertedReverseIndex(invertedIndexes);
@@ -167,7 +167,7 @@ public DimensionColumnPage decodeColumnPage(
byte[] dataRle = new byte[dimensionColumnChunk.rle_page_length];
rawData.position(copySourcePoint);
rawData.get(dataRle);
rlePage = numberComressor.unCompress(dataRle, 0, dimensionColumnChunk.rle_page_length);
rlePage = numberCompressor.unCompress(dataRle, 0, dimensionColumnChunk.rle_page_length);
// uncompress the data with rle indexes
dataPage = UnBlockIndexer.uncompressData(dataPage, rlePage, eachColumnValueSize[blockIndex]);
}
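The hunks above decode a dimension chunk: the renamed numberCompressor uncompresses the row-id and RLE pages, and UnBlockIndexer then expands the data page using the RLE indexes. As a loose analogy only — CarbonData's page layout is different and this does not reproduce UnBlockIndexer — a plain run-length decoder looks like:

```java
import java.util.Arrays;

// Generic run-length decoding sketch; CarbonData's RLE page format differs.
public class RleDecodeSketch {

  // Expand (runLength, value) pairs back into a flat byte array.
  static byte[] decode(byte[] encoded) {
    int total = 0;
    for (int i = 0; i < encoded.length; i += 2) {
      total += encoded[i] & 0xFF;
    }
    byte[] out = new byte[total];
    int pos = 0;
    for (int i = 0; i < encoded.length; i += 2) {
      int runLength = encoded[i] & 0xFF;
      byte value = encoded[i + 1];
      for (int j = 0; j < runLength; j++) {
        out[pos++] = value;
      }
    }
    return out;
  }

  public static void main(String[] args) {
    byte[] encoded = {3, 7, 2, 9};                        // three 7s, then two 9s
    System.out.println(Arrays.toString(decode(encoded))); // [7, 7, 7, 9, 9]
  }
}
```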
@@ -81,7 +81,7 @@ public FileReaderImpl(int capacity) {

/**
* This method will be used to read int from file from postion(offset), here
* length will be always 4 bacause int byte size if 4
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -95,7 +95,7 @@ public FileReaderImpl(int capacity) {

/**
* This method will be used to read int from file from postion(offset), here
* length will be always 4 bacause int byte size if 4
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @return read int
@@ -108,7 +108,7 @@ public FileReaderImpl(int capacity) {

/**
* This method will be used to read int from file from postion(offset), here
* length will be always 4 bacause int byte size if 4
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -184,7 +184,7 @@ private ByteBuffer read(FileChannel channel, int size) throws IOException {

/**
* This method will be used to read long from file from postion(offset), here
* length will be always 8 bacause int byte size is 8
* length will be always 8 because int byte size is 8
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -121,7 +121,7 @@ public static int getLockProperty(String property, int defaultValue) {
*/
public static void deleteExpiredSegmentLockFiles(CarbonTable carbonTable) {
final long currTime = System.currentTimeMillis();
final long segmentLockFilesPreservTime =
final long segmentLockFilesPreserveTime =
CarbonProperties.getInstance().getSegmentLockFilesPreserveHours();
AbsoluteTableIdentifier absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier();
String lockFilesDir = CarbonProperties.getInstance()
@@ -137,7 +137,7 @@ public static void deleteExpiredSegmentLockFiles(CarbonTable carbonTable) {

@Override public boolean accept(CarbonFile pathName) {
if (CarbonTablePath.isSegmentLockFilePath(pathName.getName())) {
return (currTime - pathName.getLastModifiedTime()) > segmentLockFilesPreservTime;
return (currTime - pathName.getLastModifiedTime()) > segmentLockFilesPreserveTime;
}
return false;
}
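The renamed segmentLockFilesPreserveTime above is the age threshold for deleting stale segment lock files. A simplified sketch of that filter logic using java.io.File — the directory path and the 48-hour preserve window are placeholder assumptions; in CarbonData, CarbonProperties supplies the real value:

```java
import java.io.File;
import java.io.FileFilter;

// Simplified sketch of the expiry filter; not the CarbonLockUtil implementation.
public class SegmentLockCleanupSketch {
  public static void main(String[] args) {
    final long currTime = System.currentTimeMillis();
    final long segmentLockFilesPreserveTime = 48L * 60 * 60 * 1000; // assumed 48-hour window

    File lockFilesDir = new File("/tmp/carbon/LockFiles");          // placeholder path
    File[] expired = lockFilesDir.listFiles(new FileFilter() {
      @Override public boolean accept(File pathName) {
        // Expired when the file is older than the preserve window.
        return (currTime - pathName.lastModified()) > segmentLockFilesPreserveTime;
      }
    });
    if (expired != null) {
      for (File lockFile : expired) {
        lockFile.delete();
      }
    }
  }
}
```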
@@ -107,11 +107,11 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getTablePath()));
this.fileReader.setReadPageByPage(queryModel.isReadPageByPage());
this.execService = execService;
intialiseInfos();
initialiseInfos();
initQueryStatiticsModel();
}

private void intialiseInfos() {
private void initialiseInfos() {
for (BlockExecutionInfo blockInfo : blockExecutionInfos) {
Map<String, DeleteDeltaVo> deletedRowsMap = null;
// if delete delta file is present
@@ -172,7 +172,7 @@ private Map<String, DeleteDeltaVo> getDeleteDeltaDetails(AbstractIndex dataBlock
carbonDeleteDeltaFileReader = new CarbonDeleteFilesDataReader();
Map<String, DeleteDeltaVo> deletedRowsMap = carbonDeleteDeltaFileReader
.getDeletedRowsDataVo(deleteDeltaInfo.getDeleteDeltaFile());
setDeltedDeltaBoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
setDeletedDeltaBoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
// remove the lock
deleteDeltaToLockObjectMap.remove(deleteDeltaInfo);
return deletedRowsMap;
@@ -193,7 +193,7 @@ private Map<String, DeleteDeltaVo> getDeleteDeltaDetails(AbstractIndex dataBlock
* @param deletedRecordsMap
* @param dataBlock
*/
private void setDeltedDeltaBoToDataBlock(DeleteDeltaInfo deleteDeltaInfo,
private void setDeletedDeltaBoToDataBlock(DeleteDeltaInfo deleteDeltaInfo,
Map<String, DeleteDeltaVo> deletedRecordsMap, AbstractIndex dataBlock) {
// check if timestamp of data block is less than the latest delete delta timestamp
// then update the delete delta details and timestamp in data block
@@ -600,21 +600,21 @@ private void validateBlockletGroupSizeInMB() {
* This method validates the number of column read in one IO
*/
private void validateNumberOfColumnPerIORead() {
String numberofColumnPerIOString = carbonProperties
String numberOfColumnPerIOString = carbonProperties
.getProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
try {
short numberofColumnPerIO = Short.parseShort(numberofColumnPerIOString);
short numberofColumnPerIO = Short.parseShort(numberOfColumnPerIOString);
if (numberofColumnPerIO < CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_MIN
|| numberofColumnPerIO > CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_MAX) {
LOGGER.info("The Number Of pages per blocklet column value \"" + numberofColumnPerIOString
LOGGER.info("The Number Of pages per blocklet column value \"" + numberOfColumnPerIOString
+ "\" is invalid. Using the default value \""
+ CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
carbonProperties.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
}
} catch (NumberFormatException e) {
LOGGER.info("The Number Of pages per blocklet column value \"" + numberofColumnPerIOString
LOGGER.info("The Number Of pages per blocklet column value \"" + numberOfColumnPerIOString
+ "\" is invalid. Using the default value \""
+ CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
carbonProperties.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
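The renamed numberOfColumnPerIOString above follows the usual CarbonProperties pattern: parse the configured value, and fall back to the default if it is out of range or not a number. A minimal sketch of that pattern with java.util.Properties — the key, bounds, and default below are placeholders, not the real CarbonData constants:

```java
import java.util.Properties;

// Minimal validate-and-fall-back sketch; not the CarbonProperties implementation.
public class PropertyValidationSketch {
  private static final String KEY = "number.of.column.to.read.in.io"; // assumed key
  private static final short MIN = 1;                                  // assumed lower bound
  private static final short MAX = 20;                                 // assumed upper bound
  private static final String DEFAULT = "10";                          // assumed default

  static void validateNumberOfColumnPerIORead(Properties props) {
    String numberOfColumnPerIOString = props.getProperty(KEY, DEFAULT);
    try {
      short parsed = Short.parseShort(numberOfColumnPerIOString);
      if (parsed < MIN || parsed > MAX) {
        props.setProperty(KEY, DEFAULT); // out of range: reset to default
      }
    } catch (NumberFormatException e) {
      props.setProperty(KEY, DEFAULT);   // not a number: reset to default
    }
  }

  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty(KEY, "999");
    validateNumberOfColumnPerIORead(props);
    System.out.println(props.getProperty(KEY)); // prints the assumed default, 10
  }
}
```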
@@ -83,7 +83,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -99,7 +99,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -118,7 +118,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
backUpData(dbLocationCustom, "carbontable_preagg1")
sql("drop table carbontable")
@@ -141,7 +141,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
backUpData(dbLocationCustom, "carbontable_preagg1")
sql("drop table carbontable")
@@ -164,7 +164,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
backUpData(dbLocationCustom, "carbontable_preagg1")
sql("drop table carbontable")
@@ -183,7 +183,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -205,7 +205,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -227,7 +227,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -249,7 +249,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -270,7 +270,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -75,7 +75,7 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty extends QueryTest with Be
tableName: String,
segmentId: String,
isSchemaModified: Boolean = false): List[DataMap[_ <: Blocklet]] = {
val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
.lookupRelation(Some(dbName), tableName)(sqlContext.sparkSession)
.asInstanceOf[CarbonRelation]
val carbonTable = relation.carbonTable
@@ -291,7 +291,7 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty extends QueryTest with Be
sql("insert into minMaxSerialize select 'a','aa','aaa'")
checkAnswer(sql("select * from minMaxSerialize where name='a'"), Row("a", "aa", "aaa"))
checkAnswer(sql("select * from minMaxSerialize where name='b'"), Seq.empty)
val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
.lookupRelation(Some("default"), "minMaxSerialize")(sqlContext.sparkSession)
.asInstanceOf[CarbonRelation]
val carbonTable = relation.carbonTable
@@ -151,7 +151,7 @@ class TestCreateTableAsSelect extends QueryTest with BeforeAndAfterAll {
"create table ctas_tblproperties_testt stored by 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_INCLUDE'='key', 'sort_scope'='global_sort') as select * from carbon_ctas_test")
checkAnswer(sql("select * from ctas_tblproperties_testt"), sql("select * from carbon_ctas_test"))
val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
.lookupRelation(Option("default"), "ctas_tblproperties_testt")(Spark2TestQueryExecutor.spark)
.asInstanceOf[CarbonRelation].carbonTable
val metadataFolderPath: CarbonFile = FileFactory.getCarbonFile(carbonTable.getMetadataPath)
@@ -284,7 +284,7 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
sql("drop table carbontable")
// perform file check
assert(FileFactory.isFileExist(timestampFile, timestampFileType, true) ||
CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore)
CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore)

CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_UPDATE_SYNC_FOLDER,
@@ -295,7 +295,7 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
sql("drop table carbontable")
// perform file check
assert(FileFactory.isFileExist(timestampFile, timestampFileType, true) ||
CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore)
CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore)
}

override def afterAll {
