[CARBONDATA-3002] Fix some spell error #2890

Closed
wants to merge 3 commits into from
Changes from 2 commits
@@ -56,7 +56,7 @@ ByteBuffer readByteBuffer(String filePath, long offset, int length)

/**
* This method will be used to read int from file from postion(offset), here
- * length will be always 4 bacause int byte size if 4
+ * length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -66,7 +66,7 @@ ByteBuffer readByteBuffer(String filePath, long offset, int length)

/**
* This method will be used to read long from file from postion(offset), here
- * length will be always 8 bacause int byte size is 8
+ * length will be always 8 because int byte size is 8
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -76,7 +76,7 @@ ByteBuffer readByteBuffer(String filePath, long offset, int length)

/**
* This method will be used to read int from file from postion(offset), here
- * length will be always 4 bacause int byte size if 4
+ * length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @return read int
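For readers skimming the hunks above: the Javadoc being corrected describes a fixed-length read at an offset (always 4 bytes for an int, 8 for a long). The following is a minimal stand-alone sketch of that contract, not CarbonData's actual FileReader/FileReaderImpl; the class name and the FileChannel-based approach are assumptions for illustration.

// Minimal sketch of the contract described above: read exactly 4 bytes (int)
// or 8 bytes (long) starting at a given offset. Illustration only, not
// CarbonData's FileReaderImpl; SimpleOffsetReader is a made-up name.
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public final class SimpleOffsetReader {

  public static int readInt(String filePath, long offset) throws IOException {
    // int is always 4 bytes, so the read length is fixed at 4
    return read(filePath, offset, 4).getInt();
  }

  public static long readLong(String filePath, long offset) throws IOException {
    // long is always 8 bytes, so the read length is fixed at 8
    return read(filePath, offset, 8).getLong();
  }

  private static ByteBuffer read(String filePath, long offset, int length) throws IOException {
    try (FileChannel channel = FileChannel.open(Paths.get(filePath), StandardOpenOption.READ)) {
      ByteBuffer buffer = ByteBuffer.allocate(length);
      channel.position(offset);
      while (buffer.hasRemaining() && channel.read(buffer) != -1) {
        // keep reading until the requested number of bytes is available
      }
      buffer.flip();
      return buffer;
    }
  }
}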
@@ -81,7 +81,7 @@ public FileReaderImpl(int capacity) {

/**
* This method will be used to read int from file from postion(offset), here
- * length will be always 4 bacause int byte size if 4
+ * length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -95,7 +95,7 @@ public FileReaderImpl(int capacity) {

/**
* This method will be used to read int from file from postion(offset), here
- * length will be always 4 bacause int byte size if 4
+ * length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @return read int
@@ -108,7 +108,7 @@ public FileReaderImpl(int capacity) {

/**
* This method will be used to read int from file from postion(offset), here
- * length will be always 4 bacause int byte size if 4
+ * length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -184,7 +184,7 @@ private ByteBuffer read(FileChannel channel, int size) throws IOException {

/**
* This method will be used to read long from file from postion(offset), here
- * length will be always 8 bacause int byte size is 8
+ * length will be always 8 because int byte size is 8
*
* @param filePath fully qualified file path
* @param offset reading start position,
@@ -121,7 +121,7 @@ public static int getLockProperty(String property, int defaultValue) {
*/
public static void deleteExpiredSegmentLockFiles(CarbonTable carbonTable) {
final long currTime = System.currentTimeMillis();
- final long segmentLockFilesPreservTime =
+ final long segmentLockFilesPreserveTime =
CarbonProperties.getInstance().getSegmentLockFilesPreserveHours();
AbsoluteTableIdentifier absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier();
String lockFilesDir = CarbonProperties.getInstance()
@@ -137,7 +137,7 @@ public static void deleteExpiredSegmentLockFiles(CarbonTable carbonTable) {

@Override public boolean accept(CarbonFile pathName) {
if (CarbonTablePath.isSegmentLockFilePath(pathName.getName())) {
- return (currTime - pathName.getLastModifiedTime()) > segmentLockFilesPreservTime;
+ return (currTime - pathName.getLastModifiedTime()) > segmentLockFilesPreserveTime;
}
return false;
}
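The filter being renamed here keeps only segment lock files whose age exceeds the configured preserve window. Below is a rough, self-contained sketch of the same expiry check using plain java.io.File instead of CarbonFile; the directory layout, the ".lock" suffix, and the preserve window are assumptions for the sketch, not CarbonData's actual constants.

// Illustration of the expiry check above, written against java.io.File.
import java.io.File;
import java.util.concurrent.TimeUnit;

public final class ExpiredLockFileCleaner {

  public static void deleteExpiredLockFiles(File lockFilesDir, long preserveHours) {
    final long currTime = System.currentTimeMillis();
    final long preserveMillis = TimeUnit.HOURS.toMillis(preserveHours);
    File[] expired = lockFilesDir.listFiles(pathName ->
        // same shape as the accept() above: only lock files older than the
        // preserve window are selected for deletion
        pathName.getName().endsWith(".lock")
            && (currTime - pathName.lastModified()) > preserveMillis);
    if (expired != null) {
      for (File file : expired) {
        file.delete();
      }
    }
  }
}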
@@ -107,11 +107,11 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getTablePath()));
this.fileReader.setReadPageByPage(queryModel.isReadPageByPage());
this.execService = execService;
- intialiseInfos();
+ initialiseInfos();
initQueryStatiticsModel();
}

- private void intialiseInfos() {
+ private void initialiseInfos() {
for (BlockExecutionInfo blockInfo : blockExecutionInfos) {
Map<String, DeleteDeltaVo> deletedRowsMap = null;
// if delete delta file is present
@@ -172,7 +172,7 @@ private Map<String, DeleteDeltaVo> getDeleteDeltaDetails(AbstractIndex dataBlock
carbonDeleteDeltaFileReader = new CarbonDeleteFilesDataReader();
Map<String, DeleteDeltaVo> deletedRowsMap = carbonDeleteDeltaFileReader
.getDeletedRowsDataVo(deleteDeltaInfo.getDeleteDeltaFile());
- setDeltedDeltaBoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
+ setDeletedDeltaBoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
// remove the lock
deleteDeltaToLockObjectMap.remove(deleteDeltaInfo);
return deletedRowsMap;
@@ -193,7 +193,7 @@ private Map<String, DeleteDeltaVo> getDeleteDeltaDetails(AbstractIndex dataBlock
* @param deletedRecordsMap
* @param dataBlock
*/
- private void setDeltedDeltaBoToDataBlock(DeleteDeltaInfo deleteDeltaInfo,
+ private void setDeletedDeltaBoToDataBlock(DeleteDeltaInfo deleteDeltaInfo,
Map<String, DeleteDeltaVo> deletedRecordsMap, AbstractIndex dataBlock) {
// check if timestamp of data block is less than the latest delete delta timestamp
// then update the delete delta details and timestamp in data block
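The comment above states the rule the renamed method enforces: delete-delta details are attached to a data block only when the block's cached delete-delta state is older than the newest delete-delta file. A hypothetical sketch of that check follows; every type and field name here is invented for illustration and does not match CarbonData's actual classes.

// Hypothetical sketch of the rule described in the comment above.
import java.util.Map;

final class DeleteDeltaApplier {

  static void applyIfNewer(long latestDeleteDeltaTimestamp,
      Map<String, int[]> deletedRowsMap, DataBlock dataBlock) {
    // only refresh the block when its cached delete-delta state is older
    // than the latest delete-delta file
    if (dataBlock.getDeleteDeltaTimestamp() < latestDeleteDeltaTimestamp) {
      dataBlock.setDeletedRows(deletedRowsMap);
      dataBlock.setDeleteDeltaTimestamp(latestDeleteDeltaTimestamp);
    }
  }

  // minimal stand-in for the data block, just enough to make the sketch compile
  static final class DataBlock {
    private long deleteDeltaTimestamp;
    private Map<String, int[]> deletedRows;

    long getDeleteDeltaTimestamp() { return deleteDeltaTimestamp; }
    void setDeleteDeltaTimestamp(long ts) { this.deleteDeltaTimestamp = ts; }
    void setDeletedRows(Map<String, int[]> rows) { this.deletedRows = rows; }
  }
}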
@@ -593,21 +593,21 @@ private void validateBlockletGroupSizeInMB() {
* This method validates the number of column read in one IO
*/
private void validateNumberOfColumnPerIORead() {
- String numberofColumnPerIOString = carbonProperties
+ String numberOfColumnPerIOString = carbonProperties
.getProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
try {
- short numberofColumnPerIO = Short.parseShort(numberofColumnPerIOString);
+ short numberofColumnPerIO = Short.parseShort(numberOfColumnPerIOString);
if (numberofColumnPerIO < CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_MIN
|| numberofColumnPerIO > CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_MAX) {
LOGGER.info("The Number Of pages per blocklet column value \"" + numberofColumnPerIOString
LOGGER.info("The Number Of pages per blocklet column value \"" + numberOfColumnPerIOString
+ "\" is invalid. Using the default value \""
+ CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
carbonProperties.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
}
} catch (NumberFormatException e) {
LOGGER.info("The Number Of pages per blocklet column value \"" + numberofColumnPerIOString
LOGGER.info("The Number Of pages per blocklet column value \"" + numberOfColumnPerIOString
+ "\" is invalid. Using the default value \""
+ CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
carbonProperties.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
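The surrounding hunk follows a common validate-or-fallback pattern: parse the property as a short, and if it is out of range or unparsable, reset it to the default. Here is a self-contained sketch of that pattern; the property name, default, and bounds are placeholders, not CarbonData's real constants.

// Sketch of the validation pattern above, built on java.util.Properties.
import java.util.Properties;

public final class PropertyValidator {

  static final String NUMBER_OF_COLUMN_TO_READ_IN_IO = "number.of.column.to.read.in.io";
  static final String DEFAULT_VALUE = "10";   // assumed default for the sketch
  static final short MIN_VALUE = 1;           // assumed lower bound
  static final short MAX_VALUE = 20;          // assumed upper bound

  static void validateNumberOfColumnPerIORead(Properties props) {
    String raw = props.getProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO, DEFAULT_VALUE);
    try {
      short value = Short.parseShort(raw);
      if (value < MIN_VALUE || value > MAX_VALUE) {
        // out of range: reset to the default, as the code above does
        props.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO, DEFAULT_VALUE);
      }
    } catch (NumberFormatException e) {
      // not a number at all: also reset to the default
      props.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO, DEFAULT_VALUE);
    }
  }
}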
@@ -83,7 +83,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -99,7 +99,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -118,7 +118,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
backUpData(dbLocationCustom, "carbontable_preagg1")
sql("drop table carbontable")
@@ -141,7 +141,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
backUpData(dbLocationCustom, "carbontable_preagg1")
sql("drop table carbontable")
@@ -164,7 +164,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
backUpData(dbLocationCustom, "carbontable_preagg1")
sql("drop table carbontable")
@@ -183,7 +183,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -205,7 +205,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -227,7 +227,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -249,7 +249,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -270,7 +270,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
backUpData(dbLocationCustom, "carbontable")
sql("drop table carbontable")
restoreData(dbLocationCustom, "carbontable")
@@ -75,7 +75,7 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty extends QueryTest with Be
tableName: String,
segmentId: String,
isSchemaModified: Boolean = false): List[DataMap[_ <: Blocklet]] = {
- val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
+ val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
.lookupRelation(Some(dbName), tableName)(sqlContext.sparkSession)
.asInstanceOf[CarbonRelation]
val carbonTable = relation.carbonTable
@@ -291,7 +291,7 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty extends QueryTest with Be
sql("insert into minMaxSerialize select 'a','aa','aaa'")
checkAnswer(sql("select * from minMaxSerialize where name='a'"), Row("a", "aa", "aaa"))
checkAnswer(sql("select * from minMaxSerialize where name='b'"), Seq.empty)
- val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
+ val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
.lookupRelation(Some("default"), "minMaxSerialize")(sqlContext.sparkSession)
.asInstanceOf[CarbonRelation]
val carbonTable = relation.carbonTable
@@ -151,7 +151,7 @@ class TestCreateTableAsSelect extends QueryTest with BeforeAndAfterAll {
"create table ctas_tblproperties_testt stored by 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_INCLUDE'='key', 'sort_scope'='global_sort') as select * from carbon_ctas_test")
checkAnswer(sql("select * from ctas_tblproperties_testt"), sql("select * from carbon_ctas_test"))
- val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+ val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
.lookupRelation(Option("default"), "ctas_tblproperties_testt")(Spark2TestQueryExecutor.spark)
.asInstanceOf[CarbonRelation].carbonTable
val metadataFolderPath: CarbonFile = FileFactory.getCarbonFile(carbonTable.getMetadataPath)
@@ -284,7 +284,7 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
sql("drop table carbontable")
// perform file check
assert(FileFactory.isFileExist(timestampFile, timestampFileType, true) ||
- CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore)
+ CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore)

CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_UPDATE_SYNC_FOLDER,
@@ -295,7 +295,7 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
sql("drop table carbontable")
// perform file check
assert(FileFactory.isFileExist(timestampFile, timestampFileType, true) ||
- CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore)
+ CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore)
}

override def afterAll {
@@ -208,7 +208,7 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
.getCarbonTable(Some("iud_db"), "update_status_files")(sqlContext.sparkSession)
val metaPath = carbonTable.getMetadataPath
val files = FileFactory.getCarbonFile(metaPath)
- val result = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.getClass
+ val result = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.getClass
if(result.getCanonicalName.contains("CarbonFileMetastore")) {
assert(files.listFiles(new CarbonFileFilter {
override def accept(file: CarbonFile): Boolean = !file.isDirectory
@@ -257,11 +257,11 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
assert(
listOfTupleId(4).contains("0/0/0-0_batchno0-0-0-") && listOfTupleId(4).endsWith("/0/0/4"))

- val carbonTable_part = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+ val carbonTable_part = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
.lookupRelation(Option("iud_db"), "dest_tuple_part")(Spark2TestQueryExecutor.spark)
.asInstanceOf[CarbonRelation].carbonTable

- val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+ val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
.lookupRelation(Option("iud_db"), "dest_tuple")(Spark2TestQueryExecutor.spark)
.asInstanceOf[CarbonRelation].carbonTable

@@ -406,7 +406,7 @@ class StandardPartitionTableLoadingTestCase extends QueryTest with BeforeAndAfte
val dblocation = table.getTablePath.substring(0, table.getTablePath.lastIndexOf("/"))
backUpData(dblocation, "restorepartition")
sql("drop table restorepartition")
- if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+ if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
restoreData(dblocation, "restorepartition")
sql("refresh table restorepartition")
checkAnswer(sql("select count(*) from restorepartition"), rows)
@@ -127,7 +127,7 @@ class CarbonMergeFilesRDD(

if (isHivePartitionedTable) {
CarbonLoaderUtil
- .mergeIndexFilesinPartitionedSegment(carbonTable, split.segmentId,
+ .mergeIndexFilesInPartitionedSegment(carbonTable, split.segmentId,
segmentFileNameToSegmentIdMap.get(split.segmentId))
} else {
new CarbonIndexFileMergeWriter(carbonTable)