Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[CARBONDATA-3999] Fix permission issue in /tmp/indexservertmp directory. #3973

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -3250,14 +3250,14 @@ public static String getIndexServerTempPath() {
public static CarbonFile createTempFolderForIndexServer(String queryId)
throws IOException {
final String path = getIndexServerTempPath();
if (!FileFactory.isFileExist(path)) {
// Create the new index server temp directory if it does not exist
LOGGER.info("Creating Index Server temp folder:" + path);
FileFactory
.createDirectoryAndSetPermission(path,
new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
}
if (queryId == null) {
if (!FileFactory.isFileExist(path)) {
// Create the new index server temp directory if it does not exist
LOGGER.info("Creating Index Server temp folder:" + path);
FileFactory
.createDirectoryAndSetPermission(path,
new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
}
return null;
}
CarbonFile file = FileFactory.getCarbonFile(path + CarbonCommonConstants.FILE_SEPARATOR
Expand Down
Expand Up @@ -279,7 +279,8 @@ private List<String> getAllDeleteDeltaFiles(String path) {
private String[] getDeleteDeltaFiles(String segmentFilePath, List<String> allDeleteDeltaFiles) {
List<String> deleteDeltaFiles = new ArrayList<>();
String segmentFileName = null;
String[] pathElements = segmentFilePath.split(Pattern.quote(File.separator));
segmentFilePath = segmentFilePath.replace("\\", "/");
String[] pathElements = segmentFilePath.split(CarbonCommonConstants.FILE_SEPARATOR);
if (ArrayUtils.isNotEmpty(pathElements)) {
segmentFileName = pathElements[pathElements.length - 1];
}
Expand Down
Expand Up @@ -17,7 +17,6 @@

package org.apache.carbondata.hadoop.api;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
Expand All @@ -29,7 +28,6 @@
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;

import org.apache.carbondata.common.exceptions.DeprecatedFeatureException;
import org.apache.carbondata.common.logging.LogServiceFactory;
Expand Down Expand Up @@ -576,7 +574,8 @@ public void close(TaskAttemptContext taskAttemptContext) throws IOException {
String blockName;
for (String tuple : tupleId) {
blockName = CarbonUpdateUtil.getBlockName(
(tuple.split(Pattern.quote(File.separator))[TupleIdEnum.BLOCK_ID.getTupleIdIndex()]));
(tuple.split(CarbonCommonConstants.FILE_SEPARATOR)
[TupleIdEnum.BLOCK_ID.getTupleIdIndex()]));

if (!blockToDeleteDeltaBlockMapping.containsKey(blockName)) {
blockDetails = new DeleteDeltaBlockDetails(blockName);
Expand Down
Expand Up @@ -58,7 +58,6 @@ public class CarbonVectorizedRecordReader extends AbstractRecordReader<Object> {

private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonVectorizedRecordReader.class.getName());
private static final int DEFAULT_BATCH_SIZE = 4 * 1024;

private CarbonColumnarBatch carbonColumnarBatch;

Expand Down Expand Up @@ -194,7 +193,7 @@ private void initBatch() {
}
carbonColumnarBatch = new CarbonColumnarBatch(vectors,
CarbonV3DataFormatConstants.NUMBER_OF_ROWS_PER_BLOCKLET_COLUMN_PAGE_DEFAULT,
new boolean[DEFAULT_BATCH_SIZE]);
new boolean[CarbonV3DataFormatConstants.NUMBER_OF_ROWS_PER_BLOCKLET_COLUMN_PAGE_DEFAULT]);
}
}

Expand Down
Expand Up @@ -21,6 +21,8 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
Expand All @@ -34,6 +36,7 @@
import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.metadata.datatype.Field;
import org.apache.carbondata.core.scan.expression.ColumnExpression;
import org.apache.carbondata.core.scan.expression.Expression;
Expand Down Expand Up @@ -155,7 +158,15 @@ public void update(String path, Expression filterExpression,
Schema schema = CarbonSchemaReader.readSchema(indexFiles.get(0)).asOriginOrder();
Field[] fields = schema.getFields();
String[] projectionColumns = new String[fields.length + 1];
List<Integer> dateIndexes = new ArrayList<>();
List<Integer> timeStampIndexes = new ArrayList<>();
for (int i = 0; i < fields.length; i++) {
if (fields[i].getDataType() == DataTypes.DATE) {
dateIndexes.add(i);
}
if (fields[i].getDataType() == DataTypes.TIMESTAMP) {
timeStampIndexes.add(i);
}
projectionColumns[i] = (fields[i].getFieldName());
}
projectionColumns[projectionColumns.length - 1] =
Expand All @@ -173,10 +184,16 @@ public void update(String path, Expression filterExpression,
RecordWriter<NullWritable, ObjectArrayWritable> deleteDeltaWriter =
CarbonTableOutputFormat.getDeleteDeltaRecordWriter(path);
ObjectArrayWritable writable = new ObjectArrayWritable();

long day = 24L * 3600 * 1000;
while (reader.hasNext()) {
Object[] row = (Object[]) reader.readNextRow();
writable.set(Arrays.copyOfRange(row, row.length - 1, row.length));
for (Integer dateIndex : dateIndexes) {
row[dateIndex] = new Date((day * ((int) row[dateIndex])));
}
for (Integer timeStampIndex : timeStampIndexes) {
row[timeStampIndex] = new Timestamp((long) row[timeStampIndex] / 1000);
}
for (Map.Entry<String, String> column : updatedColumnToValueMapping.entrySet()) {
row[getColumnIndex(fields, column.getKey())] = column.getValue();
}
Expand Down
Expand Up @@ -72,6 +72,43 @@ public void testDelete() throws Exception {
FileUtils.deleteDirectory(new File(path));
}

@Test
public void testUpdateOnDateType() throws Exception {
  String path = "./testWriteFiles";
  FileUtils.deleteDirectory(new File(path));
  // Schema with an int key plus DATE and TIMESTAMP columns, to exercise the
  // date/time value conversion path in CarbonIUD.update().
  Field[] fields = new Field[3];
  fields[0] = new Field("intField", DataTypes.INT);
  fields[1] = new Field("dateField", DataTypes.DATE);
  fields[2] = new Field("timeField", DataTypes.TIMESTAMP);
  CarbonWriter writer = CarbonWriter.builder()
      .outputPath(path)
      .withCsvInput(new Schema(fields))
      .writtenBy("IUDTest")
      .build();
  // Write 10 rows with intField = 0..9; exactly one row (intField == 0)
  // matches the update predicate below.
  for (int i = 0; i < 10; i++) {
    String[] row2 = new String[]{
        String.valueOf(i % 10000),
        "2019-03-02",
        "2019-02-12 03:03:34",
    };
    writer.write(row2);
  }
  writer.close();
  // Update intField 0 -> 20; must succeed while carrying the date/timestamp
  // columns through the rewrite.
  CarbonIUD.getInstance().update(path, "intField", "0", "intField", "20").commit();
  CarbonReader reader =
      CarbonReader.builder(path).projection(new String[] { "intField", "dateField", "timeField" })
          .build();
  int rowCount = 0;
  while (reader.hasNext()) {
    Object[] row = (Object[]) reader.readNextRow();
    // Use a JUnit assertion, not a bare `assert`: the `assert` statement is a
    // no-op unless the JVM runs with -ea, so the original check could never
    // fail in a normal test run. No row may retain the pre-update value 0.
    Assert.assertNotEquals(0, (int) row[0]);
    rowCount++;
  }
  // JUnit convention: expected value first, actual second — the original had
  // the arguments reversed, which yields misleading failure messages.
  Assert.assertEquals(10, rowCount);
  reader.close();
  FileUtils.deleteDirectory(new File(path));
}

@Test
public void testDeleteWithConditionalExpressions() throws Exception {
String path = "./testWriteFiles";
Expand Down