diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDBMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java similarity index 82% rename from data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDBMode.java rename to data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java index b7326ccb47..4f9e79ce61 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDBMode.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/ScalarDbMode.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.core; /** The available modes a ScalarDB instance can run in */ -public enum ScalarDBMode { +public enum ScalarDbMode { STORAGE, TRANSACTION } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java index 81c7ab9ace..9e0dc4ba46 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/CsvExportManager.java @@ -3,7 +3,7 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.TableMetadata; import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; import com.scalar.db.dataloader.core.util.CsvUtil; import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils; import java.io.IOException; @@ -13,7 +13,7 @@ public class CsvExportManager extends ExportManager { public CsvExportManager( - DistributedStorage storage, ScalarDBDao dao, ProducerTaskFactory producerTaskFactory) { + DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); } diff 
--git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java index f66efdc9de..13f33a319a 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/ExportManager.java @@ -9,8 +9,8 @@ import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory; import com.scalar.db.dataloader.core.dataexport.validation.ExportOptionsValidationException; import com.scalar.db.dataloader.core.dataexport.validation.ExportOptionsValidator; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.dataloader.core.util.TableMetadataUtil; import com.scalar.db.io.DataType; import java.io.BufferedWriter; @@ -33,7 +33,7 @@ public abstract class ExportManager { private static final Logger logger = LoggerFactory.getLogger(ExportManager.class); private final DistributedStorage storage; - private final ScalarDBDao dao; + private final ScalarDbDao dao; private final ProducerTaskFactory producerTaskFactory; private final Object lock = new Object(); @@ -115,7 +115,7 @@ public ExportReport startExport( } finally { bufferedWriter.flush(); } - } catch (ExportOptionsValidationException | IOException | ScalarDBDaoException e) { + } catch (ExportOptionsValidationException | IOException | ScalarDbDaoException e) { logger.error("Error during export: {}", e.getMessage()); } return exportReport; @@ -215,11 +215,11 @@ private void handleTransactionMetadata(ExportOptions exportOptions, TableMetadat * @param dao ScalarDB dao object * @param storage distributed storage object * @return created scanner - * 
@throws ScalarDBDaoException throws if any issue occurs in creating scanner object + * @throws ScalarDbDaoException throws if any issue occurs in creating scanner object */ private Scanner createScanner( - ExportOptions exportOptions, ScalarDBDao dao, DistributedStorage storage) - throws ScalarDBDaoException { + ExportOptions exportOptions, ScalarDbDao dao, DistributedStorage storage) + throws ScalarDbDaoException { boolean isScanAll = exportOptions.getScanPartitionKey() == null; if (isScanAll) { return dao.createScanner( diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java index 13e5804524..34e382dd5e 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonExportManager.java @@ -3,13 +3,13 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.TableMetadata; import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; import java.io.IOException; import java.io.Writer; public class JsonExportManager extends ExportManager { public JsonExportManager( - DistributedStorage storage, ScalarDBDao dao, ProducerTaskFactory producerTaskFactory) { + DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java index 98f514cbec..8bc5fabe07 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java +++ 
b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManager.java @@ -3,13 +3,13 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.TableMetadata; import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; import java.io.IOException; import java.io.Writer; public class JsonLineExportManager extends ExportManager { public JsonLineExportManager( - DistributedStorage storage, ScalarDBDao dao, ProducerTaskFactory producerTaskFactory) { + DistributedStorage storage, ScalarDbDao dao, ProducerTaskFactory producerTaskFactory) { super(storage, dao, producerTaskFactory); } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java index f1984d6c26..9edbb478f7 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportManager.java @@ -3,8 +3,8 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.api.TableMetadata; -import com.scalar.db.dataloader.core.ScalarDBMode; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; +import com.scalar.db.dataloader.core.ScalarDbMode; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; import com.scalar.db.dataloader.core.dataimport.processor.ImportProcessor; import com.scalar.db.dataloader.core.dataimport.processor.ImportProcessorFactory; @@ -43,7 +43,7 @@ public class ImportManager implements ImportEventListener { @NonNull private final ImportOptions importOptions; private final 
ImportProcessorFactory importProcessorFactory; private final List listeners = new ArrayList<>(); - private final ScalarDBMode scalarDBMode; + private final ScalarDbMode scalarDbMode; private final DistributedStorage distributedStorage; private final DistributedTransactionManager distributedTransactionManager; private final ConcurrentHashMap importDataChunkStatusMap = @@ -62,10 +62,10 @@ public class ImportManager implements ImportEventListener { public ConcurrentHashMap startImport() { ImportProcessorParams params = ImportProcessorParams.builder() - .scalarDBMode(scalarDBMode) + .scalarDbMode(scalarDbMode) .importOptions(importOptions) .tableMetadataByTableName(tableMetadata) - .dao(new ScalarDBDao()) + .dao(new ScalarDbDao()) .distributedTransactionManager(distributedTransactionManager) .distributedStorage(distributedStorage) .tableColumnDataTypes(getTableColumnDataTypes()) diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDao.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java similarity index 92% rename from data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDao.java rename to data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java index a4497cb09e..e704f96d6d 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDao.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDao.java @@ -26,10 +26,10 @@ import org.slf4j.LoggerFactory; /** The generic DAO that is used to scan ScalarDB data */ -public class ScalarDBDao { +public class ScalarDbDao { /* Class logger */ - private static final Logger logger = LoggerFactory.getLogger(ScalarDBDao.class); + private static final Logger logger = LoggerFactory.getLogger(ScalarDbDao.class); private static final String GET_COMPLETED_MSG = "GET completed for %s"; private static final 
String PUT_COMPLETED_MSG = "PUT completed for %s"; private static final String SCAN_START_MSG = "SCAN started..."; @@ -44,7 +44,7 @@ public class ScalarDBDao { * @param clusteringKey Optional clustering key for get * @param storage Distributed storage for ScalarDB connection that is running in storage mode. * @return Optional get result - * @throws ScalarDBDaoException if something goes wrong while reading the data + * @throws ScalarDbDaoException if something goes wrong while reading the data */ public Optional get( String namespace, @@ -52,7 +52,7 @@ public Optional get( Key partitionKey, Key clusteringKey, DistributedStorage storage) - throws ScalarDBDaoException { + throws ScalarDbDaoException { // Retrieving the key data for logging String loggingKey = keysToString(partitionKey, clusteringKey); @@ -63,7 +63,7 @@ public Optional get( logger.info(String.format(GET_COMPLETED_MSG, loggingKey)); return result; } catch (ExecutionException e) { - throw new ScalarDBDaoException("error GET " + loggingKey, e); + throw new ScalarDbDaoException("error GET " + loggingKey, e); } } @@ -76,7 +76,7 @@ public Optional get( * @param clusteringKey Optional clustering key for get * @param transaction ScalarDB transaction instance * @return Optional get result - * @throws ScalarDBDaoException if something goes wrong while reading the data + * @throws ScalarDbDaoException if something goes wrong while reading the data */ public Optional get( String namespace, @@ -84,7 +84,7 @@ public Optional get( Key partitionKey, Key clusteringKey, DistributedTransaction transaction) - throws ScalarDBDaoException { + throws ScalarDbDaoException { Get get = createGetWith(namespace, table, partitionKey, clusteringKey); // Retrieving the key data for logging @@ -94,7 +94,7 @@ public Optional get( logger.info(String.format(GET_COMPLETED_MSG, loggingKey)); return result; } catch (CrudException e) { - throw new ScalarDBDaoException("error GET " + loggingKey, e.getCause()); + throw new 
ScalarDbDaoException("error GET " + loggingKey, e.getCause()); } } @@ -107,7 +107,7 @@ public Optional get( * @param clusteringKey Optional clustering key * @param columns List of column values to be inserted or updated * @param transaction ScalarDB transaction instance - * @throws ScalarDBDaoException if something goes wrong while executing the transaction + * @throws ScalarDbDaoException if something goes wrong while executing the transaction */ public void put( String namespace, @@ -116,13 +116,13 @@ public void put( Key clusteringKey, List> columns, DistributedTransaction transaction) - throws ScalarDBDaoException { + throws ScalarDbDaoException { Put put = createPutWith(namespace, table, partitionKey, clusteringKey, columns); try { transaction.put(put); } catch (CrudException e) { - throw new ScalarDBDaoException( + throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); } logger.info(String.format(PUT_COMPLETED_MSG, keysToString(partitionKey, clusteringKey))); @@ -137,7 +137,7 @@ public void put( * @param clusteringKey Optional clustering key * @param columns List of column values to be inserted or updated * @param storage Distributed storage for ScalarDB connection that is running in storage mode - * @throws ScalarDBDaoException if something goes wrong while executing the transaction + * @throws ScalarDbDaoException if something goes wrong while executing the transaction */ public void put( String namespace, @@ -146,12 +146,12 @@ public void put( Key clusteringKey, List> columns, DistributedStorage storage) - throws ScalarDBDaoException { + throws ScalarDbDaoException { Put put = createPutWith(namespace, table, partitionKey, clusteringKey, columns); try { storage.put(put); } catch (ExecutionException e) { - throw new ScalarDBDaoException( + throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_CRUD_EXCEPTION.buildMessage(e.getMessage()), e); } logger.info(String.format(PUT_COMPLETED_MSG, 
keysToString(partitionKey, clusteringKey))); @@ -169,7 +169,7 @@ public void put( * @param limit Scan limit value * @param storage Distributed storage for ScalarDB connection that is running in storage mode * @return List of ScalarDB scan results - * @throws ScalarDBDaoException if scan fails + * @throws ScalarDbDaoException if scan fails */ public List scan( String namespace, @@ -180,7 +180,7 @@ public List scan( List projections, int limit, DistributedStorage storage) - throws ScalarDBDaoException { + throws ScalarDbDaoException { // Create scan Scan scan = createScan(namespace, table, partitionKey, range, sorts, projections, limit); @@ -193,7 +193,7 @@ public List scan( return allResults; } } catch (ExecutionException | IOException e) { - throw new ScalarDBDaoException( + throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); } } @@ -211,7 +211,7 @@ public List scan( * @param transaction Distributed Transaction manager for ScalarDB connection that is * running in * transaction mode * @return List of ScalarDB scan results - * @throws ScalarDBDaoException if scan fails + * @throws ScalarDbDaoException if scan fails */ public List scan( String namespace, @@ -222,7 +222,7 @@ public List scan( List projections, int limit, DistributedTransaction transaction) - throws ScalarDBDaoException { + throws ScalarDbDaoException { // Create scan Scan scan = createScan(namespace, table, partitionKey, range, sorts, projections, limit); @@ -236,7 +236,7 @@ public List scan( } catch (CrudException | NoSuchElementException e) { // No such element Exception is thrown when the scan is done in transaction mode but // ScalarDB is running in storage mode - throw new ScalarDBDaoException( + throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); } } @@ -250,7 +250,7 @@ public List scan( * @param limit Scan limit value * @param storage Distributed storage for ScalarDB connection that is running in 
storage mode * @return ScalarDB Scanner object - * @throws ScalarDBDaoException if scan fails + * @throws ScalarDbDaoException if scan fails */ public Scanner createScanner( String namespace, @@ -258,13 +258,13 @@ public Scanner createScanner( List projectionColumns, int limit, DistributedStorage storage) - throws ScalarDBDaoException { + throws ScalarDbDaoException { Scan scan = createScan(namespace, table, null, null, new ArrayList<>(), projectionColumns, limit); try { return storage.scan(scan); } catch (ExecutionException e) { - throw new ScalarDBDaoException( + throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); } } @@ -281,7 +281,7 @@ public Scanner createScanner( * @param limit Scan limit value * @param storage Distributed storage for ScalarDB connection that is running in storage mode * @return ScalarDB Scanner object - * @throws ScalarDBDaoException if scan fails + * @throws ScalarDbDaoException if scan fails */ public Scanner createScanner( String namespace, @@ -292,13 +292,13 @@ public Scanner createScanner( @Nullable List projectionColumns, int limit, DistributedStorage storage) - throws ScalarDBDaoException { + throws ScalarDbDaoException { Scan scan = createScan(namespace, table, partitionKey, scanRange, sortOrders, projectionColumns, limit); try { return storage.scan(scan); } catch (ExecutionException e) { - throw new ScalarDBDaoException( + throw new ScalarDbDaoException( CoreError.DATA_LOADER_ERROR_SCAN.buildMessage(e.getMessage()), e); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDaoException.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDaoException.java similarity index 70% rename from data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDaoException.java rename to data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDaoException.java index 
1e50affb07..97a1aa5f0f 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDaoException.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDaoException.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.core.dataimport.dao; /** A custom DAO exception that encapsulates errors thrown by ScalarDB operations */ -public class ScalarDBDaoException extends Exception { +public class ScalarDbDaoException extends Exception { /** * Class constructor @@ -9,7 +9,7 @@ public class ScalarDBDaoException extends Exception { * @param message error message * @param cause reason for exception */ - public ScalarDBDaoException(String message, Throwable cause) { + public ScalarDbDaoException(String message, Throwable cause) { super(message, cause); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java index 1a317a1a82..8da8850430 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessor.java @@ -1,7 +1,7 @@ package com.scalar.db.dataloader.core.dataimport.processor; import com.scalar.db.api.DistributedTransaction; -import com.scalar.db.dataloader.core.ScalarDBMode; +import com.scalar.db.dataloader.core.ScalarDbMode; import com.scalar.db.dataloader.core.dataimport.ImportEventListener; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunk; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; @@ -49,7 +49,7 @@ public abstract class ImportProcessor { * *

This method reads data from the provided {@link BufferedReader}, processes it in chunks, and * batches transactions according to the specified sizes. The processing can be done in either - * transactional or storage mode, depending on the configured {@link ScalarDBMode}. + * transactional or storage mode, depending on the configured {@link ScalarDbMode}. * * @param dataChunkSize the number of records to include in each data chunk for parallel * processing @@ -290,7 +290,7 @@ private ImportTaskResult processStorageRecord(ImportDataChunk dataChunk, ImportR /** * Processes a complete data chunk using parallel execution. The processing mode (transactional or - * storage) is determined by the configured {@link ScalarDBMode}. + * storage) is determined by the configured {@link ScalarDbMode}. * * @param dataChunk the data chunk to process * @param transactionBatchSize the size of transaction batches (used only in transaction mode) @@ -306,7 +306,7 @@ protected ImportDataChunkStatus processDataChunk( .build(); notifyDataChunkStarted(status); ImportDataChunkStatus importDataChunkStatus; - if (params.getScalarDBMode() == ScalarDBMode.TRANSACTION) { + if (params.getScalarDbMode() == ScalarDbMode.TRANSACTION) { importDataChunkStatus = processDataChunkWithTransactions(dataChunk, transactionBatchSize); } else { importDataChunkStatus = processDataChunkWithoutTransactions(dataChunk); diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java index 36b96f62d5..688b0ddf97 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/processor/ImportProcessorParams.java @@ -3,9 +3,9 @@ import com.scalar.db.api.DistributedStorage; import 
com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.api.TableMetadata; -import com.scalar.db.dataloader.core.ScalarDBMode; +import com.scalar.db.dataloader.core.ScalarDbMode; import com.scalar.db.dataloader.core.dataimport.ImportOptions; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; import java.util.Map; import lombok.Builder; import lombok.Value; @@ -21,7 +21,7 @@ @Value public class ImportProcessorParams { /** The operational mode of ScalarDB (transaction or storage mode). */ - ScalarDBMode scalarDBMode; + ScalarDbMode scalarDbMode; /** Configuration options for the import operation. */ ImportOptions importOptions; @@ -33,7 +33,7 @@ public class ImportProcessorParams { TableColumnDataTypes tableColumnDataTypes; /** Data Access Object for ScalarDB operations. */ - ScalarDBDao dao; + ScalarDbDao dao; /** Storage interface for non-transactional operations. */ DistributedStorage distributedStorage; diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportStorageTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportStorageTask.java index 98d982cac0..e847cc3a34 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportStorageTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportStorageTask.java @@ -2,7 +2,7 @@ import com.scalar.db.api.DistributedStorage; import com.scalar.db.api.Result; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.io.Column; import com.scalar.db.io.Key; import java.util.List; @@ -55,13 +55,13 @@ public ImportStorageTask(ImportTaskParams params, DistributedStorage storage) { * @param clusteringKey the clustering key for further record identification within the 
partition * @return an {@link Optional} containing the {@link Result} if the record exists, otherwise an * empty {@link Optional} - * @throws ScalarDBDaoException if an error occurs during the retrieval operation, such as + * @throws ScalarDbDaoException if an error occurs during the retrieval operation, such as * connection issues or invalid table/namespace */ @Override protected Optional getDataRecord( String namespace, String tableName, Key partitionKey, Key clusteringKey) - throws ScalarDBDaoException { + throws ScalarDbDaoException { return params.getDao().get(namespace, tableName, partitionKey, clusteringKey, this.storage); } @@ -77,7 +77,7 @@ protected Optional getDataRecord( * @param partitionKey the partition key determining where the record will be stored * @param clusteringKey the clustering key for organizing records within the partition * @param columns the list of columns containing the record's data to be saved - * @throws ScalarDBDaoException if an error occurs during the save operation, such as connection + * @throws ScalarDbDaoException if an error occurs during the save operation, such as connection * issues, invalid data types, or constraint violations */ @Override @@ -87,7 +87,7 @@ protected void saveRecord( Key partitionKey, Key clusteringKey, List> columns) - throws ScalarDBDaoException { + throws ScalarDbDaoException { params.getDao().put(namespace, tableName, partitionKey, clusteringKey, columns, this.storage); } } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java index 3be177a00a..26b4993977 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTask.java @@ -9,7 +9,7 @@ import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFile; import 
com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTable; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTableFieldMapping; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.dataloader.core.dataimport.processor.TableColumnDataTypes; import com.scalar.db.dataloader.core.dataimport.task.mapping.ImportDataMapping; import com.scalar.db.dataloader.core.dataimport.task.result.ImportTargetResult; @@ -248,7 +248,7 @@ private ImportTargetResult importIntoSingleTable( optionalScalarDBResult = getDataRecord( namespace, table, optionalPartitionKey.get(), optionalClusteringKey.orElse(null)); - } catch (ScalarDBDaoException e) { + } catch (ScalarDbDaoException e) { return ImportTargetResult.builder() .namespace(namespace) .tableName(table) @@ -335,7 +335,7 @@ && shouldRevalidateMissingColumns(importOptions, checkForMissingColumns)) { .status(ImportTargetResultStatus.SAVED) .build(); - } catch (ScalarDBDaoException e) { + } catch (ScalarDbDaoException e) { return ImportTargetResult.builder() .namespace(namespace) .tableName(table) @@ -442,11 +442,11 @@ private boolean shouldFailForMissingData( * @param partitionKey the partition key for the record * @param clusteringKey the clustering key for the record (can be null) * @return Optional containing the Result if found, empty if not found - * @throws ScalarDBDaoException if there is an error accessing the database + * @throws ScalarDbDaoException if there is an error accessing the database */ protected abstract Optional getDataRecord( String namespace, String tableName, Key partitionKey, Key clusteringKey) - throws ScalarDBDaoException; + throws ScalarDbDaoException; /** * Saves a record to the database, either as an insert or update operation. 
@@ -456,7 +456,7 @@ protected abstract Optional getDataRecord( * @param partitionKey the partition key for the record * @param clusteringKey the clustering key for the record (can be null) * @param columns the columns and their values to be saved - * @throws ScalarDBDaoException if there is an error saving to the database + * @throws ScalarDbDaoException if there is an error saving to the database */ protected abstract void saveRecord( String namespace, @@ -464,5 +464,5 @@ protected abstract void saveRecord( Key partitionKey, Key clusteringKey, List> columns) - throws ScalarDBDaoException; + throws ScalarDbDaoException; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java index eafe3a42ae..0026e23d16 100644 --- a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskParams.java @@ -3,7 +3,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.scalar.db.api.TableMetadata; import com.scalar.db.dataloader.core.dataimport.ImportOptions; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; import com.scalar.db.dataloader.core.dataimport.processor.TableColumnDataTypes; import java.util.Map; import lombok.Builder; @@ -37,5 +37,5 @@ public class ImportTaskParams { @NonNull TableColumnDataTypes tableColumnDataTypes; /** Data Access Object for interacting with ScalarDB */ - @NonNull ScalarDBDao dao; + @NonNull ScalarDbDao dao; } diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java index 449270d929..ed901651ae 100644 --- 
a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTransactionalTask.java @@ -2,7 +2,7 @@ import com.scalar.db.api.DistributedTransaction; import com.scalar.db.api.Result; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.exception.transaction.AbortException; import com.scalar.db.exception.transaction.TransactionException; import com.scalar.db.io.Column; @@ -49,13 +49,13 @@ public ImportTransactionalTask(ImportTaskParams params, DistributedTransaction t * @param clusteringKey the clustering key for further record identification within the partition * @return an {@link Optional} containing the {@link Result} if the record exists, otherwise an * empty {@link Optional} - * @throws ScalarDBDaoException if an error occurs during the database operation or if the + * @throws ScalarDbDaoException if an error occurs during the database operation or if the * transaction encounters any issues */ @Override protected Optional getDataRecord( String namespace, String tableName, Key partitionKey, Key clusteringKey) - throws ScalarDBDaoException { + throws ScalarDbDaoException { return params.getDao().get(namespace, tableName, partitionKey, clusteringKey, transaction); } @@ -70,7 +70,7 @@ protected Optional getDataRecord( * @param partitionKey the partition key determining where the record will be stored * @param clusteringKey the clustering key for ordering/organizing records within the partition * @param columns the list of columns containing the actual data to be saved - * @throws ScalarDBDaoException if an error occurs during the database operation or if the + * @throws ScalarDbDaoException if an error occurs during the database operation or if the * transaction encounters any issues */ @Override @@ -80,7 +80,7 @@ 
protected void saveRecord( Key partitionKey, Key clusteringKey, List<Column<?>> columns) - throws ScalarDBDaoException { + throws ScalarDbDaoException { params.getDao().put(namespace, tableName, partitionKey, clusteringKey, columns, transaction); } diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/CsvExportManagerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/CsvExportManagerTest.java index 86a943a9ec..ca65c10010 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/CsvExportManagerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/CsvExportManagerTest.java @@ -9,8 +9,8 @@ import com.scalar.db.dataloader.core.ScanRange; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.io.Column; import com.scalar.db.io.IntColumn; import com.scalar.db.io.Key; @@ -33,7 +33,7 @@ public class CsvExportManagerTest { TableMetadata mockData; DistributedStorage storage; - @Spy ScalarDBDao dao; + @Spy ScalarDbDao dao; ProducerTaskFactory producerTaskFactory; ExportManager exportManager; @@ -41,13 +41,13 @@ public class CsvExportManagerTest { void setup() { storage = Mockito.mock(DistributedStorage.class); mockData = UnitTestUtils.createTestTableMetadata(); - dao = Mockito.mock(ScalarDBDao.class); + dao = Mockito.mock(ScalarDbDao.class); producerTaskFactory = new ProducerTaskFactory(null, false, true); } @Test void startExport_givenValidDataWithoutPartitionKey_shouldGenerateOutputFile() - throws IOException, ScalarDBDaoException { + throws IOException, ScalarDbDaoException {
exportManager = new JsonLineExportManager(storage, dao, producerTaskFactory); Scanner scanner = Mockito.mock(Scanner.class); String filePath = Paths.get("").toAbsolutePath() + "/output.csv"; @@ -85,7 +85,7 @@ void startExport_givenValidDataWithoutPartitionKey_shouldGenerateOutputFile() @Test void startExport_givenPartitionKey_shouldGenerateOutputFile() - throws IOException, ScalarDBDaoException { + throws IOException, ScalarDbDaoException { producerTaskFactory = new ProducerTaskFactory(",", false, false); exportManager = new CsvExportManager(storage, dao, producerTaskFactory); Scanner scanner = Mockito.mock(Scanner.class); diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonExportManagerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonExportManagerTest.java index ac620458a4..c1ef7ead1a 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonExportManagerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonExportManagerTest.java @@ -9,8 +9,8 @@ import com.scalar.db.dataloader.core.ScanRange; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.io.Column; import com.scalar.db.io.IntColumn; import com.scalar.db.io.Key; @@ -34,7 +34,7 @@ public class JsonExportManagerTest { TableMetadata mockData; DistributedStorage storage; - @Spy ScalarDBDao dao; + @Spy ScalarDbDao dao; ProducerTaskFactory producerTaskFactory; ExportManager exportManager; @@ -42,13 +42,13 @@ public class JsonExportManagerTest { void setup() { storage = 
Mockito.mock(DistributedStorage.class); mockData = UnitTestUtils.createTestTableMetadata(); - dao = Mockito.mock(ScalarDBDao.class); + dao = Mockito.mock(ScalarDbDao.class); producerTaskFactory = new ProducerTaskFactory(null, false, true); } @Test void startExport_givenValidDataWithoutPartitionKey_shouldGenerateOutputFile() - throws IOException, ScalarDBDaoException { + throws IOException, ScalarDbDaoException { exportManager = new JsonExportManager(storage, dao, producerTaskFactory); Scanner scanner = Mockito.mock(Scanner.class); String filePath = Paths.get("").toAbsolutePath() + "/output.json"; @@ -87,7 +87,7 @@ void startExport_givenValidDataWithoutPartitionKey_shouldGenerateOutputFile() @Test void startExport_givenPartitionKey_shouldGenerateOutputFile() - throws IOException, ScalarDBDaoException { + throws IOException, ScalarDbDaoException { exportManager = new JsonExportManager(storage, dao, producerTaskFactory); Scanner scanner = Mockito.mock(Scanner.class); String filePath = Paths.get("").toAbsolutePath() + "/output.json"; diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManagerTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManagerTest.java index 36f01e7c62..31e4326a33 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManagerTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataexport/JsonLineExportManagerTest.java @@ -9,8 +9,8 @@ import com.scalar.db.dataloader.core.ScanRange; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; 
import com.scalar.db.io.Column; import com.scalar.db.io.IntColumn; import com.scalar.db.io.Key; @@ -33,7 +33,7 @@ public class JsonLineExportManagerTest { TableMetadata mockData; DistributedStorage storage; - @Spy ScalarDBDao dao; + @Spy ScalarDbDao dao; ProducerTaskFactory producerTaskFactory; ExportManager exportManager; @@ -41,13 +41,13 @@ public class JsonLineExportManagerTest { void setup() { storage = Mockito.mock(DistributedStorage.class); mockData = UnitTestUtils.createTestTableMetadata(); - dao = Mockito.mock(ScalarDBDao.class); + dao = Mockito.mock(ScalarDbDao.class); producerTaskFactory = new ProducerTaskFactory(null, false, true); } @Test void startExport_givenValidDataWithoutPartitionKey_shouldGenerateOutputFile() - throws IOException, ScalarDBDaoException { + throws IOException, ScalarDbDaoException { exportManager = new JsonLineExportManager(storage, dao, producerTaskFactory); Scanner scanner = Mockito.mock(Scanner.class); String filePath = Paths.get("").toAbsolutePath() + "/output.jsonl"; @@ -86,7 +86,7 @@ void startExport_givenValidDataWithoutPartitionKey_shouldGenerateOutputFile() @Test void startExport_givenPartitionKey_shouldGenerateOutputFile() - throws IOException, ScalarDBDaoException { + throws IOException, ScalarDbDaoException { exportManager = new JsonLineExportManager(storage, dao, producerTaskFactory); Scanner scanner = Mockito.mock(Scanner.class); String filePath = Paths.get("").toAbsolutePath() + "/output.jsonl"; diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDaoTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDaoTest.java similarity index 98% rename from data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDaoTest.java rename to data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDaoTest.java index c46843156f..cc1798e2f8 100644 --- 
a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDBDaoTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/dao/ScalarDbDaoTest.java @@ -11,14 +11,14 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -class ScalarDBDaoTest { +class ScalarDbDaoTest { private static final int TEST_VALUE_INT_MIN = 1; - private ScalarDBDao dao; + private ScalarDbDao dao; @BeforeEach void setUp() { - this.dao = new ScalarDBDao(); + this.dao = new ScalarDbDao(); } @Test diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java index 94acd20ace..ff57e42bac 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/CsvImportProcessorTest.java @@ -8,13 +8,13 @@ import com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.api.TableMetadata; import com.scalar.db.dataloader.core.FileFormat; -import com.scalar.db.dataloader.core.ScalarDBMode; +import com.scalar.db.dataloader.core.ScalarDbMode; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.dataimport.ImportMode; import com.scalar.db.dataloader.core.dataimport.ImportOptions; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileValidationLevel; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; import 
com.scalar.db.dataloader.core.dataimport.log.LogMode; import com.scalar.db.exception.transaction.TransactionException; @@ -28,19 +28,18 @@ class CsvImportProcessorTest { @Mock private ImportProcessorParams params; - @Mock ScalarDBMode scalarDBMode; @Mock ImportOptions importOptions; @Mock Map tableMetadataByTableName; @Mock TableColumnDataTypes tableColumnDataTypes; - ScalarDBDao dao; + ScalarDbDao dao; @Mock DistributedStorage distributedStorage; DistributedTransactionManager distributedTransactionManager; CsvImportProcessor csvImportProcessor; @BeforeEach - void setup() throws ScalarDBDaoException, TransactionException { - dao = Mockito.mock(ScalarDBDao.class); + void setup() throws ScalarDbDaoException, TransactionException { + dao = Mockito.mock(ScalarDbDao.class); distributedTransactionManager = mock(DistributedTransactionManager.class); DistributedTransaction distributedTransaction = mock(DistributedTransaction.class); when(distributedTransactionManager.start()).thenReturn(distributedTransaction); @@ -83,12 +82,11 @@ void setup() throws ScalarDBDaoException, TransactionException { void test_importProcessWithStorage() { params = ImportProcessorParams.builder() - .scalarDBMode(ScalarDBMode.STORAGE) + .scalarDbMode(ScalarDbMode.STORAGE) .importOptions(importOptions) .dao(dao) .distributedStorage(distributedStorage) .distributedTransactionManager(distributedTransactionManager) - .scalarDBMode(scalarDBMode) .tableColumnDataTypes(tableColumnDataTypes) .tableMetadataByTableName(tableMetadataByTableName) .build(); @@ -103,12 +101,11 @@ void test_importProcessWithStorage() { void test_importProcessWithTransaction() { params = ImportProcessorParams.builder() - .scalarDBMode(ScalarDBMode.TRANSACTION) + .scalarDbMode(ScalarDbMode.TRANSACTION) .importOptions(importOptions) .dao(dao) .distributedStorage(distributedStorage) .distributedTransactionManager(distributedTransactionManager) - .scalarDBMode(scalarDBMode) .tableColumnDataTypes(tableColumnDataTypes) 
.tableMetadataByTableName(tableMetadataByTableName) .build(); diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java index aa9a106a0c..44c57874c2 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonImportProcessorTest.java @@ -8,13 +8,13 @@ import com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.api.TableMetadata; import com.scalar.db.dataloader.core.FileFormat; -import com.scalar.db.dataloader.core.ScalarDBMode; +import com.scalar.db.dataloader.core.ScalarDbMode; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.dataimport.ImportMode; import com.scalar.db.dataloader.core.dataimport.ImportOptions; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileValidationLevel; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; import com.scalar.db.dataloader.core.dataimport.log.LogMode; import com.scalar.db.exception.transaction.TransactionException; @@ -28,19 +28,18 @@ class JsonImportProcessorTest { @Mock private ImportProcessorParams params; - @Mock ScalarDBMode scalarDBMode; @Mock ImportOptions importOptions; @Mock Map tableMetadataByTableName; @Mock TableColumnDataTypes tableColumnDataTypes; - ScalarDBDao dao; + ScalarDbDao dao; @Mock DistributedStorage distributedStorage; DistributedTransactionManager distributedTransactionManager; 
JsonImportProcessor jsonImportProcessor; @BeforeEach - void setup() throws ScalarDBDaoException, TransactionException { - dao = Mockito.mock(ScalarDBDao.class); + void setup() throws ScalarDbDaoException, TransactionException { + dao = Mockito.mock(ScalarDbDao.class); distributedTransactionManager = mock(DistributedTransactionManager.class); DistributedTransaction distributedTransaction = mock(DistributedTransaction.class); when(distributedTransactionManager.start()).thenReturn(distributedTransaction); @@ -83,12 +82,11 @@ void setup() throws ScalarDBDaoException, TransactionException { void test_importProcessWithStorage() { params = ImportProcessorParams.builder() - .scalarDBMode(ScalarDBMode.STORAGE) + .scalarDbMode(ScalarDbMode.STORAGE) .importOptions(importOptions) .dao(dao) .distributedStorage(distributedStorage) .distributedTransactionManager(distributedTransactionManager) - .scalarDBMode(scalarDBMode) .tableColumnDataTypes(tableColumnDataTypes) .tableMetadataByTableName(tableMetadataByTableName) .build(); @@ -103,12 +101,11 @@ void test_importProcessWithStorage() { void test_importProcessWithTransaction() { params = ImportProcessorParams.builder() - .scalarDBMode(ScalarDBMode.TRANSACTION) + .scalarDbMode(ScalarDbMode.TRANSACTION) .importOptions(importOptions) .dao(dao) .distributedStorage(distributedStorage) .distributedTransactionManager(distributedTransactionManager) - .scalarDBMode(scalarDBMode) .tableColumnDataTypes(tableColumnDataTypes) .tableMetadataByTableName(tableMetadataByTableName) .build(); diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java index e3db391756..4c0e755aac 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java +++ 
b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/processor/JsonLinesImportProcessorTest.java @@ -8,13 +8,13 @@ import com.scalar.db.api.DistributedTransactionManager; import com.scalar.db.api.TableMetadata; import com.scalar.db.dataloader.core.FileFormat; -import com.scalar.db.dataloader.core.ScalarDBMode; +import com.scalar.db.dataloader.core.ScalarDbMode; import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.dataimport.ImportMode; import com.scalar.db.dataloader.core.dataimport.ImportOptions; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileValidationLevel; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDao; -import com.scalar.db.dataloader.core.dataimport.dao.ScalarDBDaoException; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao; +import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException; import com.scalar.db.dataloader.core.dataimport.datachunk.ImportDataChunkStatus; import com.scalar.db.dataloader.core.dataimport.log.LogMode; import com.scalar.db.exception.transaction.TransactionException; @@ -28,19 +28,18 @@ class JsonLinesImportProcessorTest { @Mock private ImportProcessorParams params; - @Mock ScalarDBMode scalarDBMode; @Mock ImportOptions importOptions; @Mock Map tableMetadataByTableName; @Mock TableColumnDataTypes tableColumnDataTypes; - ScalarDBDao dao; + ScalarDbDao dao; @Mock DistributedStorage distributedStorage; DistributedTransactionManager distributedTransactionManager; JsonLinesImportProcessor jsonLinesImportProcessor; @BeforeEach - void setup() throws ScalarDBDaoException, TransactionException { - dao = Mockito.mock(ScalarDBDao.class); + void setup() throws ScalarDbDaoException, TransactionException { + dao = Mockito.mock(ScalarDbDao.class); distributedTransactionManager = mock(DistributedTransactionManager.class); DistributedTransaction distributedTransaction = mock(DistributedTransaction.class); 
when(distributedTransactionManager.start()).thenReturn(distributedTransaction); @@ -83,12 +82,11 @@ void setup() throws ScalarDBDaoException, TransactionException { void test_importProcessWithStorage() { params = ImportProcessorParams.builder() - .scalarDBMode(ScalarDBMode.STORAGE) + .scalarDbMode(ScalarDbMode.STORAGE) .importOptions(importOptions) .dao(dao) .distributedStorage(distributedStorage) .distributedTransactionManager(distributedTransactionManager) - .scalarDBMode(scalarDBMode) .tableColumnDataTypes(tableColumnDataTypes) .tableMetadataByTableName(tableMetadataByTableName) .build(); @@ -103,12 +101,11 @@ void test_importProcessWithStorage() { void test_importProcessWithTransaction() { params = ImportProcessorParams.builder() - .scalarDBMode(ScalarDBMode.TRANSACTION) + .scalarDbMode(ScalarDbMode.TRANSACTION) .importOptions(importOptions) .dao(dao) .distributedStorage(distributedStorage) .distributedTransactionManager(distributedTransactionManager) - .scalarDBMode(scalarDBMode) .tableColumnDataTypes(tableColumnDataTypes) .tableMetadataByTableName(tableMetadataByTableName) .build();