From 780bf23d5c89452dd062be4fab9e2e56d50bb9e2 Mon Sep 17 00:00:00 2001 From: sboikov Date: Mon, 19 Sep 2016 18:19:33 +0300 Subject: [PATCH 01/69] ignite-3810 Fixed hang in FileSwapSpaceSpi when too large value is stored --- .../spi/swapspace/file/FileSwapSpaceSpi.java | 38 ++++++-- .../CacheSwapUnswapGetTestSmallQueueSize.java | 35 ++++++++ .../file/GridFileSwapSpaceSpiSelfTest.java | 89 +++++++++++++++++++ .../testsuites/IgniteCacheTestSuite4.java | 2 + 4 files changed, 158 insertions(+), 6 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSwapUnswapGetTestSmallQueueSize.java diff --git a/modules/core/src/main/java/org/apache/ignite/spi/swapspace/file/FileSwapSpaceSpi.java b/modules/core/src/main/java/org/apache/ignite/spi/swapspace/file/FileSwapSpaceSpi.java index 8809f08538725..9be5b93cc6789 100644 --- a/modules/core/src/main/java/org/apache/ignite/spi/swapspace/file/FileSwapSpaceSpi.java +++ b/modules/core/src/main/java/org/apache/ignite/spi/swapspace/file/FileSwapSpaceSpi.java @@ -639,7 +639,7 @@ private void notifyListener(int evtType, @Nullable String spaceName) { if (space == null && create) { validateName(name); - Space old = spaces.putIfAbsent(masked, space = new Space(masked)); + Space old = spaces.putIfAbsent(masked, space = new Space(masked, log)); if (old != null) space = old; @@ -833,13 +833,21 @@ private static class SwapValuesQueue { /** */ private final int maxSize; + /** */ + private final IgniteLogger log; + + /** */ + private boolean queueSizeWarn; + /** * @param minTakeSize Min size. * @param maxSize Max size. + * @param log logger */ - private SwapValuesQueue(int minTakeSize, int maxSize) { + private SwapValuesQueue(int minTakeSize, int maxSize, IgniteLogger log) { this.minTakeSize = minTakeSize; this.maxSize = maxSize; + this.log = log; } /** @@ -852,8 +860,24 @@ public void add(SwapValue val) throws IgniteSpiException { lock.lock(); try { - while (size + val.len > maxSize) - mayAdd.await(); + boolean largeVal = val.len > maxSize; + + if (largeVal) { + if (!queueSizeWarn) { + U.warn(log, "Trying to save in swap entry which have size more than write queue size. " + + "You may wish to increase 'maxWriteQueueSize' in FileSwapSpaceSpi configuration " + + "[queueMaxSize=" + maxSize + ", valSize=" + val.len + ']'); + + queueSizeWarn = true; + } + + while (size >= minTakeSize) + mayAdd.await(); + } + else { + while (size + val.len > maxSize) + mayAdd.await(); + } size += val.len; @@ -1419,7 +1443,7 @@ private class Space { private SwapFile right; /** */ - private final SwapValuesQueue que = new SwapValuesQueue(writeBufSize, maxWriteQueSize); + private final SwapValuesQueue que; /** Partitions. */ private final ConcurrentMap> parts = @@ -1442,11 +1466,13 @@ private class Space { /** * @param name Space name. + * @param log Logger. 
*/ - private Space(String name) { + private Space(String name, IgniteLogger log) { assert name != null; this.name = name; + this.que = new SwapValuesQueue(writeBufSize, maxWriteQueSize, log); } /** diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSwapUnswapGetTestSmallQueueSize.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSwapUnswapGetTestSmallQueueSize.java new file mode 100644 index 0000000000000..8d189feb57ff9 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSwapUnswapGetTestSmallQueueSize.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.cache; + +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.spi.swapspace.file.FileSwapSpaceSpi; + +/** + * + */ +public class CacheSwapUnswapGetTestSmallQueueSize extends CacheSwapUnswapGetTest { + /** {@inheritDoc} */ + @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { + IgniteConfiguration cfg = super.getConfiguration(gridName); + + ((FileSwapSpaceSpi)cfg.getSwapSpaceSpi()).setMaxWriteQueueSize(2); + + return cfg; + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/spi/swapspace/file/GridFileSwapSpaceSpiSelfTest.java b/modules/core/src/test/java/org/apache/ignite/spi/swapspace/file/GridFileSwapSpaceSpiSelfTest.java index 64652b19e3220..ab211652273f3 100644 --- a/modules/core/src/test/java/org/apache/ignite/spi/swapspace/file/GridFileSwapSpaceSpiSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/spi/swapspace/file/GridFileSwapSpaceSpiSelfTest.java @@ -25,11 +25,14 @@ import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.KeyCacheObjectImpl; +import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.typedef.CIX1; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiInClosure; @@ -37,8 +40,10 @@ import org.apache.ignite.spi.swapspace.GridSwapSpaceSpiAbstractSelfTest; import org.apache.ignite.spi.swapspace.SwapKey; import org.apache.ignite.spi.swapspace.SwapSpaceSpi; +import org.apache.ignite.testframework.GridTestUtils; import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentHashMap8; +import 
org.junit.Assert; /** * Test for {@link FileSwapSpaceSpi}. @@ -364,4 +369,88 @@ public void testMultithreadedOperations() throws Exception { assertEquals(hash0, hash1); } + + /** + * @throws IgniteCheckedException If failed. + */ + public void testSaveValueLargeThenQueueSize() throws IgniteCheckedException { + final String spaceName = "mySpace"; + final SwapKey key = new SwapKey("key"); + + final byte[] val = new byte[FileSwapSpaceSpi.DFLT_QUE_SIZE * 2]; + Arrays.fill(val, (byte)1); + + IgniteInternalFuture fut = GridTestUtils.runAsync(new Callable() { + @Override public byte[] call() throws Exception { + return saveAndGet(spaceName, key, val); + } + }); + + byte[] bytes = fut.get(10_000); + + Assert.assertArrayEquals(val, bytes); + } + + /** + * @throws IgniteCheckedException If failed. + */ + public void testSaveValueLargeThenQueueSizeMultiThreaded() throws Exception { + final String spaceName = "mySpace"; + + final int threads = 5; + + long DURATION = 30_000; + + final int maxSize = FileSwapSpaceSpi.DFLT_QUE_SIZE * 2; + + final AtomicBoolean done = new AtomicBoolean(); + + try { + IgniteInternalFuture fut = multithreadedAsync(new Callable() { + @Override public Void call() throws Exception { + ThreadLocalRandom rnd = ThreadLocalRandom.current(); + + while (!done.get()) { + SwapKey key = new SwapKey(rnd.nextInt(1000)); + + spi.store(spaceName, key, new byte[rnd.nextInt(0, maxSize)], context()); + } + + return null; + } + }, threads, " async-put"); + + Thread.sleep(DURATION); + + done.set(true); + + fut.get(); + } + finally { + done.set(true); + } + } + + /** + * @param spaceName Space name. + * @param key Key. + * @param val Value. + * @throws Exception If failed. + * @return Read bytes. + */ + private byte[] saveAndGet(final String spaceName, final SwapKey key, byte[] val) throws Exception { + spi.store(spaceName, key, val, context()); + + GridTestUtils.waitForCondition(new GridAbsPredicate() { + @Override public boolean apply() { + return spi.read(spaceName, key, context()) != null; + } + }, 10_000); + + byte[] res = spi.read(spaceName, key, context()); + + assertNotNull(res); + + return res; + } } \ No newline at end of file diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java index 60d59d70424b9..c494e732b4f66 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java @@ -41,6 +41,7 @@ import org.apache.ignite.internal.processors.cache.CacheStoreUsageMultinodeStaticStartAtomicTest; import org.apache.ignite.internal.processors.cache.CacheStoreUsageMultinodeStaticStartTxTest; import org.apache.ignite.internal.processors.cache.CacheSwapUnswapGetTest; +import org.apache.ignite.internal.processors.cache.CacheSwapUnswapGetTestSmallQueueSize; import org.apache.ignite.internal.processors.cache.CacheTxNotAllowReadFromBackupTest; import org.apache.ignite.internal.processors.cache.CrossCacheLockTest; import org.apache.ignite.internal.processors.cache.GridCacheMarshallingNodeJoinSelfTest; @@ -304,6 +305,7 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(CacheVersionedEntryReplicatedTransactionalOffHeapSelfTest.class); suite.addTestSuite(CacheSwapUnswapGetTest.class); + suite.addTestSuite(CacheSwapUnswapGetTestSmallQueueSize.class); suite.addTestSuite(GridCacheDhtTxPreloadSelfTest.class); 
suite.addTestSuite(GridCacheNearTxPreloadSelfTest.class); From c1372ce2f0633968036fcfb079718214605c3350 Mon Sep 17 00:00:00 2001 From: sboikov Date: Tue, 20 Sep 2016 11:39:37 +0300 Subject: [PATCH 02/69] Client discovery: wait during join if receive RES_CONTINUE_JOIN, RES_WAIT. --- .../ignite/spi/discovery/tcp/ClientImpl.java | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ClientImpl.java b/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ClientImpl.java index bf7f519b6ca7f..2c85645305da5 100644 --- a/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ClientImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ClientImpl.java @@ -497,6 +497,8 @@ else if (state == DISCONNECTED) { Iterator it = addrs.iterator(); + boolean wait = false; + while (it.hasNext()) { if (Thread.currentThread().isInterrupted()) throw new InterruptedException(); @@ -515,12 +517,17 @@ else if (state == DISCONNECTED) { Socket sock = sockAndRes.get1().socket(); + if (log.isDebugEnabled()) + log.debug("Received response to join request [addr=" + addr + ", res=" + sockAndRes.get2() + ']'); + switch (sockAndRes.get2()) { case RES_OK: return new T2<>(sockAndRes.get1(), sockAndRes.get3()); case RES_CONTINUE_JOIN: case RES_WAIT: + wait = true; + U.closeQuiet(sock); break; @@ -533,7 +540,16 @@ else if (state == DISCONNECTED) { } } - if (addrs.isEmpty()) { + if (wait) { + if (timeout > 0 && (U.currentTimeMillis() - startTime) > timeout) + return null; + + if (log.isDebugEnabled()) + log.debug("Will wait before retry join."); + + Thread.sleep(2000); + } + else if (addrs.isEmpty()) { if (timeout > 0 && (U.currentTimeMillis() - startTime) > timeout) return null; From 5a35ee9dad194b3009151b79f0ebd3976bb8fd22 Mon Sep 17 00:00:00 2001 From: tledkov-gridgain Date: Tue, 20 Sep 2016 14:10:55 +0500 Subject: [PATCH 03/69] IGNITE-3859: IGFS: Support direct PROXY mode invocation in the open method, add proxy mode to IgfsInputStreamImpl This closes #1065. This closes #1083. --- .../internal/processors/igfs/IgfsContext.java | 35 +++++ .../processors/igfs/IgfsDataManager.java | 121 ++++++++---------- .../internal/processors/igfs/IgfsImpl.java | 82 +++++++++--- .../processors/igfs/IgfsInputStreamImpl.java | 103 +++++++++++---- 4 files changed, 226 insertions(+), 115 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsContext.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsContext.java index 3e012469b0022..3405b5332eed2 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsContext.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsContext.java @@ -20,7 +20,10 @@ import java.util.LinkedList; import java.util.List; import java.util.UUID; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.RejectedExecutionException; import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.internal.GridKernalContext; @@ -60,6 +63,12 @@ public class IgfsContext { /** Local cluster node. */ private volatile ClusterNode locNode; + /** IGFS executor service. */ + private ExecutorService igfsSvc; + + /** Logger. */ + protected IgniteLogger log; + /** * @param ctx Kernal context. 
* @param cfg IGFS configuration. @@ -85,6 +94,10 @@ public IgfsContext( this.srvMgr = add(srvMgr); this.fragmentizerMgr = add(fragmentizerMgr); + log = ctx.log(IgfsContext.class); + + igfsSvc = ctx.getIgfsExecutorService(); + igfs = new IgfsImpl(this); } @@ -205,6 +218,28 @@ public ClusterNode localNode() { return locNode; } + /** + * Executes runnable in IGFS executor service. If execution rejected, runnable will be executed + * in caller thread. + * + * @param r Runnable to execute. + */ + public void runInIgfsThreadPool(Runnable r) { + try { + igfsSvc.submit(r); + } + catch (RejectedExecutionException ignored) { + // This exception will happen if network speed is too low and data comes faster + // than we can send it to remote nodes. + try { + r.run(); + } + catch (Exception e) { + log.warning("Failed to execute IGFS runnable: " + r, e); + } + } + } + /** * Adds manager to managers list. * diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java index d2183f927de30..2f704aea515de 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java @@ -45,7 +45,6 @@ import org.apache.ignite.internal.processors.task.GridInternal; import org.apache.ignite.internal.util.future.GridFinishedFuture; import org.apache.ignite.internal.util.future.GridFutureAdapter; -import org.apache.ignite.internal.util.lang.GridPlainCallable; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.CX1; import org.apache.ignite.internal.util.typedef.F; @@ -74,12 +73,9 @@ import java.util.Map; import java.util.UUID; import java.util.concurrent.BlockingQueue; -import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; @@ -123,9 +119,6 @@ public class IgfsDataManager extends IgfsManager { /** Affinity key generator. */ private AtomicLong affKeyGen = new AtomicLong(); - /** IGFS executor service. */ - private ExecutorService igfsSvc; - /** Request ID counter for write messages. */ private AtomicLong reqIdCtr = new AtomicLong(); @@ -183,8 +176,6 @@ else if (msg instanceof IgfsAckMessage) } }, EVT_NODE_LEFT, EVT_NODE_FAILED); - igfsSvc = igfsCtx.kernalContext().getIgfsExecutorService(); - delWorker = new AsyncDeleteWorker(igfsCtx.kernalContext().gridName(), "igfs-" + igfsName + "-delete-worker", log); } @@ -345,45 +336,11 @@ private IgniteDataStreamer dataStreamer() { if (oldRmtReadFut == null) { try { - if (log.isDebugEnabled()) - log.debug("Reading non-local data block in the secondary file system [path=" + - path + ", fileInfo=" + fileInfo + ", blockIdx=" + blockIdx + ']'); - - int blockSize = fileInfo.blockSize(); - - long pos = blockIdx * blockSize; // Calculate position for Hadoop - - res = new byte[blockSize]; - - int read = 0; - - synchronized (secReader) { - try { - // Delegate to the secondary file system. 
- while (read < blockSize) { - int r = secReader.read(pos + read, res, read, blockSize - read); - - if (r < 0) - break; - - read += r; - } - } - catch (IOException e) { - throw new IgniteCheckedException("Failed to read data due to secondary file system " + - "exception: " + e.getMessage(), e); - } - } - - // If we did not read full block at the end of the file - trim it. - if (read != blockSize) - res = Arrays.copyOf(res, read); + res = secondaryDataBlock(path, blockIdx, secReader, fileInfo.blockSize()); rmtReadFut.onDone(res); putBlock(fileInfo.blockSize(), key, res); - - igfsCtx.metrics().addReadBlocks(1, 1); } catch (IgniteCheckedException e) { rmtReadFut.onDone(e); @@ -416,12 +373,60 @@ private IgniteDataStreamer dataStreamer() { return fut; } + /** + * Get data block for specified block index from secondary reader. + * + * @param path Path reading from. + * @param blockIdx Block index. + * @param secReader Optional secondary file system reader. + * @param blockSize Block size. + * @return Requested data block or {@code null} if nothing found. + * @throws IgniteCheckedException If failed. + */ + @Nullable public byte[] secondaryDataBlock(IgfsPath path, long blockIdx, + IgfsSecondaryFileSystemPositionedReadable secReader, int blockSize) throws IgniteCheckedException { + if (log.isDebugEnabled()) + log.debug("Reading non-local data block in the secondary file system [path=" + + path + ", blockIdx=" + blockIdx + ']'); + + long pos = blockIdx * blockSize; // Calculate position for Hadoop + + byte[] res = new byte[blockSize]; + + int read = 0; + + try { + // Delegate to the secondary file system. + while (read < blockSize) { + int r = secReader.read(pos + read, res, read, blockSize - read); + + if (r < 0) + break; + + read += r; + } + } + catch (IOException e) { + throw new IgniteCheckedException("Failed to read data due to secondary file system " + + "exception: " + e.getMessage(), e); + } + + // If we did not read full block at the end of the file - trim it. + if (read != blockSize) + res = Arrays.copyOf(res, read); + + igfsCtx.metrics().addReadBlocks(1, 1); + + return res; + } + /** * Stores the given block in data cache. * * @param blockSize The size of the block. * @param key The data cache key of the block. * @param data The new value of the block. + * @throws IgniteCheckedException If failed. */ private void putBlock(int blockSize, IgfsBlockKey key, byte[] data) throws IgniteCheckedException { if (data.length < blockSize) @@ -967,8 +972,8 @@ private void processBatch(IgniteUuid fileId, final ClusterNode node, } } else { - callIgfsLocalSafe(new GridPlainCallable() { - @Override @Nullable public Object call() throws Exception { + igfsCtx.runInIgfsThreadPool(new Runnable() { + @Override public void run() { storeBlocksAsync(blocks).listen(new CI1>() { @Override public void apply(IgniteInternalFuture fut) { try { @@ -981,8 +986,6 @@ private void processBatch(IgniteUuid fileId, final ClusterNode node, } } }); - - return null; } }); } @@ -1069,28 +1072,6 @@ private void processPartialBlockWrite(IgniteUuid fileId, IgfsBlockKey colocatedK } } - /** - * Executes callable in IGFS executor service. If execution rejected, callable will be executed - * in caller thread. - * - * @param c Callable to execute. - */ - private void callIgfsLocalSafe(Callable c) { - try { - igfsSvc.submit(c); - } - catch (RejectedExecutionException ignored) { - // This exception will happen if network speed is too low and data comes faster - // than we can send it to remote nodes. 
- try { - c.call(); - } - catch (Exception e) { - log.warning("Failed to execute IGFS callable: " + c, e); - } - } - } - /** * @param blocks Blocks to write. * @return Future that will be completed after put is done. diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java index 45596a3f4f376..87a4699a4de6b 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java @@ -47,6 +47,7 @@ import org.apache.ignite.igfs.mapreduce.IgfsRecordResolver; import org.apache.ignite.igfs.mapreduce.IgfsTask; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem; +import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystemPositionedReadable; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager; @@ -949,34 +950,79 @@ private IgfsEntryInfo primaryInfoForListing(IgfsPath path) throws IgniteCheckedE IgfsMode mode = resolveMode(path); - if (mode != PRIMARY) { - assert IgfsUtils.isDualMode(mode); + switch (mode) { + case PRIMARY: { + IgfsEntryInfo info = meta.infoForPath(path); - IgfsSecondaryInputStreamDescriptor desc = meta.openDual(secondaryFs, path, bufSize0); + if (info == null) + throw new IgfsPathNotFoundException("File not found: " + path); - IgfsInputStreamImpl os = new IgfsInputStreamImpl(igfsCtx, path, desc.info(), - cfg.getPrefetchBlocks(), seqReadsBeforePrefetch, desc.reader()); + if (!info.isFile()) + throw new IgfsPathIsDirectoryException("Failed to open file (not a file): " + path); - IgfsUtils.sendEvents(igfsCtx.kernalContext(), path, EVT_IGFS_FILE_OPENED_READ); + // Input stream to read data from grid cache with separate blocks. + IgfsInputStreamImpl os = new IgfsInputStreamImpl(igfsCtx, path, info, + cfg.getPrefetchBlocks(), seqReadsBeforePrefetch, null, + info.length(), info.blockSize(), info.blocksCount(), false); - return os; - } + IgfsUtils.sendEvents(igfsCtx.kernalContext(), path, EVT_IGFS_FILE_OPENED_READ); - IgfsEntryInfo info = meta.infoForPath(path); + return os; + } - if (info == null) - throw new IgfsPathNotFoundException("File not found: " + path); + case DUAL_ASYNC: + case DUAL_SYNC: { + assert IgfsUtils.isDualMode(mode); - if (!info.isFile()) - throw new IgfsPathIsDirectoryException("Failed to open file (not a file): " + path); + IgfsSecondaryInputStreamDescriptor desc = meta.openDual(secondaryFs, path, bufSize0); + + IgfsEntryInfo info = desc.info(); - // Input stream to read data from grid cache with separate blocks. 
- IgfsInputStreamImpl os = new IgfsInputStreamImpl(igfsCtx, path, info, - cfg.getPrefetchBlocks(), seqReadsBeforePrefetch, null); + IgfsInputStreamImpl os = new IgfsInputStreamImpl(igfsCtx, path, info, + cfg.getPrefetchBlocks(), seqReadsBeforePrefetch, desc.reader(), + info.length(), info.blockSize(), info.blocksCount(), false); + + IgfsUtils.sendEvents(igfsCtx.kernalContext(), path, EVT_IGFS_FILE_OPENED_READ); + + return os; + } - IgfsUtils.sendEvents(igfsCtx.kernalContext(), path, EVT_IGFS_FILE_OPENED_READ); + case PROXY: { + assert secondaryFs != null; - return os; + IgfsFile info = info(path); + + if (info == null) + throw new IgfsPathNotFoundException("File not found: " + path); + + if (!info.isFile()) + throw new IgfsPathIsDirectoryException("Failed to open file (not a file): " + path); + + IgfsSecondaryFileSystemPositionedReadable secReader = + new IgfsLazySecondaryFileSystemPositionedReadable(secondaryFs, path, bufSize); + + long len = info.length(); + + int blockSize = info.blockSize() > 0 ? info.blockSize() : cfg.getBlockSize(); + + long blockCnt = len / blockSize; + + if (len % blockSize != 0) + blockCnt++; + + IgfsInputStream os = new IgfsInputStreamImpl(igfsCtx, path, null, + cfg.getPrefetchBlocks(), seqReadsBeforePrefetch, secReader, + info.length(), blockSize, blockCnt, true); + + IgfsUtils.sendEvents(igfsCtx.kernalContext(), path, EVT_IGFS_FILE_OPENED_READ); + + return os; + } + + default: + assert false : "Unexpected mode " + mode; + return null; + } } }); } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsInputStreamImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsInputStreamImpl.java index 2f9f2fcab5e1f..0d9f2cd36fda5 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsInputStreamImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsInputStreamImpl.java @@ -28,6 +28,7 @@ import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager; import org.apache.ignite.internal.util.GridConcurrentHashSet; +import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteInClosure; @@ -109,21 +110,44 @@ public class IgfsInputStreamImpl extends IgfsInputStream implements IgfsSecondar /** Time consumed on reading. */ private long time; + /** File Length. */ + private long len; + + /** Block size to read. */ + private int blockSize; + + /** Block size to read. */ + private long blocksCnt; + + /** Proxy mode. */ + private boolean proxy; + /** * Constructs file output stream. - * - * @param igfsCtx IGFS context. + * @param igfsCtx IGFS context. * @param path Path to stored file. * @param fileInfo File info to write binary data to. * @param prefetchBlocks Number of blocks to prefetch. * @param seqReadsBeforePrefetch Amount of sequential reads before prefetch is triggered. * @param secReader Optional secondary file system reader. + * @param len File length. + * @param blockSize Block size. + * @param blocksCnt Blocks count. + * @param proxy Proxy mode flag. 
*/ - IgfsInputStreamImpl(IgfsContext igfsCtx, IgfsPath path, IgfsEntryInfo fileInfo, int prefetchBlocks, - int seqReadsBeforePrefetch, @Nullable IgfsSecondaryFileSystemPositionedReadable secReader) { + IgfsInputStreamImpl( + IgfsContext igfsCtx, + IgfsPath path, + @Nullable IgfsEntryInfo fileInfo, + int prefetchBlocks, + int seqReadsBeforePrefetch, + @Nullable IgfsSecondaryFileSystemPositionedReadable secReader, + long len, + int blockSize, + long blocksCnt, + boolean proxy) { assert igfsCtx != null; assert path != null; - assert fileInfo != null; this.igfsCtx = igfsCtx; this.path = path; @@ -131,6 +155,10 @@ public class IgfsInputStreamImpl extends IgfsInputStream implements IgfsSecondar this.prefetchBlocks = prefetchBlocks; this.seqReadsBeforePrefetch = seqReadsBeforePrefetch; this.secReader = secReader; + this.len = len; + this.blockSize = blockSize; + this.blocksCnt = blocksCnt; + this.proxy = proxy; log = igfsCtx.kernalContext().log(IgfsInputStream.class); @@ -154,7 +182,7 @@ public synchronized long bytes() { /** {@inheritDoc} */ @Override public long length() { - return fileInfo.length(); + return len; } /** {@inheritDoc} */ @@ -195,7 +223,7 @@ public synchronized long bytes() { /** {@inheritDoc} */ @Override public synchronized int available() throws IOException { - long l = fileInfo.length() - pos; + long l = len - pos; if (l < 0) return 0; @@ -240,7 +268,7 @@ public synchronized long bytes() { @SuppressWarnings("IfMayBeConditional") public synchronized byte[][] readChunks(long pos, int len) throws IOException { // Readable bytes in the file, starting from the specified position. - long readable = fileInfo.length() - pos; + long readable = this.len - pos; if (readable <= 0) return EMPTY_CHUNKS; @@ -254,8 +282,8 @@ public synchronized byte[][] readChunks(long pos, int len) throws IOException { bytes += len; - int start = (int)(pos / fileInfo.blockSize()); - int end = (int)((pos + len - 1) / fileInfo.blockSize()); + int start = (int)(pos / blockSize); + int end = (int)((pos + len - 1) / blockSize); int chunkCnt = end - start + 1; @@ -264,7 +292,7 @@ public synchronized byte[][] readChunks(long pos, int len) throws IOException { for (int i = 0; i < chunkCnt; i++) { byte[] block = blockFragmentizerSafe(start + i); - int blockOff = (int)(pos % fileInfo.blockSize()); + int blockOff = (int)(pos % blockSize); int blockLen = Math.min(len, block.length - blockOff); // If whole block can be used as result, do not do array copy. @@ -366,7 +394,7 @@ private int readFromStore(long pos, byte[] buf, int off, int len) throws IOExcep return 0; // Fully read done: read zero bytes correctly. // Readable bytes in the file, starting from the specified position. - long readable = fileInfo.length() - pos; + long readable = this.len - pos; if (readable <= 0) return -1; // EOF. @@ -378,10 +406,10 @@ private int readFromStore(long pos, byte[] buf, int off, int len) throws IOExcep assert len > 0; - byte[] block = blockFragmentizerSafe(pos / fileInfo.blockSize()); + byte[] block = blockFragmentizerSafe(pos / blockSize); // Skip bytes to expected position. - int blockOff = (int)(pos % fileInfo.blockSize()); + int blockOff = (int)(pos % blockSize); len = Math.min(len, block.length - blockOff); @@ -412,7 +440,7 @@ private byte[] blockFragmentizerSafe(long blockIdx) throws IOException { ", blockIdx=" + blockIdx + ", errMsg=" + e.getMessage() + ']'); // This failure may be caused by file being fragmented. 
- if (fileInfo.fileMap() != null && !fileInfo.fileMap().ranges().isEmpty()) { + if (fileInfo != null && fileInfo.fileMap() != null && !fileInfo.fileMap().ranges().isEmpty()) { IgfsEntryInfo newInfo = igfsCtx.meta().info(fileInfo.id()); // File was deleted. @@ -459,7 +487,7 @@ private byte[] block(long blockIdx) throws IOException, IgniteCheckedException { prevBlockIdx = blockIdx; - bytesFut = dataBlock(fileInfo, blockIdx); + bytesFut = dataBlock(blockIdx); assert bytesFut != null; @@ -470,10 +498,10 @@ private byte[] block(long blockIdx) throws IOException, IgniteCheckedException { if (prefetchBlocks > 0 && seqReads >= seqReadsBeforePrefetch - 1) { for (int i = 1; i <= prefetchBlocks; i++) { // Ensure that we do not prefetch over file size. - if (fileInfo.blockSize() * (i + blockIdx) >= fileInfo.length()) + if (blockSize * (i + blockIdx) >= len) break; else if (locCache.get(blockIdx + i) == null) - addLocalCacheFuture(blockIdx + i, dataBlock(fileInfo, blockIdx + i)); + addLocalCacheFuture(blockIdx + i, dataBlock(blockIdx + i)); } } @@ -483,17 +511,17 @@ else if (locCache.get(blockIdx + i) == null) throw new IgfsCorruptedFileException("Failed to retrieve file's data block (corrupted file?) " + "[path=" + path + ", blockIdx=" + blockIdx + ']'); - int blockSize = fileInfo.blockSize(); + int blockSize0 = blockSize; - if (blockIdx == fileInfo.blocksCount() - 1) - blockSize = (int)(fileInfo.length() % blockSize); + if (blockIdx == blocksCnt - 1) + blockSize0 = (int)(len % blockSize0); // If part of the file was reserved for writing, but was not actually written. - if (bytes.length < blockSize) + if (bytes.length < blockSize0) throw new IOException("Inconsistent file's data block (incorrectly written?)" + " [path=" + path + ", blockIdx=" + blockIdx + ", blockSize=" + bytes.length + - ", expectedBlockSize=" + blockSize + ", fileBlockSize=" + fileInfo.blockSize() + - ", fileLen=" + fileInfo.length() + ']'); + ", expectedBlockSize=" + blockSize0 + ", fileBlockSize=" + blockSize + + ", fileLen=" + len + ']'); return bytes; } @@ -538,14 +566,35 @@ public void apply(IgniteInternalFuture t) { /** * Get data block for specified block index. * - * @param fileInfo File info. * @param blockIdx Block index. * @return Requested data block or {@code null} if nothing found. * @throws IgniteCheckedException If failed. */ - @Nullable protected IgniteInternalFuture dataBlock(IgfsEntryInfo fileInfo, long blockIdx) + @Nullable protected IgniteInternalFuture dataBlock(final long blockIdx) throws IgniteCheckedException { - return igfsCtx.data().dataBlock(fileInfo, path, blockIdx, secReader); + if (proxy) { + assert secReader != null; + + final GridFutureAdapter fut = new GridFutureAdapter<>(); + + igfsCtx.runInIgfsThreadPool(new Runnable() { + @Override public void run() { + try { + fut.onDone(igfsCtx.data().secondaryDataBlock(path, blockIdx, secReader, blockSize)); + } + catch (Throwable e) { + fut.onDone(null, e); + } + } + }); + + return fut; + } + else { + assert fileInfo != null; + + return igfsCtx.data().dataBlock(fileInfo, path, blockIdx, secReader); + } } /** {@inheritDoc} */ From f6f417ff84f7823ebf3ab166980893d22995f143 Mon Sep 17 00:00:00 2001 From: "Andrey V. Mashenkov" Date: Tue, 20 Sep 2016 15:02:56 +0300 Subject: [PATCH 04/69] Implemented. 
--- .../apache/ignite/IgniteSystemProperties.java | 9 +++ .../internal/binary/BinaryObjectImpl.java | 5 ++ .../binary/BinaryObjectOffheapImpl.java | 5 ++ .../internal/binary/BinaryReaderExImpl.java | 18 +++++- .../ignite/internal/binary/BinaryUtils.java | 22 ++++--- .../internal/binary/BinaryWriterExImpl.java | 60 +++++++++++-------- .../internal/binary/GridBinaryMarshaller.java | 3 + .../binary/builder/BinaryBuilderReader.java | 20 ++++++- 8 files changed, 106 insertions(+), 36 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java index ab6403f1e47a0..f8ca3fbbdb64c 100644 --- a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java +++ b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java @@ -473,6 +473,15 @@ public final class IgniteSystemProperties { @Deprecated public static final String IGNITE_BINARY_DONT_WRAP_TREE_STRUCTURES = "IGNITE_BINARY_DONT_WRAP_TREE_STRUCTURES"; + /** + * When set to {@code true} Long zeroes will be encoded with with special type and no value included, + * that saves 8-bytes per field. Otherwise Long zeroes will be encoded in old manner, preserving compatibility. + *

+ * @deprecated Should be removed in Apache Ignite 2.0. + */ + @Deprecated + public static final String IGNITE_BINARY_COMPACT_LONG_ZEROES = "IGNITE_BINARY_COMPACT_LONG_ZEROES"; + /** * Enforces singleton. */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java index 7b42c03c7d94a..ae864334bbc1f 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java @@ -326,6 +326,11 @@ else if (fieldOffLen == BinaryUtils.OFFSET_2) break; + case GridBinaryMarshaller.ZERO_LONG: + val = 0L; + + break; + case GridBinaryMarshaller.BOOLEAN: val = BinaryPrimitives.readBoolean(arr, fieldPos + 1); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java index 2225b7a09ae9a..16cc298c847d0 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java @@ -204,6 +204,11 @@ else if (fieldOffLen == BinaryUtils.OFFSET_2) break; + case GridBinaryMarshaller.ZERO_LONG: + val = 0L; + + break; + case GridBinaryMarshaller.BOOLEAN: val = BinaryPrimitives.readBoolean(ptr, fieldPos + 1); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java index 775f237787cfc..dca29c8e91162 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java @@ -79,6 +79,7 @@ import static org.apache.ignite.internal.binary.GridBinaryMarshaller.UNREGISTERED_TYPE_ID; import static org.apache.ignite.internal.binary.GridBinaryMarshaller.UUID; import static org.apache.ignite.internal.binary.GridBinaryMarshaller.UUID_ARR; +import static org.apache.ignite.internal.binary.GridBinaryMarshaller.ZERO_LONG; /** * Binary reader implementation. @@ -421,6 +422,7 @@ private T readHandleField() { return (T)obj; } + /** {@inheritDoc} */ @Override public byte readByte(String fieldName) throws BinaryObjectException { return findFieldByName(fieldName) && checkFlagNoHandles(BYTE) == Flag.NORMAL ? in.readByte() : 0; @@ -721,7 +723,14 @@ long readLong(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException In case of error. */ @Nullable Long readLongNullable(int fieldId) throws BinaryObjectException { - return findFieldById(fieldId) && checkFlagNoHandles(LONG) == Flag.NORMAL ? 
in.readLong() : null; + if (findFieldById(fieldId)) { + if (checkFlagNoHandles(LONG) == Flag.NORMAL) + return in.readLong(); + else if (checkFlagNoHandles(ZERO_LONG) == Flag.NORMAL) + return 0L; + } + + return null; } /** {@inheritDoc} */ @@ -1521,6 +1530,11 @@ else if (flag == NULL) break; + case ZERO_LONG: + obj = 0L; + + break; + case FLOAT: obj = in.readFloat(); @@ -2053,7 +2067,7 @@ private void streamPositionRandom(int pos) { /** {@inheritDoc} */ @Override public long skip(long n) throws IOException { - return skipBytes((int) n); + return skipBytes((int)n); } /** {@inheritDoc} */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java index b5834a5254425..7d4512ce50349 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java @@ -179,11 +179,16 @@ public class BinaryUtils { PLAIN_CLASS_TO_FLAG.put(boolean.class, GridBinaryMarshaller.BOOLEAN); for (byte b : new byte[] { - GridBinaryMarshaller.BYTE, GridBinaryMarshaller.SHORT, GridBinaryMarshaller.INT, GridBinaryMarshaller.LONG, GridBinaryMarshaller.FLOAT, GridBinaryMarshaller.DOUBLE, - GridBinaryMarshaller.CHAR, GridBinaryMarshaller.BOOLEAN, GridBinaryMarshaller.DECIMAL, GridBinaryMarshaller.STRING, GridBinaryMarshaller.UUID, GridBinaryMarshaller.DATE, GridBinaryMarshaller.TIMESTAMP, - GridBinaryMarshaller.BYTE_ARR, GridBinaryMarshaller.SHORT_ARR, GridBinaryMarshaller.INT_ARR, GridBinaryMarshaller.LONG_ARR, GridBinaryMarshaller.FLOAT_ARR, GridBinaryMarshaller.DOUBLE_ARR, - GridBinaryMarshaller.CHAR_ARR, GridBinaryMarshaller.BOOLEAN_ARR, GridBinaryMarshaller.DECIMAL_ARR, GridBinaryMarshaller.STRING_ARR, GridBinaryMarshaller.UUID_ARR, GridBinaryMarshaller.DATE_ARR, GridBinaryMarshaller.TIMESTAMP_ARR, - GridBinaryMarshaller.ENUM, GridBinaryMarshaller.ENUM_ARR, GridBinaryMarshaller.NULL}) { + GridBinaryMarshaller.BYTE, GridBinaryMarshaller.SHORT, GridBinaryMarshaller.INT, GridBinaryMarshaller.LONG, + GridBinaryMarshaller.FLOAT, GridBinaryMarshaller.DOUBLE, GridBinaryMarshaller.CHAR, + GridBinaryMarshaller.BOOLEAN, GridBinaryMarshaller.DECIMAL, GridBinaryMarshaller.STRING, + GridBinaryMarshaller.UUID, GridBinaryMarshaller.DATE, GridBinaryMarshaller.TIMESTAMP, + GridBinaryMarshaller.BYTE_ARR, GridBinaryMarshaller.SHORT_ARR, GridBinaryMarshaller.INT_ARR, + GridBinaryMarshaller.LONG_ARR, GridBinaryMarshaller.FLOAT_ARR, GridBinaryMarshaller.DOUBLE_ARR, + GridBinaryMarshaller.CHAR_ARR, GridBinaryMarshaller.BOOLEAN_ARR, GridBinaryMarshaller.DECIMAL_ARR, + GridBinaryMarshaller.STRING_ARR, GridBinaryMarshaller.UUID_ARR, GridBinaryMarshaller.DATE_ARR, + GridBinaryMarshaller.TIMESTAMP_ARR, GridBinaryMarshaller.ENUM, GridBinaryMarshaller.ENUM_ARR, + GridBinaryMarshaller.NULL}) { PLAIN_TYPE_FLAG[b] = true; } @@ -683,7 +688,7 @@ public static boolean knownArray(Object arr) { if (arr == null) return false; - Class cls = arr.getClass(); + Class cls = arr.getClass(); return cls == byte[].class || cls == short[].class || cls == int[].class || cls == long[].class || cls == float[].class || cls == double[].class || cls == char[].class || cls == boolean[].class || @@ -1717,6 +1722,9 @@ public static Object doReadOptimized(BinaryInputStream in, BinaryContext ctx, @N case GridBinaryMarshaller.LONG: return in.readLong(); + case GridBinaryMarshaller.ZERO_LONG: + return 0L; + case GridBinaryMarshaller.FLOAT: return in.readFloat(); @@ 
-2197,7 +2205,7 @@ else if (c > 0x07FF) { } else { arr[position++] = (byte)(0xC0 | ((c >> 6) & 0x1F)); - arr[position++] = (byte)(0x80 | (c & 0x3F)); + arr[position++] = (byte)(0x80 | (c & 0x3F)); } } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java index 21fb2bfe5033d..b7a442d217452 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java @@ -18,6 +18,7 @@ package org.apache.ignite.internal.binary; import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.binary.BinaryObjectException; import org.apache.ignite.binary.BinaryRawWriter; import org.apache.ignite.binary.BinaryWriter; @@ -430,7 +431,7 @@ public void doWriteDate(@Nullable Date date) { * @param ts Timestamp. */ public void doWriteTimestamp(@Nullable Timestamp ts) { - if (ts== null) + if (ts == null) out.writeByte(GridBinaryMarshaller.NULL); else { out.unsafeEnsure(1 + 8 + 4); @@ -640,21 +641,21 @@ void doWriteDateArray(@Nullable Date[] val) { } } - /** - * @param val Array of timestamps. - */ - void doWriteTimestampArray(@Nullable Timestamp[] val) { - if (val == null) - out.writeByte(GridBinaryMarshaller.NULL); - else { - out.unsafeEnsure(1 + 4); - out.unsafeWriteByte(GridBinaryMarshaller.TIMESTAMP_ARR); - out.unsafeWriteInt(val.length); + /** + * @param val Array of timestamps. + */ + void doWriteTimestampArray(@Nullable Timestamp[] val) { + if (val == null) + out.writeByte(GridBinaryMarshaller.NULL); + else { + out.unsafeEnsure(1 + 4); + out.unsafeWriteByte(GridBinaryMarshaller.TIMESTAMP_ARR); + out.unsafeWriteInt(val.length); - for (Timestamp ts : val) - doWriteTimestamp(ts); - } - } + for (Timestamp ts : val) + doWriteTimestamp(ts); + } + } /** * @param val Array of objects. @@ -949,10 +950,19 @@ void writeIntField(@Nullable Integer val) { * @param val Value. 
*/ void writeLongFieldPrimitive(long val) { - out.unsafeEnsure(1 + 8); - out.unsafeWriteByte(GridBinaryMarshaller.LONG); - out.unsafeWriteLong(val); + if (val == 0L && + IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES, false)) { + out.unsafeEnsure(1); + + out.unsafeWriteByte(GridBinaryMarshaller.ZERO_LONG); + } + else { + out.unsafeEnsure(1 + 8); + + out.unsafeWriteByte(GridBinaryMarshaller.LONG); + out.unsafeWriteLong(val); + } } /** @@ -1527,7 +1537,8 @@ void writeBinaryObjectField(@Nullable BinaryObjectImpl po) throws BinaryObjectEx } /** {@inheritDoc} */ - @Override public void writeTimestampArray(String fieldName, @Nullable Timestamp[] val) throws BinaryObjectException { + @Override public void writeTimestampArray(String fieldName, + @Nullable Timestamp[] val) throws BinaryObjectException { writeFieldId(fieldName); writeTimestampArrayField(val); } @@ -1537,7 +1548,7 @@ void writeBinaryObjectField(@Nullable BinaryObjectImpl po) throws BinaryObjectEx doWriteTimestampArray(val); } - /** {@inheritDoc} */ + /** {@inheritDoc} */ @Override public void writeObjectArray(String fieldName, @Nullable Object[] val) throws BinaryObjectException { writeFieldId(fieldName); writeObjectArrayField(val); @@ -1637,22 +1648,22 @@ void writeBinaryObjectField(@Nullable BinaryObjectImpl po) throws BinaryObjectEx /** {@inheritDoc} */ @Override public void writeByte(int v) throws IOException { - out.writeByte((byte) v); + out.writeByte((byte)v); } /** {@inheritDoc} */ @Override public void writeShort(int v) throws IOException { - out.writeShort((short) v); + out.writeShort((short)v); } /** {@inheritDoc} */ @Override public void writeChar(int v) throws IOException { - out.writeChar((char) v); + out.writeChar((char)v); } /** {@inheritDoc} */ @Override public void write(int b) throws IOException { - out.writeByte((byte) b); + out.writeByte((byte)b); } /** {@inheritDoc} */ @@ -1696,6 +1707,7 @@ private void writeFieldId(String fieldName) throws BinaryObjectException { /** * Write field ID. + * * @param fieldId Field ID. 
*/ public void writeFieldId(int fieldId) { diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java index ad635214898d0..b97e2354a2486 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java @@ -57,6 +57,9 @@ public class GridBinaryMarshaller { /** */ public static final byte LONG = 4; + /** */ + public static final byte ZERO_LONG = 36; + /** */ public static final byte FLOAT = 5; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java index 347fb2bb9d99e..dd3aba3e26fb0 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java @@ -260,6 +260,11 @@ public void skipValue() { break; + case GridBinaryMarshaller.ZERO_LONG: + len = 0; + + break; + case GridBinaryMarshaller.BYTE_ARR: case GridBinaryMarshaller.BOOLEAN_ARR: len = 4 + readLength(); @@ -272,7 +277,7 @@ public void skipValue() { break; case GridBinaryMarshaller.DECIMAL: - len = /** scale */ 4 + /** mag len */ 4 + /** mag bytes count */ readInt(4); + len = /** scale */4 + /** mag len */4 + /** mag bytes count */readInt(4); break; @@ -410,6 +415,9 @@ public Object getValueQuickly(int pos, int len) { case GridBinaryMarshaller.LONG: return BinaryPrimitives.readLong(arr, pos + 1); + case GridBinaryMarshaller.ZERO_LONG: + return 0L; + case GridBinaryMarshaller.FLOAT: return BinaryPrimitives.readFloat(arr, pos + 1); @@ -543,6 +551,11 @@ public Object parseValue() { break; + case GridBinaryMarshaller.ZERO_LONG: + plainLazyValLen = 0; + + break; + case GridBinaryMarshaller.FLOAT: plainLazyValLen = 4; @@ -562,7 +575,7 @@ public Object parseValue() { return arr[pos++] != 0; case GridBinaryMarshaller.DECIMAL: - plainLazyValLen = /** scale */ 4 + /** mag len */ 4 + /** mag bytes count */ readInt(4); + plainLazyValLen = /** scale */4 + /** mag len */4 + /** mag bytes count */readInt(4); break; @@ -645,7 +658,8 @@ public Object parseValue() { for (int i = 0; i < res.length; i++) { byte flag = arr[pos++]; - if (flag == GridBinaryMarshaller.NULL) continue; + if (flag == GridBinaryMarshaller.NULL) + continue; if (flag != GridBinaryMarshaller.DATE) throw new BinaryObjectException("Invalid flag value: " + flag); From c92803ec3f31e7ef5384876fa4fa0a959b08247d Mon Sep 17 00:00:00 2001 From: "Andrey V. Mashenkov" Date: Tue, 20 Sep 2016 17:03:37 +0300 Subject: [PATCH 05/69] Minor fixes. Tests added. 
--- .../internal/binary/BinaryReaderExImpl.java | 64 +++++++++- .../binary/BinaryFieldsAbstractSelfTest.java | 66 ++++++---- ...BinaryMarshallerCompactZeroesSelfTest.java | 119 ++++++++++++++++++ .../binary/BinaryMarshallerSelfTest.java | 20 ++- 4 files changed, 237 insertions(+), 32 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java index dca29c8e91162..b29230cb5d3d2 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java @@ -22,6 +22,7 @@ import java.io.ObjectInput; import java.math.BigDecimal; import java.sql.Timestamp; +import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.Map; @@ -705,7 +706,18 @@ int readInt(int fieldId) throws BinaryObjectException { /** {@inheritDoc} */ @Override public long readLong(String fieldName) throws BinaryObjectException { - return findFieldByName(fieldName) && checkFlagNoHandles(LONG) == Flag.NORMAL ? in.readLong() : 0; + if (findFieldByName(fieldName)) { + switch (checkFlagNoHandles(LONG, ZERO_LONG)) { + case LONG: + return in.readLong(); + + case ZERO_LONG: + case NULL: + return 0L; + } + } + + return 0L; } /** @@ -714,7 +726,18 @@ int readInt(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException If failed. */ long readLong(int fieldId) throws BinaryObjectException { - return findFieldById(fieldId) && checkFlagNoHandles(LONG) == Flag.NORMAL ? in.readLong() : 0; + if (findFieldById(fieldId)) { + switch (checkFlagNoHandles(LONG, ZERO_LONG)) { + case LONG: + return in.readLong(); + + case ZERO_LONG: + case NULL: + return 0L; + } + } + + return 0L; } /** @@ -724,10 +747,16 @@ long readLong(int fieldId) throws BinaryObjectException { */ @Nullable Long readLongNullable(int fieldId) throws BinaryObjectException { if (findFieldById(fieldId)) { - if (checkFlagNoHandles(LONG) == Flag.NORMAL) - return in.readLong(); - else if (checkFlagNoHandles(ZERO_LONG) == Flag.NORMAL) - return 0L; + switch (checkFlagNoHandles(LONG, ZERO_LONG)) { + case LONG: + return in.readLong(); + + case ZERO_LONG: + return 0L; + + case NULL: + return null; + } } return null; @@ -1434,6 +1463,29 @@ else if (flag == NULL) ", actual=" + flag + ']'); } + /** + * Ensure that type flag is either null or contained by allowed values. + * + * @param expFlags Expected values. + * @return type flag. + * @throws BinaryObjectException If flag is neither null, nor expected. + */ + private byte checkFlagNoHandles(byte... 
expFlags) { + byte flag = in.readByte(); + + if (flag == NULL) + return NULL; + + for (byte f : expFlags) + if (f == flag) + return f; + + int pos = BinaryUtils.positionForHandle(in); + + throw new BinaryObjectException("Unexpected flag value [pos=" + pos + ", expected=" + + Arrays.toString(expFlags) + ", actual=" + flag + ']'); + } + /** {@inheritDoc} */ @Override public BinaryRawReader rawReader() { if (!raw) { diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java index fd095e99f9d0b..0bb9d64d57e31 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java @@ -17,6 +17,7 @@ package org.apache.ignite.internal.binary; +import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.binary.BinaryField; import org.apache.ignite.binary.BinaryObject; import org.apache.ignite.binary.BinaryTypeConfiguration; @@ -199,6 +200,23 @@ public void testLong() throws Exception { check("fLong"); } + /** + * Test long field within compact mode on. + * Compact Long zeroes should become default mode in Apache Ignite 2.0, so this test will be redundant. + * + * @deprecated Should be removed in Apache Ignite 2.0. + * @throws Exception If failed. + */ + @Deprecated + public void testLongZero() throws Exception { + System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES, "true"); + + check("fLong"); + check("fLongZero"); + + System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES); + } + /** * Test long array field. * @@ -429,27 +447,27 @@ private void check0(String fieldName, TestContext ctx, boolean exists) throws Ex Object expVal = U.field(ctx.obj, fieldName); if (val instanceof BinaryObject) - val = ((BinaryObject) val).deserialize(); + val = ((BinaryObject)val).deserialize(); if (val != null && val.getClass().isArray()) { assertNotNull(expVal); if (val instanceof byte[]) - assertTrue(Arrays.equals((byte[]) expVal, (byte[]) val)); + assertTrue(Arrays.equals((byte[])expVal, (byte[])val)); else if (val instanceof boolean[]) - assertTrue(Arrays.equals((boolean[]) expVal, (boolean[]) val)); + assertTrue(Arrays.equals((boolean[])expVal, (boolean[])val)); else if (val instanceof short[]) - assertTrue(Arrays.equals((short[]) expVal, (short[]) val)); + assertTrue(Arrays.equals((short[])expVal, (short[])val)); else if (val instanceof char[]) - assertTrue(Arrays.equals((char[]) expVal, (char[]) val)); + assertTrue(Arrays.equals((char[])expVal, (char[])val)); else if (val instanceof int[]) - assertTrue(Arrays.equals((int[]) expVal, (int[]) val)); + assertTrue(Arrays.equals((int[])expVal, (int[])val)); else if (val instanceof long[]) - assertTrue(Arrays.equals((long[]) expVal, (long[]) val)); + assertTrue(Arrays.equals((long[])expVal, (long[])val)); else if (val instanceof float[]) - assertTrue(Arrays.equals((float[]) expVal, (float[]) val)); + assertTrue(Arrays.equals((float[])expVal, (float[])val)); else if (val instanceof double[]) - assertTrue(Arrays.equals((double[]) expVal, (double[]) val)); + assertTrue(Arrays.equals((double[])expVal, (double[])val)); else { Object[] expVal0 = (Object[])expVal; Object[] val0 = (Object[])val; @@ -574,6 +592,7 @@ public static class TestObject { public char fChar; public int fInt; public long fLong; + public long fLongZero; public 
float fFloat; public double fDouble; @@ -626,17 +645,18 @@ public TestObject(int ignore) { fChar = 3; fInt = 4; fLong = 5; + fLongZero = 0; fFloat = 6.6f; fDouble = 7.7; - fByteArr = new byte[] { 1, 2 }; - fBoolArr = new boolean[] { true, false }; - fShortArr = new short[] { 2, 3 }; - fCharArr = new char[] { 3, 4 }; - fIntArr = new int[] { 4, 5 }; - fLongArr = new long[] { 5, 6 }; - fFloatArr = new float[] { 6.6f, 7.7f }; - fDoubleArr = new double[] { 7.7, 8.8 }; + fByteArr = new byte[] {1, 2}; + fBoolArr = new boolean[] {true, false}; + fShortArr = new short[] {2, 3}; + fCharArr = new char[] {3, 4}; + fIntArr = new int[] {4, 5}; + fLongArr = new long[] {5, 6}; + fFloatArr = new float[] {6.6f, 7.7f}; + fDoubleArr = new double[] {7.7, 8.8}; fString = "8"; fDate = new Date(); @@ -644,15 +664,15 @@ public TestObject(int ignore) { fUuid = UUID.randomUUID(); fDecimal = new BigDecimal(9); - fStringArr = new String[] { "8", "9" }; - fDateArr = new Date[] { new Date(), new Date(new Date().getTime() + 1) }; + fStringArr = new String[] {"8", "9"}; + fDateArr = new Date[] {new Date(), new Date(new Date().getTime() + 1)}; fTimestampArr = - new Timestamp[] { new Timestamp(new Date().getTime() + 1), new Timestamp(new Date().getTime() + 2) }; - fUuidArr = new UUID[] { UUID.randomUUID(), UUID.randomUUID() }; - fDecimalArr = new BigDecimal[] { new BigDecimal(9), new BigDecimal(10) }; + new Timestamp[] {new Timestamp(new Date().getTime() + 1), new Timestamp(new Date().getTime() + 2)}; + fUuidArr = new UUID[] {UUID.randomUUID(), UUID.randomUUID()}; + fDecimalArr = new BigDecimal[] {new BigDecimal(9), new BigDecimal(10)}; fObj = new TestInnerObject(10); - fObjArr = new TestInnerObject[] { new TestInnerObject(10), new TestInnerObject(11) }; + fObjArr = new TestInnerObject[] {new TestInnerObject(10), new TestInnerObject(11)}; } } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java new file mode 100644 index 0000000000000..6ee6c38b38062 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.binary; + +import junit.framework.Assert; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteSystemProperties; +import org.apache.ignite.binary.BinaryBasicIdMapper; +import org.apache.ignite.binary.BinaryBasicNameMapper; +import org.apache.ignite.binary.BinaryCollectionFactory; +import org.apache.ignite.binary.BinaryField; +import org.apache.ignite.binary.BinaryIdMapper; +import org.apache.ignite.binary.BinaryMapFactory; +import org.apache.ignite.binary.BinaryNameMapper; +import org.apache.ignite.binary.BinaryObject; +import org.apache.ignite.binary.BinaryObjectBuilder; +import org.apache.ignite.binary.BinaryObjectException; +import org.apache.ignite.binary.BinaryRawReader; +import org.apache.ignite.binary.BinaryRawWriter; +import org.apache.ignite.binary.BinaryReader; +import org.apache.ignite.binary.BinarySerializer; +import org.apache.ignite.binary.BinaryType; +import org.apache.ignite.binary.BinaryTypeConfiguration; +import org.apache.ignite.binary.BinaryWriter; +import org.apache.ignite.binary.Binarylizable; +import org.apache.ignite.configuration.BinaryConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl; +import org.apache.ignite.internal.processors.cache.CacheObjectContext; +import org.apache.ignite.internal.util.GridUnsafe; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.internal.util.lang.GridMapEntry; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.logger.NullLogger; +import org.apache.ignite.marshaller.MarshallerContextTestImpl; +import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.jsr166.ConcurrentHashMap8; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.io.Serializable; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.InetSocketAddress; +import java.sql.Timestamp; +import java.util.AbstractQueue; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Queue; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentSkipListSet; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.ignite.internal.binary.streams.BinaryMemoryAllocator.INSTANCE; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertNotEquals; + +/** + * Binary marshaller tests with compact mode on. 
+ * Compact Long zeroes should become default mode in Apache Ignite 2.0, so this test will be redundant. + * + * @deprecated Should be removed in Apache Ignite 2.0. + */ +@Deprecated +public class BinaryMarshallerCompactZeroesSelfTest extends BinaryMarshallerSelfTest { + + @Override protected void beforeTestsStarted() throws Exception { + super.beforeTestsStarted(); + + System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES, "true"); + } + + @Override protected void afterTestsStopped() throws Exception { + super.afterTestsStopped(); + + System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES); + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java index b347ec0590030..398cbc9d6962f 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java @@ -51,8 +51,10 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListSet; + import junit.framework.Assert; import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.binary.BinaryBasicIdMapper; import org.apache.ignite.binary.BinaryBasicNameMapper; import org.apache.ignite.binary.BinaryCollectionFactory; @@ -73,6 +75,7 @@ import org.apache.ignite.binary.Binarylizable; import org.apache.ignite.configuration.BinaryConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.IgniteProperties; import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl; import org.apache.ignite.internal.processors.cache.CacheObjectContext; import org.apache.ignite.internal.util.GridUnsafe; @@ -716,6 +719,7 @@ public void testBinary() throws Exception { assertEquals(obj.s, (short)po.field("_s")); assertEquals(obj.i, (int)po.field("_i")); assertEquals(obj.l, (long)po.field("_l")); + assertEquals(obj.zl, (long)po.field("_zl")); assertEquals(obj.f, (float)po.field("_f"), 0); assertEquals(obj.d, (double)po.field("_d"), 0); assertEquals(obj.c, (char)po.field("_c")); @@ -842,8 +846,6 @@ public void testVoid() throws Exception { assertEquals(clazz, marshalUnmarshal(clazz)); } - - /** * */ @@ -3407,6 +3409,7 @@ private TestBinary binaryObject() { innerSimple.s = 1; innerSimple.i = 1; innerSimple.l = 1; + innerSimple.zl = 0L; innerSimple.f = 1.1f; innerSimple.d = 1.1d; innerSimple.c = 1; @@ -3446,6 +3449,7 @@ private TestBinary binaryObject() { innerBinary.s = 2; innerBinary.i = 2; innerBinary.l = 2; + innerBinary.zl = 0L; innerBinary.f = 2.2f; innerBinary.d = 2.2d; innerBinary.c = 2; @@ -3521,6 +3525,7 @@ private TestBinary binaryObject() { outer.s = 4; outer.i = 4; outer.l = 4; + outer.zl = 0L; outer.f = 4.4f; outer.d = 4.4d; outer.c = 4; @@ -3617,6 +3622,9 @@ private static class SimpleObject { /** */ private long l; + /** */ + private long zl; + /** */ private float f; @@ -3738,6 +3746,9 @@ private static class TestBinary implements Binarylizable { /** */ private long l; + /** */ + private long zl; + /** */ private long lRaw; @@ -3903,6 +3914,7 @@ private static class TestBinary implements Binarylizable { writer.writeShort("_s", s); writer.writeInt("_i", i); writer.writeLong("_l", l); + writer.writeLong("_zl", zl); writer.writeFloat("_f", f); 
writer.writeDouble("_d", d); writer.writeChar("_c", c); @@ -3970,6 +3982,7 @@ private static class TestBinary implements Binarylizable { s = reader.readShort("_s"); i = reader.readInt("_i"); l = reader.readLong("_l"); + zl = reader.readLong("_zl"); f = reader.readFloat("_f"); d = reader.readDouble("_d"); c = reader.readChar("_c"); @@ -4884,5 +4897,6 @@ private static class SingleHandleA { /** */ - private static class SingleHandleB {} + private static class SingleHandleB { + } } From 70f7f99896db3c4faddd2f6ac27a96d0566d1223 Mon Sep 17 00:00:00 2001 From: "Andrey V. Mashenkov" Date: Tue, 20 Sep 2016 18:16:28 +0300 Subject: [PATCH 06/69] Minor fixes --- .../apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java index c1d9974837fb3..d6bce7e102164 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java @@ -26,6 +26,7 @@ import org.apache.ignite.internal.binary.BinaryFieldsOffheapSelfTest; import org.apache.ignite.internal.binary.BinaryFooterOffsetsHeapSelfTest; import org.apache.ignite.internal.binary.BinaryFooterOffsetsOffheapSelfTest; +import org.apache.ignite.internal.binary.BinaryMarshallerCompactZeroesSelfTest; import org.apache.ignite.internal.binary.BinaryMarshallerSelfTest; import org.apache.ignite.internal.binary.BinaryObjectBuilderAdditionalSelfTest; import org.apache.ignite.internal.binary.BinaryObjectBuilderDefaultMappersSelfTest; @@ -89,6 +90,7 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(BinaryTreeSelfTest.class); suite.addTestSuite(BinaryMarshallerSelfTest.class); + suite.addTestSuite(BinaryMarshallerCompactZeroesSelfTest.class); suite.addTestSuite(BinaryConfigurationConsistencySelfTest.class); suite.addTestSuite(GridBinaryMarshallerCtxDisabledSelfTest.class); suite.addTestSuite(BinaryObjectBuilderDefaultMappersSelfTest.class); From 135f0a8a39fb6895fada18d210260deebfb9426d Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Wed, 21 Sep 2016 10:33:11 +0300 Subject: [PATCH 07/69] Added missing header to BinaryObjectToStringSelfTest. --- .../binary/BinaryObjectToStringSelfTest.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryObjectToStringSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryObjectToStringSelfTest.java index cc6cf8bd226b7..df6bcde0fdf8e 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryObjectToStringSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryObjectToStringSelfTest.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.ignite.internal.binary; import java.util.Arrays; From 4ee52f0a50d9cf8bc64a277f2d02600a832d6ca6 Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Wed, 21 Sep 2016 15:37:52 +0700 Subject: [PATCH 08/69] IGNITE-3936 Added check for already processed key types on load cache. Added info message about started/finished load cache. Improved exceptions messages. --- .../store/jdbc/CacheAbstractJdbcStore.java | 42 ++++++++++++------- .../cache/store/jdbc/CacheJdbcPojoStore.java | 5 ++- 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java index a33a1e6625f86..fe8a50b5460bd 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java @@ -827,17 +827,31 @@ protected Integer columnIndex(Map loadColIdxs, String dbName) { throw new CacheLoaderException("Provided key type is not found in store or cache configuration " + "[cache=" + U.maskName(cacheName) + ", key=" + keyType + "]"); - String selQry = args[i + 1].toString(); + String qry = args[i + 1].toString(); EntryMapping em = entryMapping(cacheName, typeIdForTypeName(kindForName(keyType), keyType)); - futs.add(pool.submit(new LoadCacheCustomQueryWorker<>(em, selQry, clo))); + if (log.isInfoEnabled()) + log.info("Started load cache using custom query [cache=" + U.maskName(cacheName) + + ", keyType=" + keyType + ", query=" + qry + "]"); + + futs.add(pool.submit(new LoadCacheCustomQueryWorker<>(em, qry, clo))); } } else { - Collection entryMappings = mappings.values(); + Collection processedKeyTypes = new HashSet<>(); + + for (EntryMapping em : mappings.values()) { + String keyType = em.keyType(); + + if (processedKeyTypes.contains(keyType)) + continue; + + processedKeyTypes.add(keyType); + + if (log.isInfoEnabled()) + log.info("Started load cache [cache=" + U.maskName(cacheName) + ", keyType=" + keyType + "]"); - for (EntryMapping em : entryMappings) { if (parallelLoadCacheMinThreshold > 0) { Connection conn = null; @@ -853,7 +867,7 @@ protected Integer columnIndex(Map loadColIdxs, String dbName) { if (rs.next()) { if (log.isDebugEnabled()) log.debug("Multithread loading entries from db [cache=" + U.maskName(cacheName) + - ", keyType=" + em.keyType() + " ]"); + ", keyType=" + keyType + "]"); int keyCnt = em.keyCols.size(); @@ -876,13 +890,13 @@ protected Integer columnIndex(Map loadColIdxs, String dbName) { } futs.add(pool.submit(loadCacheRange(em, clo, upperBound, null, 0))); - - continue; } + + continue; } catch (SQLException e) { - log.warning("Failed to load entries from db in multithreaded mode " + - "[cache=" + U.maskName(cacheName) + ", keyType=" + em.keyType() + " ]", e); + log.warning("Failed to load entries from db in multithreaded mode, will try in single thread " + + "[cache=" + U.maskName(cacheName) + ", keyType=" + keyType + " ]", e); } finally { U.closeQuiet(conn); @@ -891,7 
+905,7 @@ protected Integer columnIndex(Map loadColIdxs, String dbName) { if (log.isDebugEnabled()) log.debug("Single thread loading entries from db [cache=" + U.maskName(cacheName) + - ", keyType=" + em.keyType() + " ]"); + ", keyType=" + keyType + "]"); futs.add(pool.submit(loadCacheFull(em, clo))); } @@ -900,8 +914,8 @@ protected Integer columnIndex(Map loadColIdxs, String dbName) { for (Future fut : futs) U.get(fut); - if (log.isDebugEnabled()) - log.debug("Cache loaded from db: " + U.maskName(cacheName)); + if (log.isInfoEnabled()) + log.info("Finished load cache: " + U.maskName(cacheName)); } catch (IgniteCheckedException e) { throw new CacheLoaderException("Failed to load cache: " + U.maskName(cacheName), e.getCause()); @@ -1941,10 +1955,6 @@ private LoadCacheCustomQueryWorker(EntryMapping em, String qry, IgniteBiInClosur /** {@inheritDoc} */ @Override public Void call() throws Exception { - if (log.isDebugEnabled()) - log.debug("Load cache using custom query [cache= " + U.maskName(em.cacheName) + - ", keyType=" + em.keyType() + ", query=" + qry + "]"); - Connection conn = null; PreparedStatement stmt = null; diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java index 798b84a8acba2..dd3e812d7d979 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java @@ -102,7 +102,8 @@ public class CacheJdbcPojoStore extends CacheAbstractJdbcStore { return prop.get(obj); } catch (Exception e) { - throw new CacheException("Failed to read object of class: " + typeName, e); + throw new CacheException("Failed to read object property [cache=" + U.maskName(cacheName) + + ", type=" + typeName + ", prop=" + fldName + "]", e); } } @@ -262,7 +263,7 @@ protected Object buildBinaryObject(String typeName, JdbcTypeField[] fields, return builder.build(); } catch (SQLException e) { - throw new CacheException("Failed to read binary object", e); + throw new CacheException("Failed to read binary object: " + typeName, e); } } From 16b82b77f00dff8e525c8cc68d3387de107c78d1 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Wed, 21 Sep 2016 12:35:07 +0300 Subject: [PATCH 09/69] IGNITE-3635: Additional fix for stack overflow in binary objects. 
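For context, the hunk below special-cases IgniteUuid when a binary object's field values are appended during toString(), so such values are written out directly instead of falling through to the generic handling. A minimal, illustrative sketch of the intended effect (the Holder wrapper and its field are assumptions, not part of this patch):

    IgniteUuid id = IgniteUuid.randomUuid();
    BinaryObject bo = ignite.binary().toBinary(new Holder(id)); // Holder: hypothetical class with an IgniteUuid field
    System.out.println(bo); // the IgniteUuid field value is printed via its own toString()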
--- .../org/apache/ignite/internal/binary/BinaryObjectExImpl.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectExImpl.java index e6df4074bdb02..063bd837ac2cb 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectExImpl.java @@ -30,6 +30,7 @@ import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl; import org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory; import org.apache.ignite.internal.util.typedef.internal.SB; +import org.apache.ignite.lang.IgniteUuid; import org.jetbrains.annotations.Nullable; /** @@ -248,6 +249,8 @@ else if (val instanceof boolean[]) buf.a(Arrays.toString((boolean[]) val)); else if (val instanceof BigDecimal[]) buf.a(Arrays.toString((BigDecimal[])val)); + else if (val instanceof IgniteUuid) + buf.a(val); else if (val instanceof BinaryObjectExImpl) { BinaryObjectExImpl po = (BinaryObjectExImpl)val; From e3827a40bcbd54664cc0688e8e6ec57e4e328172 Mon Sep 17 00:00:00 2001 From: Valentin Kulichenko Date: Wed, 21 Sep 2016 16:07:02 -0700 Subject: [PATCH 10/69] IGNITE-3892 - Fixed bug in BinaryWriterExImpl.doWriteClass() method --- .../internal/binary/BinaryWriterExImpl.java | 21 ++++---- .../binary/BinaryMarshallerSelfTest.java | 53 +++++++++++++++---- .../marshaller/MarshallerContextTestImpl.java | 28 ++++++++-- 3 files changed, 77 insertions(+), 25 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java index 21fb2bfe5033d..1a818199b9944 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java @@ -17,16 +17,6 @@ package org.apache.ignite.internal.binary; -import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.binary.BinaryObjectException; -import org.apache.ignite.binary.BinaryRawWriter; -import org.apache.ignite.binary.BinaryWriter; -import org.apache.ignite.internal.binary.streams.BinaryHeapOutputStream; -import org.apache.ignite.internal.binary.streams.BinaryOutputStream; -import org.apache.ignite.internal.util.IgniteUtils; -import org.apache.ignite.internal.util.typedef.internal.A; -import org.jetbrains.annotations.Nullable; - import java.io.IOException; import java.io.ObjectOutput; import java.lang.reflect.InvocationHandler; @@ -38,6 +28,15 @@ import java.util.Date; import java.util.Map; import java.util.UUID; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.binary.BinaryObjectException; +import org.apache.ignite.binary.BinaryRawWriter; +import org.apache.ignite.binary.BinaryWriter; +import org.apache.ignite.internal.binary.streams.BinaryHeapOutputStream; +import org.apache.ignite.internal.binary.streams.BinaryOutputStream; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.internal.util.typedef.internal.A; +import org.jetbrains.annotations.Nullable; import static java.nio.charset.StandardCharsets.UTF_8; @@ -823,7 +822,7 @@ void doWriteClass(@Nullable Class val) { else { out.unsafeWriteInt(GridBinaryMarshaller.UNREGISTERED_TYPE_ID); - doWriteString(val.getClass().getName()); + doWriteString(val.getName()); } } 
} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java index b347ec0590030..f4154724bfd9a 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java @@ -2648,7 +2648,7 @@ public void testThreadLocalArrayReleased() throws Exception { */ public void testDuplicateNameSimpleNameMapper() throws Exception { BinaryMarshaller marsh = binaryMarshaller(new BinaryBasicNameMapper(true), - new BinaryBasicIdMapper(true), null, null); + new BinaryBasicIdMapper(true), null, null, null); Test1.Job job1 = new Test1().new Job(); Test2.Job job2 = new Test2().new Job(); @@ -2672,7 +2672,7 @@ public void testDuplicateNameSimpleNameMapper() throws Exception { */ public void testDuplicateNameFullNameMapper() throws Exception { BinaryMarshaller marsh = binaryMarshaller(new BinaryBasicNameMapper(false), - new BinaryBasicIdMapper(false), null, null); + new BinaryBasicIdMapper(false), null, null, null); Test1.Job job1 = new Test1().new Job(); Test2.Job job2 = new Test2().new Job(); @@ -2910,6 +2910,17 @@ public void testSingleHandle() throws Exception { assertEquals(SingleHandleB.class, innerBo.deserialize().getClass()); } + /** + * @throws Exception If failed. + */ + public void testUnregisteredClass() throws Exception { + BinaryMarshaller m = binaryMarshaller(null, Collections.singletonList(Value.class.getName())); + + ClassFieldObject res = m.unmarshal(m.marshal(new ClassFieldObject(Value.class)), null); + + assertEquals(Value.class, res.cls); + } + /** * */ @@ -3227,9 +3238,8 @@ protected BinaryContext binaryContext(BinaryMarshaller marsh) { /** * */ - protected BinaryMarshaller binaryMarshaller() - throws IgniteCheckedException { - return binaryMarshaller(null, null, null, null); + protected BinaryMarshaller binaryMarshaller() throws IgniteCheckedException { + return binaryMarshaller(null, null, null, null, null); } /** @@ -3237,7 +3247,15 @@ protected BinaryMarshaller binaryMarshaller() */ protected BinaryMarshaller binaryMarshaller(Collection cfgs) throws IgniteCheckedException { - return binaryMarshaller(null, null, null, cfgs); + return binaryMarshaller(null, null, null, cfgs, null); + } + + /** + * + */ + protected BinaryMarshaller binaryMarshaller(Collection cfgs, + Collection excludedClasses) throws IgniteCheckedException { + return binaryMarshaller(null, null, null, cfgs, excludedClasses); } /** @@ -3246,7 +3264,7 @@ protected BinaryMarshaller binaryMarshaller(Collection protected BinaryMarshaller binaryMarshaller(BinaryNameMapper nameMapper, BinaryIdMapper mapper, Collection cfgs) throws IgniteCheckedException { - return binaryMarshaller(nameMapper, mapper, null, cfgs); + return binaryMarshaller(nameMapper, mapper, null, cfgs, null); } /** @@ -3254,7 +3272,7 @@ protected BinaryMarshaller binaryMarshaller(BinaryNameMapper nameMapper, BinaryI */ protected BinaryMarshaller binaryMarshaller(BinarySerializer serializer, Collection cfgs) throws IgniteCheckedException { - return binaryMarshaller(null, null, serializer, cfgs); + return binaryMarshaller(null, null, serializer, cfgs, null); } /** @@ -3264,7 +3282,8 @@ protected BinaryMarshaller binaryMarshaller( BinaryNameMapper nameMapper, BinaryIdMapper mapper, BinarySerializer serializer, - Collection cfgs + Collection cfgs, + Collection excludedClasses ) throws 
IgniteCheckedException { IgniteConfiguration iCfg = new IgniteConfiguration(); @@ -3283,7 +3302,7 @@ protected BinaryMarshaller binaryMarshaller( BinaryMarshaller marsh = new BinaryMarshaller(); - marsh.setContext(new MarshallerContextTestImpl(null)); + marsh.setContext(new MarshallerContextTestImpl(null, excludedClasses)); IgniteUtils.invoke(BinaryMarshaller.class, marsh, "setBinaryContext", ctx, iCfg); @@ -4885,4 +4904,18 @@ private static class SingleHandleA { /** */ private static class SingleHandleB {} + + /** + */ + private static class ClassFieldObject { + /** */ + private Class cls; + + /** + * @param cls Class field. + */ + public ClassFieldObject(Class cls) { + this.cls = cls; + } + } } diff --git a/modules/core/src/test/java/org/apache/ignite/marshaller/MarshallerContextTestImpl.java b/modules/core/src/test/java/org/apache/ignite/marshaller/MarshallerContextTestImpl.java index c600ca46d98a4..9ff127d245613 100644 --- a/modules/core/src/test/java/org/apache/ignite/marshaller/MarshallerContextTestImpl.java +++ b/modules/core/src/test/java/org/apache/ignite/marshaller/MarshallerContextTestImpl.java @@ -17,11 +17,13 @@ package org.apache.ignite.marshaller; +import java.util.Collection; import java.util.List; import java.util.concurrent.ConcurrentMap; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.MarshallerContextAdapter; import org.apache.ignite.plugin.PluginProvider; +import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentHashMap8; /** @@ -29,26 +31,44 @@ */ public class MarshallerContextTestImpl extends MarshallerContextAdapter { /** */ - private final static ConcurrentMap map = new ConcurrentHashMap8<>(); + private static final ConcurrentMap map = new ConcurrentHashMap8<>(); + + /** */ + private final Collection excluded; /** * Initializes context. * * @param plugins Plugins. + * @param excluded Excluded classes. */ - public MarshallerContextTestImpl(List plugins) { + public MarshallerContextTestImpl(@Nullable List plugins, Collection excluded) { super(plugins); + + this.excluded = excluded; + } + + /** + * Initializes context. + * + * @param plugins Plugins. + */ + public MarshallerContextTestImpl(List plugins) { + this(plugins, null); } /** * Initializes context. */ public MarshallerContextTestImpl() { - super(null); + this(null); } /** {@inheritDoc} */ @Override protected boolean registerClassName(int id, String clsName) throws IgniteCheckedException { + if (excluded != null && excluded.contains(clsName)) + return false; + String oldClsName = map.putIfAbsent(id, clsName); if (oldClsName != null && !oldClsName.equals(clsName)) @@ -69,4 +89,4 @@ public MarshallerContextTestImpl() { public ConcurrentMap internalMap() { return map; } -} \ No newline at end of file +} From 0d5ee7887be03295133704227133d619898b6abf Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Thu, 22 Sep 2016 14:36:50 +0700 Subject: [PATCH 11/69] IGNITE-3937 implemented support for unsigned types in MySQL. 
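For reference, a brief sketch of the mapping this change introduces: unsigned MySQL types are detected through DatabaseMetaData.getTypeInfo() (type names containing "UNSIGNED"), and the schema import model widens unsigned TINYINT/SMALLINT/INTEGER columns to the next larger Java type. The column name below is made up for illustration; the DbColumn constructor and the resulting types are taken from this patch:

    // Illustrative only: key=false, nullable=true, unsigned=true.
    DbColumn col = new DbColumn("age", java.sql.Types.TINYINT, false, true, true);
    // PojoDescriptor.toJavaType(col) is expected to yield Short.class here (short.class for a NOT NULL column),
    // whereas a signed TINYINT still maps to Byte.class / byte.class.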
--- .../apache/ignite/schema/parser/DbColumn.java | 16 +++++++++++-- .../dialect/DatabaseMetadataDialect.java | 9 +++++++ .../parser/dialect/JdbcMetadataDialect.java | 23 +++++++++++------- .../parser/dialect/MySQLMetadataDialect.java | 24 +++++++++++++++++-- .../parser/dialect/OracleMetadataDialect.java | 5 ++-- .../ignite/schema/model/PojoDescriptor.java | 24 ++++++++++++------- 6 files changed, 77 insertions(+), 24 deletions(-) diff --git a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/DbColumn.java b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/DbColumn.java index 8b0c813467c80..10fd50f94cc51 100644 --- a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/DbColumn.java +++ b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/DbColumn.java @@ -33,17 +33,22 @@ public class DbColumn { /** Is {@code NULL} allowed for column in database. */ private final boolean nullable; + /** Whether column unsigned. */ + private final boolean unsigned; + /** * @param name Column name. * @param type Column JDBC type. * @param key {@code true} if this column belongs to primary key. * @param nullable {@code true} if {@code NULL } allowed for column in database. + * @param unsigned {@code true} if column is unsigned. */ - public DbColumn(String name, int type, boolean key, boolean nullable) { + public DbColumn(String name, int type, boolean key, boolean nullable, boolean unsigned) { this.name = name; this.type = type; this.key = key; this.nullable = nullable; + this.unsigned = unsigned; } /** @@ -68,9 +73,16 @@ public boolean key() { } /** - * @return nullable {@code true} if {@code NULL } allowed for column in database. + * @return {@code true} if {@code NULL } allowed for column in database. */ public boolean nullable() { return nullable; } + + /** + * @return {@code true} if column is unsigned. + */ + public boolean unsigned() { + return unsigned; + } } diff --git a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java index 5ef088ad1799c..56ee59b7732df 100644 --- a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java +++ b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java @@ -18,6 +18,7 @@ package org.apache.ignite.schema.parser.dialect; import java.sql.Connection; +import java.sql.DatabaseMetaData; import java.sql.SQLException; import java.util.Collection; import java.util.Collections; @@ -62,6 +63,14 @@ public Set systemSchemas() { return Collections.singleton("INFORMATION_SCHEMA"); } + /** + * @return Collection of unsigned type names. + * @throws SQLException If failed to get unsigned type names. + */ + public Set unsignedTypes(DatabaseMetaData dbMeta) throws SQLException { + return Collections.emptySet(); + } + /** * Create table descriptor. 
* diff --git a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java index 235c84c3edfd0..f6c27f29fc60b 100644 --- a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java +++ b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java @@ -30,7 +30,6 @@ import java.util.Set; import org.apache.ignite.cache.QueryIndex; -import org.apache.ignite.cache.QueryIndexType; import org.apache.ignite.schema.parser.DbColumn; import org.apache.ignite.schema.parser.DbTable; @@ -62,6 +61,9 @@ public class JdbcMetadataDialect extends DatabaseMetadataDialect { /** Column data type index. */ private static final int COL_DATA_TYPE_IDX = 5; + /** Column type name index. */ + private static final int COL_TYPE_NAME_IDX = 6; + /** Column nullable index. */ private static final int COL_NULLABLE_IDX = 11; @@ -116,11 +118,11 @@ protected boolean useSchema() { Set sys = systemSchemas(); - Collection tbls = new ArrayList<>(); - if (schemas.isEmpty()) schemas.add(null); + Collection tbls = new ArrayList<>(); + for (String toSchema: schemas) { try (ResultSet tblsRs = dbMeta.getTables(useCatalog() ? toSchema : null, useSchema() ? toSchema : null, "%", tblsOnly ? TABLES_ONLY : TABLES_AND_VIEWS)) { @@ -136,24 +138,27 @@ protected boolean useSchema() { if (sys.contains(schema)) continue; - Set pkCols = new HashSet<>(); + Collection pkCols = new HashSet<>(); try (ResultSet pkRs = dbMeta.getPrimaryKeys(tblCatalog, tblSchema, tblName)) { while (pkRs.next()) pkCols.add(pkRs.getString(PK_COL_NAME_IDX)); } - List cols = new ArrayList<>(); + Collection cols = new ArrayList<>(); + + Collection unsignedTypes = unsignedTypes(dbMeta); try (ResultSet colsRs = dbMeta.getColumns(tblCatalog, tblSchema, tblName, null)) { while (colsRs.next()) { String colName = colsRs.getString(COL_NAME_IDX); cols.add(new DbColumn( - colName, - colsRs.getInt(COL_DATA_TYPE_IDX), - pkCols.contains(colName), - colsRs.getInt(COL_NULLABLE_IDX) == DatabaseMetaData.columnNullable)); + colName, + colsRs.getInt(COL_DATA_TYPE_IDX), + pkCols.contains(colName), + colsRs.getInt(COL_NULLABLE_IDX) == DatabaseMetaData.columnNullable, + unsignedTypes.contains(colsRs.getString(COL_TYPE_NAME_IDX)))); } } diff --git a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/MySQLMetadataDialect.java b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/MySQLMetadataDialect.java index 3332046da5226..7bd6f311c6495 100644 --- a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/MySQLMetadataDialect.java +++ b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/MySQLMetadataDialect.java @@ -18,20 +18,24 @@ package org.apache.ignite.schema.parser.dialect; import java.sql.Connection; +import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; -import java.util.List; +import java.util.HashSet; import java.util.Set; /** * MySQL specific metadata dialect. */ public class MySQLMetadataDialect extends JdbcMetadataDialect { + /** Type name index. 
*/ + private static final int TYPE_NAME_IDX = 1; + /** {@inheritDoc} */ @Override public Collection schemas(Connection conn) throws SQLException { - List schemas = new ArrayList<>(); + Collection schemas = new ArrayList<>(); ResultSet rs = conn.getMetaData().getCatalogs(); @@ -59,4 +63,20 @@ public class MySQLMetadataDialect extends JdbcMetadataDialect { @Override protected boolean useSchema() { return false; } + + /** {@inheritDoc} */ + @Override public Set unsignedTypes(DatabaseMetaData dbMeta) throws SQLException { + Set unsignedTypes = new HashSet<>(); + + try (ResultSet typeRs = dbMeta.getTypeInfo()) { + while (typeRs.next()) { + String typeName = typeRs.getString(TYPE_NAME_IDX); + + if (typeName.contains("UNSIGNED")) + unsignedTypes.add(typeName); + } + } + + return unsignedTypes; + } } diff --git a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java index 35356558e91d2..47fb05c1146e3 100644 --- a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java +++ b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java @@ -258,8 +258,7 @@ private Set primaryKeys(PreparedStatement stmt, String owner, String tbl * @return Indexes. * @throws SQLException If failed to retrieve indexes columns. */ - private Collection indexes(PreparedStatement stmt, String owner, String tbl) - throws SQLException { + private Collection indexes(PreparedStatement stmt, String owner, String tbl) throws SQLException { Map idxs = new LinkedHashMap<>(); stmt.setString(1, owner); @@ -347,7 +346,7 @@ private Collection indexes(PreparedStatement stmt, String owner, Str String colName = colsRs.getString(COL_NAME_IDX); cols.add(new DbColumn(colName, decodeType(colsRs), pkCols.contains(colName), - !"N".equals(colsRs.getString(NULLABLE_IDX)))); + !"N".equals(colsRs.getString(NULLABLE_IDX)), false)); } if (!cols.isEmpty()) diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java index 5053b07a8bdb8..4f696d6c0263b 100644 --- a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java +++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java @@ -123,7 +123,7 @@ public PojoDescriptor(PojoDescriptor prn, DbTable tbl) { String colName = col.name(); PojoField fld = new PojoField(colName, col.type(), - toJavaFieldName(colName), toJavaType(col.type(), col.nullable()).getName(), + toJavaFieldName(colName), toJavaType(col).getName(), col.key(), col.nullable()); fld.owner(this); @@ -427,24 +427,32 @@ private static String toJavaFieldName(String name) { /** * Convert JDBC data type to java type. * - * @param type JDBC SQL data type. - * @param nullable {@code true} if {@code NULL} is allowed for this field in database. + * @param col Database column descriptor. * @return Java data type. */ - private static Class toJavaType(int type, boolean nullable) { - switch (type) { + private static Class toJavaType(DbColumn col) { + boolean nullable = col.nullable(); + boolean unsigned = col.unsigned(); + + switch (col.type()) { case BIT: case BOOLEAN: return nullable ? Boolean.class : boolean.class; case TINYINT: - return nullable ? Byte.class : byte.class; + return unsigned + ? (nullable ? 
Short.class : short.class) + : (nullable ? Byte.class : byte.class); case SMALLINT: - return nullable ? Short.class : short.class; + return unsigned + ? (nullable ? Integer.class : int.class) + : (nullable ? Short.class : short.class); case INTEGER: - return nullable ? Integer.class : int.class; + return unsigned + ? (nullable ? Long.class : long.class) + : (nullable ? Integer.class : int.class); case BIGINT: return nullable ? Long.class : long.class; From a97483a4ce2c00bd0cca025c4ef4bfa181897aa9 Mon Sep 17 00:00:00 2001 From: tledkov-gridgain Date: Thu, 22 Sep 2016 10:51:05 +0300 Subject: [PATCH 12/69] IGNITE-3858 IGFS: Support direct PROXY mode invocation in methods: create / append. This closes #1070. This closes #1084. --- .../igfs/IgfsAbstractOutputStream.java | 266 +++++++++++++++ .../internal/processors/igfs/IgfsImpl.java | 27 +- .../processors/igfs/IgfsOutputStreamImpl.java | 319 ++++-------------- .../igfs/IgfsOutputStreamProxyImpl.java | 163 +++++++++ .../igfs/IgfsAbstractBaseSelfTest.java | 2 +- 5 files changed, 518 insertions(+), 259 deletions(-) create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractOutputStream.java create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamProxyImpl.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractOutputStream.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractOutputStream.java new file mode 100644 index 0000000000000..c1e751ecf53bc --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractOutputStream.java @@ -0,0 +1,266 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +import org.apache.ignite.events.IgfsEvent; +import org.apache.ignite.igfs.IgfsOutputStream; +import org.apache.ignite.igfs.IgfsPath; +import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager; +import org.apache.ignite.internal.util.typedef.internal.A; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.jetbrains.annotations.Nullable; + +import java.io.DataInput; +import java.io.IOException; +import java.nio.ByteBuffer; + +import static org.apache.ignite.events.EventType.EVT_IGFS_FILE_CLOSED_WRITE; + +/** + * Output stream to store data into grid cache with separate blocks. + */ +abstract class IgfsAbstractOutputStream extends IgfsOutputStream { + /** IGFS context. */ + protected final IgfsContext igfsCtx; + + /** Path to file. */ + protected final IgfsPath path; + + /** Buffer size. */ + protected final int bufSize; + + /** File worker batch. 
*/ + protected final IgfsFileWorkerBatch batch; + + /** Mutex for synchronization. */ + protected final Object mux = new Object(); + + /** Flag for this stream open/closed state. */ + protected boolean closed; + + /** Local buffer to store stream data as consistent block. */ + protected ByteBuffer buf; + + /** Bytes written. */ + protected long bytes; + + /** Time consumed by write operations. */ + protected long time; + + /** + * Constructs file output stream. + * + * @param igfsCtx IGFS context. + * @param path Path to stored file. + * @param bufSize The size of the buffer to be used. + * @param batch Optional secondary file system batch. + */ + IgfsAbstractOutputStream(IgfsContext igfsCtx, IgfsPath path, int bufSize, @Nullable IgfsFileWorkerBatch batch) { + synchronized (mux) { + this.path = path; + this.bufSize = optimizeBufferSize(bufSize); + this.igfsCtx = igfsCtx; + this.batch = batch; + } + + igfsCtx.metrics().incrementFilesOpenedForWrite(); + } + + /** + * Optimize buffer size. + * + * @param bufSize Original byffer size. + * @return Optimized buffer size. + */ + protected abstract int optimizeBufferSize(int bufSize); + + /** {@inheritDoc} */ + @Override public void write(int b) throws IOException { + synchronized (mux) { + checkClosed(null, 0); + + b &= 0xFF; + + long startTime = System.nanoTime(); + + if (buf == null) + buf = allocateNewBuffer(); + + buf.put((byte)b); + + sendBufferIfFull(); + + time += System.nanoTime() - startTime; + } + } + + /** {@inheritDoc} */ + @SuppressWarnings("NullableProblems") + @Override public void write(byte[] b, int off, int len) throws IOException { + A.notNull(b, "b"); + + if ((off < 0) || (off > b.length) || (len < 0) || ((off + len) > b.length) || ((off + len) < 0)) { + throw new IndexOutOfBoundsException("Invalid bounds [data.length=" + b.length + ", offset=" + off + + ", length=" + len + ']'); + } + + synchronized (mux) { + checkClosed(null, 0); + + // Check if there is anything to write. + if (len == 0) + return; + + long startTime = System.nanoTime(); + + if (buf == null) { + if (len >= bufSize) { + // Send data right away. + ByteBuffer tmpBuf = ByteBuffer.wrap(b, off, len); + + send(tmpBuf, tmpBuf.remaining()); + } + else { + buf = allocateNewBuffer(); + + buf.put(b, off, len); + } + } + else { + // Re-allocate buffer if needed. + if (buf.remaining() < len) + buf = ByteBuffer.allocate(buf.position() + len).put((ByteBuffer)buf.flip()); + + buf.put(b, off, len); + + sendBufferIfFull(); + } + + time += System.nanoTime() - startTime; + } + } + + /** {@inheritDoc} */ + @Override public void transferFrom(DataInput in, int len) throws IOException { + synchronized (mux) { + checkClosed(in, len); + + long startTime = System.nanoTime(); + + // Clean-up local buffer before streaming. + sendBufferIfNotEmpty(); + + // Perform transfer. + send(in, len); + + time += System.nanoTime() - startTime; + } + } + + /** + * Validate this stream is open. + * + * @param in Data input. + * @param len Data len in bytes. + * @throws IOException If this stream is closed. + */ + protected void checkClosed(@Nullable DataInput in, int len) throws IOException { + assert Thread.holdsLock(mux); + + if (closed) { + // Must read data from stream before throwing exception. + if (in != null) + in.skipBytes(len); + + throw new IOException("Stream has been closed: " + this); + } + } + + /** + * Send local buffer if it full. + * + * @throws IOException If failed. 
+ */ + private void sendBufferIfFull() throws IOException { + if (buf.position() >= bufSize) + sendBuffer(); + } + + /** + * Send local buffer if at least something is stored there. + * + * @throws IOException If failed. + */ + void sendBufferIfNotEmpty() throws IOException { + if (buf != null && buf.position() > 0) + sendBuffer(); + } + + /** + * Send all local-buffered data to server. + * + * @throws IOException In case of IO exception. + */ + private void sendBuffer() throws IOException { + buf.flip(); + + send(buf, buf.remaining()); + + buf = null; + } + + /** + * Store data block. + * + * @param data Block. + * @param writeLen Write length. + * @throws IOException If failed. + */ + protected abstract void send(Object data, int writeLen) throws IOException; + + /** + * Allocate new buffer. + * + * @return New buffer. + */ + private ByteBuffer allocateNewBuffer() { + return ByteBuffer.allocate(bufSize); + } + + /** + * Updates IGFS metrics when the stream is closed. + */ + protected void updateMetricsOnClose() { + IgfsLocalMetrics metrics = igfsCtx.metrics(); + + metrics.addWrittenBytesTime(bytes, time); + metrics.decrementFilesOpenedForWrite(); + + GridEventStorageManager evts = igfsCtx.kernalContext().event(); + + if (evts.isRecordable(EVT_IGFS_FILE_CLOSED_WRITE)) + evts.record(new IgfsEvent(path, igfsCtx.localNode(), + EVT_IGFS_FILE_CLOSED_WRITE, bytes)); + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(IgfsAbstractOutputStream.class, this); + } + +} \ No newline at end of file diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java index 87a4699a4de6b..bee9d9a72a53c 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java @@ -92,7 +92,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Callable; -import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; @@ -274,7 +274,7 @@ public final class IgfsImpl implements IgfsEx { } dualPool = secondaryFs != null ? new IgniteThreadPoolExecutor(4, Integer.MAX_VALUE, 5000L, - new LinkedBlockingQueue(), new IgfsThreadFactory(cfg.getName()), null) : null; + new SynchronousQueue(), new IgfsThreadFactory(cfg.getName()), null) : null; } /** {@inheritDoc} */ @@ -1088,6 +1088,17 @@ private IgfsOutputStream create0( else dirProps = fileProps = new HashMap<>(props); + if (mode == PROXY) { + assert secondaryFs != null; + + OutputStream secondaryStream = secondaryFs.create(path, bufSize, overwrite, replication, + groupBlockSize(), props); + + IgfsFileWorkerBatch batch = newBatch(path, secondaryStream); + + return new IgfsOutputStreamProxyImpl(igfsCtx, path, info(path), bufferSize(bufSize), batch); + } + // Prepare context for DUAL mode. 
IgfsSecondaryFileSystemCreateContext secondaryCtx = null; @@ -1142,7 +1153,15 @@ private IgfsOutputStream create0( final IgfsMode mode = resolveMode(path); - IgfsFileWorkerBatch batch; + if (mode == PROXY) { + assert secondaryFs != null; + + OutputStream secondaryStream = secondaryFs.append(path, bufSize, create, props); + + IgfsFileWorkerBatch batch = newBatch(path, secondaryStream); + + return new IgfsOutputStreamProxyImpl(igfsCtx, path, info(path), bufferSize(bufSize), batch); + } if (mode != PRIMARY) { assert IgfsUtils.isDualMode(mode); @@ -1151,7 +1170,7 @@ private IgfsOutputStream create0( IgfsCreateResult desc = meta.appendDual(secondaryFs, path, bufSize, create); - batch = newBatch(path, desc.secondaryOutputStream()); + IgfsFileWorkerBatch batch = newBatch(path, desc.secondaryOutputStream()); return new IgfsOutputStreamImpl(igfsCtx, path, desc.info(), bufferSize(bufSize), mode, batch); } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamImpl.java index 6dec0c1d9c957..f97624247cfb8 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamImpl.java @@ -18,14 +18,10 @@ package org.apache.ignite.internal.processors.igfs; import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.events.IgfsEvent; import org.apache.ignite.igfs.IgfsException; import org.apache.ignite.igfs.IgfsMode; -import org.apache.ignite.igfs.IgfsOutputStream; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteInternalFuture; -import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager; -import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteUuid; @@ -35,7 +31,6 @@ import java.io.IOException; import java.nio.ByteBuffer; -import static org.apache.ignite.events.EventType.EVT_IGFS_FILE_CLOSED_WRITE; import static org.apache.ignite.igfs.IgfsMode.DUAL_SYNC; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; import static org.apache.ignite.igfs.IgfsMode.PROXY; @@ -43,57 +38,30 @@ /** * Output stream to store data into grid cache with separate blocks. */ -class IgfsOutputStreamImpl extends IgfsOutputStream { +class IgfsOutputStreamImpl extends IgfsAbstractOutputStream { /** Maximum number of blocks in buffer. */ private static final int MAX_BLOCKS_CNT = 16; - /** IGFS context. */ - private final IgfsContext igfsCtx; - - /** Path to file. */ - private final IgfsPath path; - - /** Buffer size. */ - private final int bufSize; - /** IGFS mode. */ private final IgfsMode mode; - /** File worker batch. */ - private final IgfsFileWorkerBatch batch; - - /** Mutex for synchronization. */ - private final Object mux = new Object(); - /** Write completion future. */ private final IgniteInternalFuture writeFut; - /** Flag for this stream open/closed state. */ - private boolean closed; - - /** Local buffer to store stream data as consistent block. */ - private ByteBuffer buf; - - /** Bytes written. */ - private long bytes; - - /** Time consumed by write operations. */ - private long time; - /** File descriptor. */ private IgfsEntryInfo fileInfo; - /** Space in file to write data. 
*/ - private long space; + /** Affinity written by this output stream. */ + private IgfsFileAffinityRange streamRange; + + /** Data length in remainder. */ + protected int remainderDataLen; /** Intermediate remainder to keep data. */ private byte[] remainder; - /** Data length in remainder. */ - private int remainderDataLen; - - /** Affinity written by this output stream. */ - private IgfsFileAffinityRange streamRange; + /** Space in file to write data. */ + protected long space; /** * Constructs file output stream. @@ -107,6 +75,8 @@ class IgfsOutputStreamImpl extends IgfsOutputStream { */ IgfsOutputStreamImpl(IgfsContext igfsCtx, IgfsPath path, IgfsEntryInfo fileInfo, int bufSize, IgfsMode mode, @Nullable IgfsFileWorkerBatch batch) { + super(igfsCtx, path, bufSize, batch); + assert fileInfo != null && fileInfo.isFile() : "Unexpected file info: " + fileInfo; assert mode != null && mode != PROXY && (mode == PRIMARY && batch == null || batch != null); @@ -115,108 +85,55 @@ class IgfsOutputStreamImpl extends IgfsOutputStream { throw new IgfsException("Failed to acquire file lock (concurrently modified?): " + path); synchronized (mux) { - this.path = path; - this.bufSize = optimizeBufferSize(bufSize, fileInfo); - this.igfsCtx = igfsCtx; this.fileInfo = fileInfo; this.mode = mode; - this.batch = batch; streamRange = initialStreamRange(fileInfo); writeFut = igfsCtx.data().writeStart(fileInfo.id()); } - - igfsCtx.metrics().incrementFilesOpenedForWrite(); } - /** {@inheritDoc} */ - @Override public void write(int b) throws IOException { - synchronized (mux) { - checkClosed(null, 0); - - b &= 0xFF; - - long startTime = System.nanoTime(); - - if (buf == null) - buf = allocateNewBuffer(); - - buf.put((byte)b); - - sendBufferIfFull(); - - time += System.nanoTime() - startTime; - } + /** + * @return Length of file. + */ + private long length() { + return fileInfo.length(); } /** {@inheritDoc} */ - @SuppressWarnings("NullableProblems") - @Override public void write(byte[] b, int off, int len) throws IOException { - A.notNull(b, "b"); - - if ((off < 0) || (off > b.length) || (len < 0) || ((off + len) > b.length) || ((off + len) < 0)) { - throw new IndexOutOfBoundsException("Invalid bounds [data.length=" + b.length + ", offset=" + off + - ", length=" + len + ']'); - } - - synchronized (mux) { - checkClosed(null, 0); - - // Check if there is anything to write. - if (len == 0) - return; - - long startTime = System.nanoTime(); - - if (buf == null) { - if (len >= bufSize) { - // Send data right away. - ByteBuffer tmpBuf = ByteBuffer.wrap(b, off, len); - - send(tmpBuf, tmpBuf.remaining()); - } - else { - buf = allocateNewBuffer(); - - buf.put(b, off, len); - } - } - else { - // Re-allocate buffer if needed. - if (buf.remaining() < len) - buf = ByteBuffer.allocate(buf.position() + len).put((ByteBuffer)buf.flip()); + @Override protected int optimizeBufferSize(int bufSize) { + assert bufSize > 0; - buf.put(b, off, len); + if (fileInfo == null) + return bufSize; - sendBufferIfFull(); - } + int blockSize = fileInfo.blockSize(); - time += System.nanoTime() - startTime; - } - } + if (blockSize <= 0) + return bufSize; - /** {@inheritDoc} */ - @Override public void transferFrom(DataInput in, int len) throws IOException { - synchronized (mux) { - checkClosed(in, len); + if (bufSize <= blockSize) + // Optimize minimum buffer size to be equal file's block size. + return blockSize; - long startTime = System.nanoTime(); + int maxBufSize = blockSize * MAX_BLOCKS_CNT; - // Clean-up local buffer before streaming. 
- sendBufferIfNotEmpty(); + if (bufSize > maxBufSize) + // There is no profit or optimization from larger buffers. + return maxBufSize; - // Perform transfer. - send(in, len); + if (fileInfo.length() == 0) + // Make buffer size multiple of block size (optimized for new files). + return bufSize / blockSize * blockSize; - time += System.nanoTime() - startTime; - } + return bufSize; } /** * Flushes this output stream and forces any buffered output bytes to be written out. * - * @exception IOException if an I/O error occurs. + * @throws IOException if an I/O error occurs. */ @Override public void flush() throws IOException { synchronized (mux) { @@ -250,40 +167,6 @@ class IgfsOutputStreamImpl extends IgfsOutputStream { } } - /** - * Await acknowledgments. - * - * @throws IOException If failed. - */ - private void awaitAcks() throws IOException { - try { - igfsCtx.data().awaitAllAcksReceived(fileInfo.id()); - } - catch (IgniteCheckedException e) { - throw new IOException("Failed to wait for flush acknowledge: " + fileInfo.id, e); - } - } - - /** - * Flush remainder. - * - * @throws IOException If failed. - */ - private void flushRemainder() throws IOException { - try { - if (remainder != null) { - igfsCtx.data().storeDataBlocks(fileInfo, fileInfo.length() + space, null, 0, - ByteBuffer.wrap(remainder, 0, remainderDataLen), true, streamRange, batch); - - remainder = null; - remainderDataLen = 0; - } - } - catch (IgniteCheckedException e) { - throw new IOException("Failed to flush data (remainder) [path=" + path + ", space=" + space + ']', e); - } - } - /** {@inheritDoc} */ @Override public final void close() throws IOException { synchronized (mux) { @@ -355,75 +238,33 @@ private void flushRemainder() throws IOException { if (err != null) throw err; - igfsCtx.metrics().addWrittenBytesTime(bytes, time); - igfsCtx.metrics().decrementFilesOpenedForWrite(); - - GridEventStorageManager evts = igfsCtx.kernalContext().event(); - - if (evts.isRecordable(EVT_IGFS_FILE_CLOSED_WRITE)) - evts.record(new IgfsEvent(path, igfsCtx.kernalContext().discovery().localNode(), - EVT_IGFS_FILE_CLOSED_WRITE, bytes)); - } - } - - /** - * Validate this stream is open. - * - * @throws IOException If this stream is closed. - */ - private void checkClosed(@Nullable DataInput in, int len) throws IOException { - assert Thread.holdsLock(mux); - - if (closed) { - // Must read data from stream before throwing exception. - if (in != null) - in.skipBytes(len); - - throw new IOException("Stream has been closed: " + this); + updateMetricsOnClose(); } } /** - * Send local buffer if it full. - * - * @throws IOException If failed. - */ - private void sendBufferIfFull() throws IOException { - if (buf.position() >= bufSize) - sendBuffer(); - } - - /** - * Send local buffer if at least something is stored there. + * Flush remainder. * * @throws IOException If failed. */ - private void sendBufferIfNotEmpty() throws IOException { - if (buf != null && buf.position() > 0) - sendBuffer(); - } - - /** - * Send all local-buffered data to server. - * - * @throws IOException In case of IO exception. 
- */ - private void sendBuffer() throws IOException { - buf.flip(); + private void flushRemainder() throws IOException { + try { + if (remainder != null) { - send(buf, buf.remaining()); + remainder = igfsCtx.data().storeDataBlocks(fileInfo, length() + space, null, + 0, ByteBuffer.wrap(remainder, 0, remainderDataLen), true, streamRange, batch); - buf = null; + remainder = null; + remainderDataLen = 0; + } + } + catch (IgniteCheckedException e) { + throw new IOException("Failed to flush data (remainder) [path=" + path + ", space=" + space + ']', e); + } } - /** - * Store data block. - * - * @param data Block. - * @param writeLen Write length. - * @throws IOException If failed. - */ - private void send(Object data, int writeLen) throws IOException { + /** {@inheritDoc} */ + @Override protected void send(Object data, int writeLen) throws IOException { assert Thread.holdsLock(mux); assert data instanceof ByteBuffer || data instanceof DataInput; @@ -449,20 +290,20 @@ else if (remainder.length != blockSize) { } if (data instanceof ByteBuffer) - ((ByteBuffer) data).get(remainder, remainderDataLen, writeLen); + ((ByteBuffer)data).get(remainder, remainderDataLen, writeLen); else - ((DataInput) data).readFully(remainder, remainderDataLen, writeLen); + ((DataInput)data).readFully(remainder, remainderDataLen, writeLen); remainderDataLen += writeLen; } else { if (data instanceof ByteBuffer) { - remainder = igfsCtx.data().storeDataBlocks(fileInfo, fileInfo.length() + space, remainder, - remainderDataLen, (ByteBuffer) data, false, streamRange, batch); + remainder = igfsCtx.data().storeDataBlocks(fileInfo, length() + space, remainder, + remainderDataLen, (ByteBuffer)data, false, streamRange, batch); } else { - remainder = igfsCtx.data().storeDataBlocks(fileInfo, fileInfo.length() + space, remainder, - remainderDataLen, (DataInput) data, writeLen, false, streamRange, batch); + remainder = igfsCtx.data().storeDataBlocks(fileInfo, length() + space, remainder, + remainderDataLen, (DataInput)data, writeLen, false, streamRange, batch); } remainderDataLen = remainder == null ? 0 : remainder.length; @@ -474,12 +315,17 @@ else if (remainder.length != blockSize) { } /** - * Allocate new buffer. + * Await acknowledgments. * - * @return New buffer. + * @throws IOException If failed. */ - private ByteBuffer allocateNewBuffer() { - return ByteBuffer.allocate(bufSize); + private void awaitAcks() throws IOException { + try { + igfsCtx.data().awaitAllAcksReceived(fileInfo.id()); + } + catch (IgniteCheckedException e) { + throw new IOException("Failed to wait for flush acknowledge: " + fileInfo.id, e); + } } /** @@ -516,41 +362,6 @@ private IgfsFileAffinityRange initialStreamRange(IgfsEntryInfo fileInfo) { return affKey == null ? null : new IgfsFileAffinityRange(off, off, affKey); } - /** - * Optimize buffer size. - * - * @param bufSize Requested buffer size. - * @param fileInfo File info. - * @return Optimized buffer size. - */ - private static int optimizeBufferSize(int bufSize, IgfsEntryInfo fileInfo) { - assert bufSize > 0; - - if (fileInfo == null) - return bufSize; - - int blockSize = fileInfo.blockSize(); - - if (blockSize <= 0) - return bufSize; - - if (bufSize <= blockSize) - // Optimize minimum buffer size to be equal file's block size. - return blockSize; - - int maxBufSize = blockSize * MAX_BLOCKS_CNT; - - if (bufSize > maxBufSize) - // There is no profit or optimization from larger buffers. 
- return maxBufSize; - - if (fileInfo.length() == 0) - // Make buffer size multiple of block size (optimized for new files). - return bufSize / blockSize * blockSize; - - return bufSize; - } - /** {@inheritDoc} */ @Override public String toString() { return S.toString(IgfsOutputStreamImpl.class, this); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamProxyImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamProxyImpl.java new file mode 100644 index 0000000000000..7b74a1f00cf64 --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsOutputStreamProxyImpl.java @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.igfs.IgfsFile; +import org.apache.ignite.igfs.IgfsPath; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.jetbrains.annotations.Nullable; + +import java.io.DataInput; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * Output stream to store data into grid cache with separate blocks. + */ +class IgfsOutputStreamProxyImpl extends IgfsAbstractOutputStream { + /** File info. */ + private IgfsFile info; + + /** + * Constructs file output stream. + * + * @param igfsCtx IGFS context. + * @param path Path to stored file. + * @param info File info. + * @param bufSize The size of the buffer to be used. + * @param batch Optional secondary file system batch. + */ + IgfsOutputStreamProxyImpl(IgfsContext igfsCtx, IgfsPath path, IgfsFile info, int bufSize, + @Nullable IgfsFileWorkerBatch batch) { + super(igfsCtx, path, bufSize, batch); + + assert batch != null; + + this.info = info; + } + + /** {@inheritDoc} */ + @Override protected int optimizeBufferSize(int bufSize) { + assert bufSize > 0; + + return bufSize; + } + + /** + * Flushes this output stream and forces any buffered output bytes to be written out. + * + * @throws IOException if an I/O error occurs. + */ + @Override public void flush() throws IOException { + synchronized (mux) { + checkClosed(null, 0); + + sendBufferIfNotEmpty(); + } + } + + /** {@inheritDoc} */ + @Override public final void close() throws IOException { + synchronized (mux) { + // Do nothing if stream is already closed. + if (closed) + return; + + // Set closed flag immediately. + closed = true; + + // Flush data. 
+ IOException err = null; + + try { + sendBufferIfNotEmpty(); + } + catch (Exception e) { + err = new IOException("Failed to flush data during stream close [path=" + path + + ", fileInfo=" + info + ']', e); + } + + // Finish batch before file unlocking to support the assertion that unlocked file batch, + // if any, must be in finishing state (e.g. append see more IgfsImpl.newBatch) + batch.finish(); + + // Finally, await secondary file system flush. + try { + batch.await(); + } + catch (IgniteCheckedException e) { + if (err == null) + err = new IOException("Failed to close secondary file system stream [path=" + path + + ", fileInfo=" + info + ']', e); + else + err.addSuppressed(e); + } + + // Throw error, if any. + if (err != null) + throw err; + + updateMetricsOnClose(); + } + } + + /** {@inheritDoc} */ + @Override protected void send(Object data, int writeLen) throws IOException { + assert Thread.holdsLock(mux); + assert data instanceof ByteBuffer || data instanceof DataInput; + + try { + // Increment metrics. + bytes += writeLen; + + byte [] dataBuf = new byte[writeLen]; + + if (data instanceof ByteBuffer) { + ByteBuffer byteBuf = (ByteBuffer)data; + + byteBuf.get(dataBuf); + } + else { + DataInput dataIn = (DataInput)data; + + try { + dataIn.readFully(dataBuf); + } + catch (IOException e) { + throw new IgniteCheckedException(e); + } + } + + if (!batch.write(dataBuf)) + throw new IgniteCheckedException("Cannot write more data to the secondary file system output " + + "stream because it was marked as closed: " + batch.path()); + else + igfsCtx.metrics().addWriteBlocks(1, 1); + + } + catch (IgniteCheckedException e) { + throw new IOException("Failed to store data into file: " + path, e); + } + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(IgfsOutputStreamProxyImpl.class, this); + } +} \ No newline at end of file diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java index 3f62cf55013c4..14a653bf83b68 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java @@ -904,7 +904,7 @@ protected static IgfsPath[] paths(IgfsPath... paths) { protected void clear(IgniteFileSystem igfs, IgfsSecondaryFileSystemTestAdapter igfsSecondary) throws Exception { clear(igfs); - if (dual) + if (mode != PRIMARY) clear(igfsSecondary); } From 63888bd99b8a84cc961b226efba9964dcc292f4c Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Thu, 22 Sep 2016 16:22:40 +0700 Subject: [PATCH 13/69] IGNITE-3938 Implemented escaping for SQL identifiers. Added tests for escaped mode. 
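In rough terms, the new sqlEscapeAll flag makes the JDBC POJO store wrap every table and field name in dialect-specific quotes, which preserves identifier case and allows reserved words or special characters. A minimal configuration sketch follows, with a hypothetical cache name and the setters exercised by the tests in this commit; setSqlEscapeAll(boolean) itself is introduced here.

    CacheConfiguration<Object, Object> ccfg = new CacheConfiguration<>("orgCache"); // hypothetical name

    CacheJdbcPojoStoreFactory<Object, Object> storeFactory = new CacheJdbcPojoStoreFactory<>();

    storeFactory.setDialect(new H2Dialect());
    storeFactory.setSqlEscapeAll(true); // identifiers rendered as "tableName"."fieldName"

    ccfg.setCacheStoreFactory(storeFactory);
    ccfg.setReadThrough(true);
    ccfg.setWriteThrough(true);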
--- .../store/jdbc/CacheAbstractJdbcStore.java | 104 +++++++++++++++--- .../store/jdbc/CacheJdbcPojoStoreFactory.java | 46 ++++++-- .../store/jdbc/dialect/BasicJdbcDialect.java | 8 +- .../cache/store/jdbc/dialect/DB2Dialect.java | 6 +- .../cache/store/jdbc/dialect/H2Dialect.java | 5 +- .../cache/store/jdbc/dialect/JdbcDialect.java | 18 ++- .../store/jdbc/dialect/MySQLDialect.java | 9 +- .../store/jdbc/dialect/OracleDialect.java | 5 +- .../store/jdbc/dialect/SQLServerDialect.java | 14 ++- .../CacheJdbcPojoStoreAbstractSelfTest.java | 48 +++++--- ...JdbcPojoStoreBinaryMarshallerSelfTest.java | 2 +- ...BinaryMarshallerWithSqlEscapeSelfTest.java | 28 +++++ ...cPojoStoreOptimizedMarshallerSelfTest.java | 2 +- ...imizedMarshallerWithSqlEscapeSelfTest.java | 28 +++++ .../testsuites/IgniteCacheTestSuite.java | 4 + .../test/config/jdbc-pojo-store-builtin.xml | 12 +- .../src/test/config/jdbc-pojo-store-obj.xml | 12 +- .../CacheJdbcBlobStoreFactorySelfTest.java | 12 +- .../CacheJdbcPojoStoreFactorySelfTest.java | 14 ++- .../store/jdbc/CachePojoStoreXmlSelfTest.java | 13 ++- ...achePojoStoreXmlWithSqlEscapeSelfTest.java | 28 +++++ .../testsuites/IgniteSpringTestSuite.java | 5 +- 22 files changed, 328 insertions(+), 95 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerWithSqlEscapeSelfTest.java create mode 100644 modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerWithSqlEscapeSelfTest.java create mode 100644 modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlWithSqlEscapeSelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java index fe8a50b5460bd..a9a8ce1d0b697 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java @@ -187,6 +187,9 @@ public abstract class CacheAbstractJdbcStore implements CacheStore, /** Hash calculator. */ protected JdbcTypeHasher hasher = JdbcTypeDefaultHasher.INSTANCE; + /** Flag indicating that table and field names should be escaped in all SQL queries created by JDBC POJO store. */ + private boolean sqlEscapeAll; + /** * Get field value from object for use as query parameter. * @@ -727,7 +730,7 @@ private Map getOrCreateCacheMappings(@Nullable String cach checkTypeConfiguration(cacheName, valKind, valType, type.getValueFields()); - entryMappings.put(keyTypeId, new EntryMapping(cacheName, dialect, type, keyKind, valKind)); + entryMappings.put(keyTypeId, new EntryMapping(cacheName, dialect, type, keyKind, valKind, sqlEscapeAll)); // Add one more binding to binary typeId for POJOs, // because object could be passed to store in binary format. @@ -736,7 +739,7 @@ private Map getOrCreateCacheMappings(@Nullable String cach valKind = valKind == TypeKind.POJO ? 
TypeKind.BINARY : valKind; - entryMappings.put(keyTypeId, new EntryMapping(cacheName, dialect, type, TypeKind.BINARY, valKind)); + entryMappings.put(keyTypeId, new EntryMapping(cacheName, dialect, type, TypeKind.BINARY, valKind, sqlEscapeAll)); } } @@ -1674,6 +1677,28 @@ public void setParallelLoadCacheMinimumThreshold(int parallelLoadCacheMinThresho this.parallelLoadCacheMinThreshold = parallelLoadCacheMinThreshold; } + /** + * If {@code true} all the SQL table and field names will be escaped with double quotes like + * ({@code "tableName"."fieldsName"}). This enforces case sensitivity for field names and + * also allows having special characters in table and field names. + * + * @return Flag value. + */ + public boolean isSqlEscapeAll() { + return sqlEscapeAll; + } + + /** + * If {@code true} all the SQL table and field names will be escaped with double quotes like + * ({@code "tableName"."fieldsName"}). This enforces case sensitivity for field names and + * also allows having special characters in table and field names. + * + * @param sqlEscapeAll Flag value. + */ + public void setSqlEscapeAll(boolean sqlEscapeAll) { + this.sqlEscapeAll = sqlEscapeAll; + } + /** * @return Ignite instance. */ @@ -1740,9 +1765,15 @@ protected static class EntryMapping { /** Database key columns. */ private final Collection keyCols; + /** Database key columns prepared for building SQL queries.. */ + private final Collection sqlKeyCols; + /** Database unique value columns. */ private final Collection cols; + /** Database unique value columns prepared for building SQL queries. */ + private final Collection sqlCols; + /** Select query columns index. */ private final Map loadColIdxs; @@ -1761,13 +1792,34 @@ protected static class EntryMapping { /** Full table name. */ private final String fullTblName; + /** Full table name prepared for building SQL queries. */ + private final String sqlFullTblName; + + /** + * Escape collection of column names. + * @param dialect Database dialect. + * @param cols Columns. + * @return Collection of escaped names. + */ + private static Collection escape(JdbcDialect dialect, Collection cols) { + Collection res = new ArrayList<>(cols.size()); + + for (String col : cols) + res.add(dialect.escape(col)); + + return res; + } + /** * @param cacheName Cache name. * @param dialect JDBC dialect. * @param typeMeta Type metadata. + * @param keyKind Type kind. + * @param valKind Value kind. + * @param escape Escape SQL identifiers flag. */ public EntryMapping(@Nullable String cacheName, JdbcDialect dialect, JdbcType typeMeta, - TypeKind keyKind, TypeKind valKind) { + TypeKind keyKind, TypeKind valKind, boolean escape) { this.cacheName = cacheName; this.dialect = dialect; @@ -1794,11 +1846,9 @@ public EntryMapping(@Nullable String cacheName, JdbcDialect dialect, JdbcType ty String tblName = typeMeta.getDatabaseTable(); - fullTblName = F.isEmpty(schema) ? tblName : schema + "." + tblName; - - Collection uniqValCols = databaseColumns(uniqValFlds); + Collection uniqueValCols = databaseColumns(uniqValFlds); - cols = F.concat(false, keyCols, uniqValCols); + cols = F.concat(false, keyCols, uniqueValCols); loadColIdxs = U.newHashMap(cols.size()); @@ -1807,23 +1857,41 @@ public EntryMapping(@Nullable String cacheName, JdbcDialect dialect, JdbcType ty for (String col : cols) loadColIdxs.put(col.toUpperCase(), idx++); - loadCacheQry = dialect.loadCacheQuery(fullTblName, cols); + fullTblName = F.isEmpty(schema) ? tblName : schema + "." 
+ tblName; + + Collection sqlUniqueValCols; + + if (escape) { + sqlFullTblName = F.isEmpty(schema) ? dialect.escape(tblName) : dialect.escape(schema) + "." + dialect.escape(tblName); + + sqlCols = escape(dialect, cols); + sqlKeyCols = escape(dialect, keyCols); + sqlUniqueValCols = escape(dialect, uniqueValCols); + } + else { + sqlFullTblName = fullTblName; + sqlCols = cols; + sqlKeyCols = keyCols; + sqlUniqueValCols = uniqueValCols; + } + + loadCacheQry = dialect.loadCacheQuery(sqlFullTblName, sqlCols); - loadCacheSelRangeQry = dialect.loadCacheSelectRangeQuery(fullTblName, keyCols); + loadCacheSelRangeQry = dialect.loadCacheSelectRangeQuery(sqlFullTblName, sqlKeyCols); - loadQrySingle = dialect.loadQuery(fullTblName, keyCols, cols, 1); + loadQrySingle = dialect.loadQuery(sqlFullTblName, sqlKeyCols, sqlCols, 1); - maxKeysPerStmt = dialect.getMaxParameterCount() / keyCols.size(); + maxKeysPerStmt = dialect.getMaxParameterCount() / sqlKeyCols.size(); - loadQry = dialect.loadQuery(fullTblName, keyCols, cols, maxKeysPerStmt); + loadQry = dialect.loadQuery(sqlFullTblName, sqlKeyCols, sqlCols, maxKeysPerStmt); - insQry = dialect.insertQuery(fullTblName, keyCols, uniqValCols); + insQry = dialect.insertQuery(sqlFullTblName, sqlKeyCols, sqlUniqueValCols); - updQry = dialect.updateQuery(fullTblName, keyCols, uniqValCols); + updQry = dialect.updateQuery(sqlFullTblName, sqlKeyCols, sqlUniqueValCols); - mergeQry = dialect.mergeQuery(fullTblName, keyCols, uniqValCols); + mergeQry = dialect.mergeQuery(sqlFullTblName, sqlKeyCols, sqlUniqueValCols); - remQry = dialect.removeQuery(fullTblName, keyCols); + remQry = dialect.removeQuery(sqlFullTblName, sqlKeyCols); } /** @@ -1884,7 +1952,7 @@ protected String loadQuery(int keyCnt) { if (keyCnt == 1) return loadQrySingle; - return dialect.loadQuery(fullTblName, keyCols, cols, keyCnt); + return dialect.loadQuery(sqlFullTblName, sqlKeyCols, sqlCols, keyCnt); } /** @@ -1895,7 +1963,7 @@ protected String loadQuery(int keyCnt) { * @return Query with range. */ protected String loadCacheRangeQuery(boolean appendLowerBound, boolean appendUpperBound) { - return dialect.loadCacheRangeQuery(fullTblName, keyCols, cols, appendLowerBound, appendUpperBound); + return dialect.loadCacheRangeQuery(sqlFullTblName, sqlKeyCols, sqlCols, appendLowerBound, appendUpperBound); } /** diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java index 0fca3abe5ef5a..8bc603148bf06 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java @@ -111,7 +111,7 @@ public class CacheJdbcPojoStoreFactory implements Factory implements Factory implements Factory dataSrcFactory; + /** Flag indicating that table and field names should be escaped in all SQL queries created by JDBC POJO store. */ + private boolean sqlEscapeAll; + /** Application context. 
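Following the constructor logic above, when the escape flag is set the mapping computes quoted copies of the table name and columns once and builds every query from them, so the per-operation code paths do not need to know whether escaping is on. A small illustration, assuming the double-quote escaping that BasicJdbcDialect provides later in this patch and hypothetical identifier values:

    JdbcDialect dialect = new BasicJdbcDialect(); // escape(ident) -> '"' + ident + '"'

    String schema = "PUBLIC";        // hypothetical
    String tblName = "Organization"; // hypothetical

    String fullTblName    = schema + "." + tblName;                                 // PUBLIC.Organization
    String sqlFullTblName = dialect.escape(schema) + "." + dialect.escape(tblName); // "PUBLIC"."Organization"

    // Key and value columns get the same treatment, e.g. Id -> "Id" and Name -> "Name",
    // so their exact case is preserved by databases that treat quoted identifiers as case sensitive.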
*/ @SpringApplicationContextResource private transient Object appCtx; @@ -148,12 +151,14 @@ public class CacheJdbcPojoStoreFactory implements Factory create() { CacheJdbcPojoStore store = new CacheJdbcPojoStore<>(); - store.setBatchSize(batchSizw); + store.setBatchSize(batchSize); store.setDialect(dialect); store.setMaximumPoolSize(maxPoolSize); - store.setMaximumWriteAttempts(maxWriteAttempts); + store.setMaximumWriteAttempts(maxWrtAttempts); store.setParallelLoadCacheMinimumThreshold(parallelLoadCacheMinThreshold); store.setTypes(types); + store.setHasher(hasher); + store.setSqlEscapeAll(sqlEscapeAll); if (dataSrc != null) store.setDataSource(dataSrc); @@ -201,7 +206,7 @@ public CacheJdbcPojoStoreFactory setDataSource(DataSource dataSrc) { * @return Maximum batch size. */ public int getBatchSize() { - return batchSizw; + return batchSize; } /** @@ -211,7 +216,7 @@ public int getBatchSize() { * @return {@code This} for chaining. */ public CacheJdbcPojoStoreFactory setBatchSize(int batchSize) { - this.batchSizw = batchSize; + this.batchSize = batchSize; return this; } @@ -285,7 +290,7 @@ public CacheJdbcPojoStoreFactory setMaximumPoolSize(int maxPoolSize) { * @return Maximum number of write attempts. */ public int getMaximumWriteAttempts() { - return maxWriteAttempts; + return maxWrtAttempts; } /** @@ -295,7 +300,7 @@ public int getMaximumWriteAttempts() { * @return {@code This} for chaining. */ public CacheJdbcPojoStoreFactory setMaximumWriteAttempts(int maxWrtAttempts) { - this.maxWriteAttempts = maxWrtAttempts; + this.maxWrtAttempts = maxWrtAttempts; return this; } @@ -386,6 +391,31 @@ public CacheJdbcPojoStoreFactory setDataSourceFactory(Factory return this; } + /** + * If {@code true} all the SQL table and field names will be escaped with double quotes like + * ({@code "tableName"."fieldsName"}). This enforces case sensitivity for field names and + * also allows having special characters in table and field names. + * + * @return Flag value. + */ + public boolean isSqlEscapeAll() { + return sqlEscapeAll; + } + + /** + * If {@code true} all the SQL table and field names will be escaped with double quotes like + * ({@code "tableName"."fieldsName"}). This enforces case sensitivity for field names and + * also allows having special characters in table and field names. + * + * @param sqlEscapeAll Flag value. + * @return {@code this} for chaining. 
+ */ + public CacheJdbcPojoStoreFactory setSqlEscapeAll(boolean sqlEscapeAll) { + this.sqlEscapeAll = sqlEscapeAll; + + return this; + } + /** {@inheritDoc} */ @Override public String toString() { return S.toString(CacheJdbcPojoStoreFactory.class, this); diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/BasicJdbcDialect.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/BasicJdbcDialect.java index cd9c9868a9495..5fa564b082fa5 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/BasicJdbcDialect.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/BasicJdbcDialect.java @@ -148,6 +148,11 @@ private static String where(Collection keyCols, int keyCnt) { return sb.toString(); } + /** {@inheritDoc} */ + @Override public String escape(String ident) { + return '"' + ident + '"'; + } + /** {@inheritDoc} */ @Override public String loadCacheSelectRangeQuery(String fullTblName, Collection keyCols) { String cols = mkString(keyCols, ","); @@ -245,8 +250,7 @@ private static String where(Collection keyCols, int keyCnt) { } /** {@inheritDoc} */ - @Override public String mergeQuery(String fullTblName, Collection keyCols, - Collection uniqCols) { + @Override public String mergeQuery(String fullTblName, Collection keyCols, Collection uniqCols) { return ""; } diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/DB2Dialect.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/DB2Dialect.java index 2a92bddf48f26..4bae14a443f5b 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/DB2Dialect.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/DB2Dialect.java @@ -34,9 +34,7 @@ public class DB2Dialect extends BasicJdbcDialect { } /** {@inheritDoc} */ - @Override public String mergeQuery(String fullTblName, Collection keyCols, - Collection uniqCols) { - + @Override public String mergeQuery(String fullTblName, Collection keyCols, Collection uniqCols) { Collection cols = F.concat(false, keyCols, uniqCols); String colsLst = mkString(cols, ", "); @@ -68,4 +66,4 @@ public class DB2Dialect extends BasicJdbcDialect { " INSERT (%s) VALUES (%s)", fullTblName, repeat("?", cols.size(), "", ",", ""), colsLst, match, setCols, colsLst, valuesCols); } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/H2Dialect.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/H2Dialect.java index 94d27a9727492..019a4b30fa9aa 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/H2Dialect.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/H2Dialect.java @@ -33,11 +33,10 @@ public class H2Dialect extends BasicJdbcDialect { } /** {@inheritDoc} */ - @Override public String mergeQuery(String fullTblName, Collection keyCols, - Collection uniqCols) { + @Override public String mergeQuery(String fullTblName, Collection keyCols, Collection uniqCols) { Collection cols = F.concat(false, keyCols, uniqCols); return String.format("MERGE INTO %s (%s) KEY (%s) VALUES(%s)", fullTblName, mkString(cols, ","), mkString(keyCols, ","), repeat("?", cols.size(), "", ", ", "")); } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/JdbcDialect.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/JdbcDialect.java 
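To make the effect concrete, this is roughly what the H2 merge statement above renders to for a table with key column Id and value columns Name and City once escaped identifiers are passed in (column set hypothetical, spacing approximated from the mkString/repeat helpers):

    // Without escaping:
    //   MERGE INTO PUBLIC.Organization (Id,Name,City) KEY (Id) VALUES(?, ?, ?)
    //
    // With sqlEscapeAll = true:
    //   MERGE INTO "PUBLIC"."Organization" ("Id","Name","City") KEY ("Id") VALUES(?, ?, ?)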
index 9daa00b36bec5..8c153b2db87af 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/JdbcDialect.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/JdbcDialect.java @@ -25,7 +25,13 @@ */ public interface JdbcDialect extends Serializable { /** - * Construct select count query. + * @param ident SQL identifier to escape. + * @return Escaped SQL identifier. + */ + public String escape(String ident); + + /** + * Construct query to get ranges bounds. * * @param fullTblName Full table name. * @param keyCols Database key columns for order. @@ -34,7 +40,7 @@ public interface JdbcDialect extends Serializable { public String loadCacheSelectRangeQuery(String fullTblName, Collection keyCols); /** - * Construct select count query. + * Construct load cache query over specified range. * * @param fullTblName Full table name. * @param keyCols Database key columns for order. @@ -43,8 +49,8 @@ public interface JdbcDialect extends Serializable { * @param appendUpperBound Need add upper bound for range. * @return Query for select count. */ - public String loadCacheRangeQuery(String fullTblName, - Collection keyCols, Iterable uniqCols, boolean appendLowerBound, boolean appendUpperBound); + public String loadCacheRangeQuery(String fullTblName, Collection keyCols, Iterable uniqCols, + boolean appendLowerBound, boolean appendUpperBound); /** * Construct load cache query. @@ -73,6 +79,7 @@ public String loadQuery(String fullTblName, Collection keyCols, Iterable * @param fullTblName Full table name. * @param keyCols Database key columns. * @param valCols Database value columns. + * @return Insert query. */ public String insertQuery(String fullTblName, Collection keyCols, Collection valCols); @@ -82,6 +89,7 @@ public String loadQuery(String fullTblName, Collection keyCols, Iterable * @param fullTblName Full table name. * @param keyCols Database key columns. * @param valCols Database value columns. + * @return Update query. */ public String updateQuery(String fullTblName, Collection keyCols, Iterable valCols); @@ -96,7 +104,7 @@ public String loadQuery(String fullTblName, Collection keyCols, Iterable * @param fullTblName Full table name. * @param keyCols Database key columns. * @param uniqCols Database unique value columns. - * @return Put query. + * @return Merge query. 
*/ public String mergeQuery(String fullTblName, Collection keyCols, Collection uniqCols); diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/MySQLDialect.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/MySQLDialect.java index 84e6d05f56f46..1a5730b675170 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/MySQLDialect.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/MySQLDialect.java @@ -28,6 +28,11 @@ public class MySQLDialect extends BasicJdbcDialect { /** */ private static final long serialVersionUID = 0L; + /** {@inheritDoc} */ + @Override public String escape(String ident) { + return '`' + ident + '`'; + } + /** {@inheritDoc} */ @Override public String loadCacheSelectRangeQuery(String fullTblName, Collection keyCols) { String cols = mkString(keyCols, ","); @@ -43,9 +48,7 @@ public class MySQLDialect extends BasicJdbcDialect { } /** {@inheritDoc} */ - @Override public String mergeQuery(String fullTblName, Collection keyCols, - Collection uniqCols) { - + @Override public String mergeQuery(String fullTblName, Collection keyCols, Collection uniqCols) { Collection cols = F.concat(false, keyCols, uniqCols); String updPart = mkString(uniqCols, new C1() { diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/OracleDialect.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/OracleDialect.java index b7a0400c869c2..a9efe0dbd2c28 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/OracleDialect.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/OracleDialect.java @@ -34,8 +34,7 @@ public class OracleDialect extends BasicJdbcDialect { } /** {@inheritDoc} */ - @Override public String mergeQuery(String fullTblName, Collection keyCols, - Collection uniqCols) { + @Override public String mergeQuery(String fullTblName, Collection keyCols, Collection uniqCols) { Collection cols = F.concat(false, keyCols, uniqCols); String colsLst = mkString(cols, ", "); @@ -72,4 +71,4 @@ public class OracleDialect extends BasicJdbcDialect { " WHEN NOT MATCHED THEN" + " INSERT (%s) VALUES (%s)", fullTblName, selCols, match, setCols, colsLst, valuesCols); } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/SQLServerDialect.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/SQLServerDialect.java index 9831aa88b1dba..883918f9803a4 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/SQLServerDialect.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/dialect/SQLServerDialect.java @@ -28,14 +28,18 @@ public class SQLServerDialect extends BasicJdbcDialect { /** */ private static final long serialVersionUID = 0L; + /** {@inheritDoc} */ + @Override public String escape(String ident) { + return '[' + ident + ']'; + } + /** {@inheritDoc} */ @Override public boolean hasMerge() { return true; } /** {@inheritDoc} */ - @Override public String mergeQuery(String fullTblName, Collection keyCols, - Collection uniqCols) { + @Override public String mergeQuery(String fullTblName, Collection keyCols, Collection uniqCols) { Collection cols = F.concat(false, keyCols, uniqCols); String colsLst = mkString(cols, ", "); @@ -64,7 +68,7 @@ public class SQLServerDialect extends BasicJdbcDialect { " WHEN MATCHED THEN" + " UPDATE SET %s" + " WHEN NOT MATCHED THEN" + - " INSERT 
(%s) VALUES (%s);", fullTblName, repeat("?", cols.size(), "", ",", ""), colsLst, - match, setCols, colsLst, valuesCols); + " INSERT (%s) VALUES (%s);", fullTblName, repeat("?", cols.size(), "", ",", ""), + colsLst, match, setCols, colsLst, valuesCols); } -} \ No newline at end of file +} diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java index e8592d74fc334..83065f1bbb146 100644 --- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java @@ -46,7 +46,7 @@ import static org.apache.ignite.cache.CacheMode.PARTITIONED; /** - * Class for {@code PojoCacheStore} tests. + * Class for {@link CacheJdbcPojoStore} tests. */ public abstract class CacheJdbcPojoStoreAbstractSelfTest extends GridCommonAbstractTest { /** IP finder. */ @@ -76,6 +76,13 @@ public abstract class CacheJdbcPojoStoreAbstractSelfTest extends GridCommonAbstr /** Flag indicating that classes for values available on class path or not. */ protected static boolean noValClasses; + /** + * @return Flag indicating that all internal SQL queries should use escaped identifiers. + */ + protected boolean sqlEscapeAll(){ + return false; + } + /** * @return Connection to test in-memory H2 database. * @throws SQLException if failed to connect. @@ -164,10 +171,13 @@ protected JdbcType[] storeTypes() { } storeTypes[0].setValueType("org.apache.ignite.cache.store.jdbc.model.Organization" + (noValClasses ? "1" : "")); + + boolean escape = sqlEscapeAll(); + storeTypes[0].setValueFields( - new JdbcTypeField(Types.INTEGER, "Id", Integer.class, "id"), - new JdbcTypeField(Types.VARCHAR, "Name", String.class, "name"), - new JdbcTypeField(Types.VARCHAR, "City", String.class, "city")); + new JdbcTypeField(Types.INTEGER, escape ? "ID" : "Id", Integer.class, "id"), + new JdbcTypeField(Types.VARCHAR, escape ? "NAME" : "Name", String.class, "name"), + new JdbcTypeField(Types.VARCHAR, escape ? "CITY" : "City", String.class, "city")); storeTypes[1] = new JdbcType(); storeTypes[1].setCacheName(CACHE_NAME); @@ -210,6 +220,7 @@ protected CacheConfiguration cacheConfiguration() throws Exception { storeFactory.setDialect(new H2Dialect()); storeFactory.setTypes(storeTypes()); storeFactory.setDataSourceFactory(new H2DataSourceFactory()); // H2 DataSource factory. + storeFactory.setSqlEscapeAll(sqlEscapeAll()); cc.setCacheStoreFactory(storeFactory); cc.setReadThrough(true); @@ -228,8 +239,6 @@ protected CacheConfiguration cacheConfiguration() throws Exception { protected void fillSampleDatabase(Connection conn) throws SQLException { info("Start to fill sample database..."); - Random rnd = new Random(); - PreparedStatement orgStmt = conn.prepareStatement("INSERT INTO Organization(id, name, city) VALUES (?, ?, ?)"); for (int i = 0; i < ORGANIZATION_CNT; i++) { @@ -249,6 +258,8 @@ protected void fillSampleDatabase(Connection conn) throws SQLException { PreparedStatement prnStmt = conn.prepareStatement( "INSERT INTO Person(id, org_id, birthday, name) VALUES (?, ?, ?, ?)"); + Random rnd = new Random(); + for (int i = 0; i < PERSON_CNT; i++) { prnStmt.setInt(1, i); prnStmt.setInt(2, i % 100); @@ -366,7 +377,7 @@ public void testLoadCachePrimitiveKeysTx() throws Exception { * * @throws Exception If failed. 
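For reference, the three quoting styles introduced in this commit differ only in the wrapping characters; H2, DB2 and Oracle keep the double-quote default they inherit from BasicJdbcDialect. A quick sketch of what escape("Person") returns for each dialect that defines it:

    new BasicJdbcDialect().escape("Person"); // "Person"
    new MySQLDialect().escape("Person");     // `Person`
    new SQLServerDialect().escape("Person"); // [Person]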
*/ - private void checkPut() throws Exception { + private void checkPutRemove() throws Exception { IgniteCache c1 = grid().cache(CACHE_NAME); Connection conn = getConnection(); @@ -419,6 +430,13 @@ private void checkPut() throws Exception { assertFalse("Unexpected more data in result set", rs.next()); + // Test remove. + c1.remove(key); + + rs = stmt.executeQuery(); + + assertFalse("Unexpected non-empty result set", rs.next()); + U.closeQuiet(rs); } finally { @@ -429,37 +447,37 @@ private void checkPut() throws Exception { /** * @throws Exception If failed. */ - public void testPutBuiltIn() throws Exception { + public void testPutRemoveBuiltIn() throws Exception { startTestGrid(true, false, false, false); - checkPut(); + checkPutRemove(); } /** * @throws Exception If failed. */ - public void testPut() throws Exception { + public void testPutRemove() throws Exception { startTestGrid(false, false, false, false); - checkPut(); + checkPutRemove(); } /** * @throws Exception If failed. */ - public void testPutTxBuiltIn() throws Exception { + public void testPutRemoveTxBuiltIn() throws Exception { startTestGrid(true, false, false, true); - checkPut(); + checkPutRemove(); } /** * @throws Exception If failed. */ - public void testPutTx() throws Exception { + public void testPutRemoveTx() throws Exception { startTestGrid(false, false, false, true); - checkPut(); + checkPutRemove(); } /** diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java index f998027cd0fbd..14c743cbb3bd5 100644 --- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java @@ -21,7 +21,7 @@ import org.apache.ignite.internal.binary.BinaryMarshaller; /** - * Class for {@code PojoCacheStore} tests. + * Test for {@link CacheJdbcPojoStore} with binary marshaller. */ public class CacheJdbcPojoStoreBinaryMarshallerSelfTest extends CacheJdbcPojoStoreAbstractSelfTest { /** {@inheritDoc} */ diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerWithSqlEscapeSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerWithSqlEscapeSelfTest.java new file mode 100644 index 0000000000000..829fffa442f49 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerWithSqlEscapeSelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.cache.store.jdbc; + +/** + * Test for {@link CacheJdbcPojoStore} with binary marshaller and enabled SQL escaping. + */ +public class CacheJdbcPojoStoreBinaryMarshallerWithSqlEscapeSelfTest extends CacheJdbcPojoStoreBinaryMarshallerSelfTest { + /** {@inheritDoc} */ + @Override protected boolean sqlEscapeAll(){ + return true; + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java index f40f7d7f4b8d1..3f6c9b4425612 100644 --- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java @@ -21,7 +21,7 @@ import org.apache.ignite.marshaller.optimized.OptimizedMarshaller; /** - * Class for {@code PojoCacheStore} tests. + * Test for {@link CacheJdbcPojoStore} with optimized marshaller. */ public class CacheJdbcPojoStoreOptimizedMarshallerSelfTest extends CacheJdbcPojoStoreAbstractSelfTest { /** {@inheritDoc} */ diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerWithSqlEscapeSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerWithSqlEscapeSelfTest.java new file mode 100644 index 0000000000000..d1ce726b53344 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerWithSqlEscapeSelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.jdbc; + +/** + * Test for {@link CacheJdbcPojoStore} with optimized marshaller and enabled SQL escaping. 
+ */ +public class CacheJdbcPojoStoreOptimizedMarshallerWithSqlEscapeSelfTest extends CacheJdbcPojoStoreOptimizedMarshallerSelfTest { + /** {@inheritDoc} */ + @Override protected boolean sqlEscapeAll(){ + return true; + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java index 9240ef5959f59..5ad4cb82d70ad 100755 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java @@ -32,8 +32,10 @@ import org.apache.ignite.cache.store.GridCacheLoadOnlyStoreAdapterSelfTest; import org.apache.ignite.cache.store.StoreResourceInjectionSelfTest; import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreBinaryMarshallerSelfTest; +import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreBinaryMarshallerWithSqlEscapeSelfTest; import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreMultitreadedSelfTest; import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreOptimizedMarshallerSelfTest; +import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreOptimizedMarshallerWithSqlEscapeSelfTest; import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreTest; import org.apache.ignite.cache.store.jdbc.GridCacheJdbcBlobStoreMultithreadedSelfTest; import org.apache.ignite.cache.store.jdbc.GridCacheJdbcBlobStoreSelfTest; @@ -243,7 +245,9 @@ public static TestSuite suite(Set ignoredTests) throws Exception { suite.addTestSuite(GridCacheJdbcBlobStoreMultithreadedSelfTest.class); suite.addTestSuite(CacheJdbcPojoStoreTest.class); suite.addTestSuite(CacheJdbcPojoStoreOptimizedMarshallerSelfTest.class); + suite.addTestSuite(CacheJdbcPojoStoreOptimizedMarshallerWithSqlEscapeSelfTest.class); suite.addTestSuite(CacheJdbcPojoStoreBinaryMarshallerSelfTest.class); + suite.addTestSuite(CacheJdbcPojoStoreBinaryMarshallerWithSqlEscapeSelfTest.class); suite.addTestSuite(CacheJdbcPojoStoreMultitreadedSelfTest.class); suite.addTestSuite(GridCacheBalancingStoreSelfTest.class); suite.addTestSuite(GridCacheAffinityApiSelfTest.class); diff --git a/modules/spring/src/test/config/jdbc-pojo-store-builtin.xml b/modules/spring/src/test/config/jdbc-pojo-store-builtin.xml index 3480ece45e152..d62e76b9edc21 100644 --- a/modules/spring/src/test/config/jdbc-pojo-store-builtin.xml +++ b/modules/spring/src/test/config/jdbc-pojo-store-builtin.xml @@ -70,7 +70,7 @@ - + @@ -83,7 +83,7 @@ - + @@ -91,7 +91,7 @@ - + @@ -112,7 +112,7 @@ - + @@ -125,7 +125,7 @@ - + @@ -133,7 +133,7 @@ - + diff --git a/modules/spring/src/test/config/jdbc-pojo-store-obj.xml b/modules/spring/src/test/config/jdbc-pojo-store-obj.xml index ee761d0ea9b30..83e05487aad1d 100644 --- a/modules/spring/src/test/config/jdbc-pojo-store-obj.xml +++ b/modules/spring/src/test/config/jdbc-pojo-store-obj.xml @@ -70,7 +70,7 @@ - + @@ -83,7 +83,7 @@ - + @@ -91,7 +91,7 @@ - + @@ -112,7 +112,7 @@ - + @@ -125,7 +125,7 @@ - + @@ -133,7 +133,7 @@ - + diff --git a/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactorySelfTest.java b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactorySelfTest.java index da7004261aaa8..6ecf67fdcd6a6 100644 --- a/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactorySelfTest.java +++ b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactorySelfTest.java @@ -34,7 +34,7 @@ import 
org.h2.jdbcx.JdbcDataSource; /** - * Test for Cache jdbc blob store factory. + * Test for Cache JDBC blob store factory. */ public class CacheJdbcBlobStoreFactorySelfTest extends GridCommonAbstractTest { /** Cache name. */ @@ -107,16 +107,16 @@ private CacheConfiguration cacheConfiguration() { /** * @param cache Ignite cache. - * @param dataSrcClass Data source class. + * @param dataSrcCls Data source class. * @throws Exception If store parameters is not the same as in configuration xml. */ - private void checkStore(IgniteCache cache, Class dataSrcClass) throws Exception { + private void checkStore(IgniteCache cache, Class dataSrcCls) throws Exception { CacheJdbcBlobStore store = (CacheJdbcBlobStore) cache.getConfiguration(CacheConfiguration.class). getCacheStoreFactory().create(); assertEquals(USER_NAME, GridTestUtils.getFieldValue(store, CacheJdbcBlobStore.class, "user")); - assertEquals(dataSrcClass, + assertEquals(dataSrcCls, GridTestUtils.getFieldValue(store, CacheJdbcBlobStore.class, "dataSrc").getClass()); } @@ -135,7 +135,7 @@ public DummyDataSource() { } /** {@inheritDoc} */ - @Override public Connection getConnection(String username, String password) throws SQLException { + @Override public Connection getConnection(String username, String pwd) throws SQLException { return null; } @@ -174,4 +174,4 @@ public DummyDataSource() { return false; } } -} \ No newline at end of file +} diff --git a/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactorySelfTest.java b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactorySelfTest.java index dfa14529c4f50..e3549355d6e3b 100644 --- a/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactorySelfTest.java +++ b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactorySelfTest.java @@ -17,7 +17,6 @@ package org.apache.ignite.cache.store.jdbc; -import java.io.Serializable; import java.util.Collection; import java.util.concurrent.Callable; import org.apache.ignite.Ignite; @@ -32,7 +31,7 @@ import org.h2.jdbcx.JdbcDataSource; /** - * Test for Cache jdbc blob store factory. + * Test for Cache JDBC POJO store factory. */ public class CacheJdbcPojoStoreFactorySelfTest extends GridCommonAbstractTest { /** Cache name. */ @@ -117,14 +116,14 @@ private CacheConfiguration cacheConfigurationH2Dialect() { /** * @param cache Ignite cache. - * @param dataSrcClass Data source class. + * @param dataSrcCls Data source class. * @throws Exception If store parameters is not the same as in configuration xml. */ - private void checkStore(IgniteCache cache, Class dataSrcClass) throws Exception { + private void checkStore(IgniteCache cache, Class dataSrcCls) throws Exception { CacheJdbcPojoStore store = (CacheJdbcPojoStore)cache.getConfiguration(CacheConfiguration.class). getCacheStoreFactory().create(); - assertEquals(dataSrcClass, + assertEquals(dataSrcCls, GridTestUtils.getFieldValue(store, CacheAbstractJdbcStore.class, "dataSrc").getClass()); } @@ -132,6 +131,11 @@ private void checkStore(IgniteCache cache, Class dataSrcClas * Dummy JDBC dialect that does nothing. 
*/ public static class DummyDialect implements JdbcDialect { + /** {@inheritDoc} */ + @Override public String escape(String ident) { + return null; + } + /** {@inheritDoc} */ @Override public String loadCacheSelectRangeQuery(String fullTblName, Collection keyCols) { return null; diff --git a/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlSelfTest.java b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlSelfTest.java index c712f882f8ecc..7de0cfe2d3727 100644 --- a/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlSelfTest.java +++ b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlSelfTest.java @@ -18,6 +18,7 @@ package org.apache.ignite.cache.store.jdbc; import java.net.URL; +import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteComponentType; import org.apache.ignite.internal.util.spring.IgniteSpringHelper; @@ -25,13 +26,14 @@ import org.apache.ignite.marshaller.Marshaller; /** - * Tests for {@code PojoCacheStore} created via XML. + * Tests for {@link CacheJdbcPojoStore} created via XML. */ public class CachePojoStoreXmlSelfTest extends CacheJdbcPojoStoreAbstractSelfTest { /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { - String path = builtinKeys ? "modules/spring/src/test/config/jdbc-pojo-store-builtin.xml" : - "modules/spring/src/test/config/jdbc-pojo-store-obj.xml"; + String path = builtinKeys + ? "modules/spring/src/test/config/jdbc-pojo-store-builtin.xml" + : "modules/spring/src/test/config/jdbc-pojo-store-obj.xml"; URL url = U.resolveIgniteUrl(path); @@ -39,6 +41,11 @@ public class CachePojoStoreXmlSelfTest extends CacheJdbcPojoStoreAbstractSelfTes IgniteConfiguration cfg = spring.loadConfigurations(url).get1().iterator().next(); + if (sqlEscapeAll()) { + for (CacheConfiguration ccfg : cfg.getCacheConfiguration()) + ((CacheJdbcPojoStoreFactory)ccfg.getCacheStoreFactory()).setSqlEscapeAll(true); + } + cfg.setGridName(gridName); return cfg; diff --git a/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlWithSqlEscapeSelfTest.java b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlWithSqlEscapeSelfTest.java new file mode 100644 index 0000000000000..e801682ce1cda --- /dev/null +++ b/modules/spring/src/test/java/org/apache/ignite/cache/store/jdbc/CachePojoStoreXmlWithSqlEscapeSelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.cache.store.jdbc; + +/** + * Tests for {@link CacheJdbcPojoStore} created via XML. 
+ */ +public class CachePojoStoreXmlWithSqlEscapeSelfTest extends CachePojoStoreXmlSelfTest { + /** {@inheritDoc} */ + @Override protected boolean sqlEscapeAll() { + return true; + } +} diff --git a/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java b/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java index 67b117dbee686..c4a4b75c5171d 100644 --- a/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java +++ b/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java @@ -22,6 +22,8 @@ import org.apache.ignite.cache.spring.SpringCacheManagerContextInjectionTest; import org.apache.ignite.cache.store.jdbc.CacheJdbcBlobStoreFactorySelfTest; import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactorySelfTest; +import org.apache.ignite.cache.store.jdbc.CachePojoStoreXmlSelfTest; +import org.apache.ignite.cache.store.jdbc.CachePojoStoreXmlWithSqlEscapeSelfTest; import org.apache.ignite.cache.store.spring.CacheSpringStoreSessionListenerSelfTest; import org.apache.ignite.internal.GridFactorySelfTest; import org.apache.ignite.internal.GridSpringBeanSerializationSelfTest; @@ -64,8 +66,9 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(CacheSpringStoreSessionListenerSelfTest.class); suite.addTestSuite(CacheJdbcBlobStoreFactorySelfTest.class); - suite.addTestSuite(CacheJdbcPojoStoreFactorySelfTest.class); + suite.addTestSuite(CachePojoStoreXmlSelfTest.class); + suite.addTestSuite(CachePojoStoreXmlWithSqlEscapeSelfTest.class); suite.addTest(new TestSuite(GridSpringTransactionManagerSelfTest.class)); From b0e56afb758df6c36d3dcd7c78da496746b26368 Mon Sep 17 00:00:00 2001 From: "Andrey V. Mashenkov" Date: Thu, 22 Sep 2016 13:48:51 +0300 Subject: [PATCH 14/69] Zero Integer compaction added. --- .../apache/ignite/IgniteSystemProperties.java | 7 +- .../internal/binary/BinaryObjectImpl.java | 5 ++ .../binary/BinaryObjectOffheapImpl.java | 5 ++ .../internal/binary/BinaryReaderExImpl.java | 51 +++++------- .../ignite/internal/binary/BinaryUtils.java | 3 + .../internal/binary/BinaryWriterExImpl.java | 17 ++-- .../internal/binary/GridBinaryMarshaller.java | 3 + .../binary/builder/BinaryBuilderReader.java | 15 ++-- .../binary/BinaryFieldsAbstractSelfTest.java | 4 +- ...BinaryMarshallerCompactZeroesSelfTest.java | 78 +------------------ .../binary/BinaryMarshallerSelfTest.java | 19 +++++ 11 files changed, 86 insertions(+), 121 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java index f8ca3fbbdb64c..6a2bb68b7faed 100644 --- a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java +++ b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java @@ -474,13 +474,14 @@ public final class IgniteSystemProperties { public static final String IGNITE_BINARY_DONT_WRAP_TREE_STRUCTURES = "IGNITE_BINARY_DONT_WRAP_TREE_STRUCTURES"; /** - * When set to {@code true} Long zeroes will be encoded with with special type and no value included, - * that saves 8-bytes per field. Otherwise Long zeroes will be encoded in old manner, preserving compatibility. + * When set to {@code true} Integer and Long zeroes will be encoded with with special type and + * with no value included, that saves 8-bytes per field. + * Otherwise Integer\Long zeroes will be encoded in old manner, preserving compatibility. *

* @deprecated Should be removed in Apache Ignite 2.0. */ @Deprecated - public static final String IGNITE_BINARY_COMPACT_LONG_ZEROES = "IGNITE_BINARY_COMPACT_LONG_ZEROES"; + public static final String IGNITE_BINARY_COMPACT_INT_ZEROES = "IGNITE_BINARY_COMPACT_INT_ZEROES"; /** * Enforces singleton. diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java index ae864334bbc1f..4564e518767d4 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java @@ -321,6 +321,11 @@ else if (fieldOffLen == BinaryUtils.OFFSET_2) break; + case GridBinaryMarshaller.ZERO_INT: + val = 0; + + break; + case GridBinaryMarshaller.LONG: val = BinaryPrimitives.readLong(arr, fieldPos + 1); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java index 16cc298c847d0..aa1d80999ce82 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java @@ -199,6 +199,11 @@ else if (fieldOffLen == BinaryUtils.OFFSET_2) break; + case GridBinaryMarshaller.ZERO_INT: + val = 0; + + break; + case GridBinaryMarshaller.LONG: val = BinaryPrimitives.readLong(ptr, fieldPos + 1); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java index b29230cb5d3d2..bbacff59244fb 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java @@ -27,6 +27,7 @@ import java.util.Date; import java.util.Map; import java.util.UUID; + import org.apache.ignite.binary.BinaryCollectionFactory; import org.apache.ignite.binary.BinaryInvalidTypeException; import org.apache.ignite.binary.BinaryMapFactory; @@ -80,6 +81,7 @@ import static org.apache.ignite.internal.binary.GridBinaryMarshaller.UNREGISTERED_TYPE_ID; import static org.apache.ignite.internal.binary.GridBinaryMarshaller.UUID; import static org.apache.ignite.internal.binary.GridBinaryMarshaller.UUID_ARR; +import static org.apache.ignite.internal.binary.GridBinaryMarshaller.ZERO_INT; import static org.apache.ignite.internal.binary.GridBinaryMarshaller.ZERO_LONG; /** @@ -650,7 +652,7 @@ char readChar(int fieldId) throws BinaryObjectException { /** {@inheritDoc} */ @Override public int readInt(String fieldName) throws BinaryObjectException { - return findFieldByName(fieldName) && checkFlagNoHandles(INT) == Flag.NORMAL ? in.readInt() : 0; + return findFieldByName(fieldName) && checkFlagNoHandles(INT,ZERO_INT) == INT ? in.readInt() : 0; } /** @@ -659,7 +661,7 @@ char readChar(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException If failed. */ int readInt(int fieldId) throws BinaryObjectException { - return findFieldById(fieldId) && checkFlagNoHandles(INT) == Flag.NORMAL ? in.readInt() : 0; + return findFieldById(fieldId) && checkFlagNoHandles(INT,ZERO_INT) == INT ? in.readInt() : 0; } /** @@ -668,7 +670,16 @@ int readInt(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException In case of error. 
*/ @Nullable Integer readIntNullable(int fieldId) throws BinaryObjectException { - return findFieldById(fieldId) && checkFlagNoHandles(INT) == Flag.NORMAL ? in.readInt() : null; + if(findFieldById(fieldId)) { + switch (checkFlagNoHandles(INT, ZERO_INT)) { + case INT: + return in.readInt(); + + case ZERO_INT: + return 0; + } + } + return null; } /** {@inheritDoc} */ @@ -706,18 +717,7 @@ int readInt(int fieldId) throws BinaryObjectException { /** {@inheritDoc} */ @Override public long readLong(String fieldName) throws BinaryObjectException { - if (findFieldByName(fieldName)) { - switch (checkFlagNoHandles(LONG, ZERO_LONG)) { - case LONG: - return in.readLong(); - - case ZERO_LONG: - case NULL: - return 0L; - } - } - - return 0L; + return (findFieldByName(fieldName) && checkFlagNoHandles(LONG, ZERO_LONG) == LONG) ? in.readLong() : 0L; } /** @@ -726,18 +726,7 @@ int readInt(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException If failed. */ long readLong(int fieldId) throws BinaryObjectException { - if (findFieldById(fieldId)) { - switch (checkFlagNoHandles(LONG, ZERO_LONG)) { - case LONG: - return in.readLong(); - - case ZERO_LONG: - case NULL: - return 0L; - } - } - - return 0L; + return (findFieldById(fieldId) && checkFlagNoHandles(LONG, ZERO_LONG) == LONG) ? in.readLong() : 0L; } /** @@ -753,9 +742,6 @@ long readLong(int fieldId) throws BinaryObjectException { case ZERO_LONG: return 0L; - - case NULL: - return null; } } @@ -1577,6 +1563,11 @@ private byte checkFlagNoHandles(byte... expFlags) { break; + case ZERO_INT: + obj = 0; + + break; + case LONG: obj = in.readLong(); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java index 7d4512ce50349..355666ff7a886 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java @@ -1719,6 +1719,9 @@ public static Object doReadOptimized(BinaryInputStream in, BinaryContext ctx, @N case GridBinaryMarshaller.INT: return in.readInt(); + case GridBinaryMarshaller.ZERO_INT: + return 0; + case GridBinaryMarshaller.LONG: return in.readLong(); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java index b7a442d217452..b8a8630511667 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java @@ -930,10 +930,18 @@ void writeShortField(@Nullable Short val) { * @param val Value. */ void writeIntFieldPrimitive(int val) { - out.unsafeEnsure(1 + 4); + if (val == 0 && + IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, false)) { + out.unsafeEnsure(1); + + out.unsafeWriteByte(GridBinaryMarshaller.ZERO_INT); + } + else { + out.unsafeEnsure(1 + 4); - out.unsafeWriteByte(GridBinaryMarshaller.INT); - out.unsafeWriteInt(val); + out.unsafeWriteByte(GridBinaryMarshaller.INT); + out.unsafeWriteInt(val); + } } /** @@ -950,9 +958,8 @@ void writeIntField(@Nullable Integer val) { * @param val Value. 
*/ void writeLongFieldPrimitive(long val) { - if (val == 0L && - IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES, false)) { + IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, false)) { out.unsafeEnsure(1); out.unsafeWriteByte(GridBinaryMarshaller.ZERO_LONG); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java index b97e2354a2486..5c1976204b132 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/GridBinaryMarshaller.java @@ -54,6 +54,9 @@ public class GridBinaryMarshaller { /** */ public static final byte INT = 3; + /** */ + public static final byte ZERO_INT = 37; + /** */ public static final byte LONG = 4; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java index dd3aba3e26fb0..4359b96b496f7 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java @@ -223,6 +223,8 @@ public void skipValue() { switch (type) { case GridBinaryMarshaller.NULL: + case GridBinaryMarshaller.ZERO_INT: + case GridBinaryMarshaller.ZERO_LONG: return; case GridBinaryMarshaller.OBJ: @@ -260,11 +262,6 @@ public void skipValue() { break; - case GridBinaryMarshaller.ZERO_LONG: - len = 0; - - break; - case GridBinaryMarshaller.BYTE_ARR: case GridBinaryMarshaller.BOOLEAN_ARR: len = 4 + readLength(); @@ -412,6 +409,9 @@ public Object getValueQuickly(int pos, int len) { case GridBinaryMarshaller.INT: return BinaryPrimitives.readInt(arr, pos + 1); + case GridBinaryMarshaller.ZERO_INT: + return 0; + case GridBinaryMarshaller.LONG: return BinaryPrimitives.readLong(arr, pos + 1); @@ -546,6 +546,11 @@ public Object parseValue() { case GridBinaryMarshaller.INT: return readInt(); + case GridBinaryMarshaller.ZERO_INT: + plainLazyValLen = 0; + + break; + case GridBinaryMarshaller.LONG: plainLazyValLen = 8; diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java index 0bb9d64d57e31..e3ae3946e64fa 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java @@ -209,12 +209,12 @@ public void testLong() throws Exception { */ @Deprecated public void testLongZero() throws Exception { - System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES, "true"); + System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, "true"); check("fLong"); check("fLongZero"); - System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES); + System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES); } /** diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java index 6ee6c38b38062..a034c65417c5f 100644 --- 
a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java @@ -17,82 +17,8 @@ package org.apache.ignite.internal.binary; -import junit.framework.Assert; -import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteSystemProperties; -import org.apache.ignite.binary.BinaryBasicIdMapper; -import org.apache.ignite.binary.BinaryBasicNameMapper; -import org.apache.ignite.binary.BinaryCollectionFactory; -import org.apache.ignite.binary.BinaryField; -import org.apache.ignite.binary.BinaryIdMapper; -import org.apache.ignite.binary.BinaryMapFactory; -import org.apache.ignite.binary.BinaryNameMapper; -import org.apache.ignite.binary.BinaryObject; -import org.apache.ignite.binary.BinaryObjectBuilder; -import org.apache.ignite.binary.BinaryObjectException; -import org.apache.ignite.binary.BinaryRawReader; -import org.apache.ignite.binary.BinaryRawWriter; -import org.apache.ignite.binary.BinaryReader; -import org.apache.ignite.binary.BinarySerializer; -import org.apache.ignite.binary.BinaryType; -import org.apache.ignite.binary.BinaryTypeConfiguration; -import org.apache.ignite.binary.BinaryWriter; -import org.apache.ignite.binary.Binarylizable; -import org.apache.ignite.configuration.BinaryConfiguration; -import org.apache.ignite.configuration.IgniteConfiguration; -import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl; -import org.apache.ignite.internal.processors.cache.CacheObjectContext; -import org.apache.ignite.internal.util.GridUnsafe; -import org.apache.ignite.internal.util.IgniteUtils; -import org.apache.ignite.internal.util.lang.GridMapEntry; -import org.apache.ignite.internal.util.typedef.F; -import org.apache.ignite.internal.util.typedef.internal.S; -import org.apache.ignite.internal.util.typedef.internal.U; -import org.apache.ignite.logger.NullLogger; -import org.apache.ignite.marshaller.MarshallerContextTestImpl; -import org.apache.ignite.testframework.GridTestUtils; -import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.jsr166.ConcurrentHashMap8; -import java.io.Externalizable; -import java.io.IOException; -import java.io.ObjectInput; -import java.io.ObjectOutput; -import java.io.Serializable; -import java.lang.reflect.Field; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.lang.reflect.Proxy; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.net.InetSocketAddress; -import java.sql.Timestamp; -import java.util.AbstractQueue; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Queue; -import java.util.TreeMap; -import java.util.TreeSet; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentSkipListSet; - -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.apache.ignite.internal.binary.streams.BinaryMemoryAllocator.INSTANCE; import static 
org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertNotEquals; @@ -108,12 +34,12 @@ public class BinaryMarshallerCompactZeroesSelfTest extends BinaryMarshallerSelfT @Override protected void beforeTestsStarted() throws Exception { super.beforeTestsStarted(); - System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES, "true"); + System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, "true"); } @Override protected void afterTestsStopped() throws Exception { super.afterTestsStopped(); - System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_LONG_ZEROES); + System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES); } } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java index 398cbc9d6962f..09f48c0c44325 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java @@ -127,6 +127,7 @@ public void testShort() throws Exception { * @throws Exception If failed. */ public void testInt() throws Exception { + assertEquals(0, marshalUnmarshal(0).intValue()); assertEquals(100, marshalUnmarshal(100).intValue()); } @@ -134,6 +135,7 @@ public void testInt() throws Exception { * @throws Exception If failed. */ public void testLong() throws Exception { + assertEquals(0L, marshalUnmarshal(0L).longValue()); assertEquals(100L, marshalUnmarshal(100L).longValue()); } @@ -2299,6 +2301,7 @@ public void testBinaryCopyObject() throws Exception { SimpleObject newObj = new SimpleObject(); newObj.i = 12345; + newObj.zi = 0; newObj.fArr = new float[] {5, 8, 0}; newObj.str = "newStr"; @@ -2328,6 +2331,7 @@ public void testBinaryCopyNonPrimitives() throws Exception { SimpleObject newObj = new SimpleObject(); newObj.i = 12345; + newObj.zi = 0; newObj.fArr = new float[] {5, 8, 0}; newObj.str = "newStr"; @@ -3320,7 +3324,9 @@ private SimpleObject simpleObject() { inner.b = 1; inner.s = 1; inner.i = 1; + inner.zi = 0; inner.l = 1; + inner.zl = 0L; inner.f = 1.1f; inner.d = 1.1d; inner.c = 1; @@ -3360,7 +3366,9 @@ private SimpleObject simpleObject() { outer.b = 2; outer.s = 2; outer.i = 2; + outer.zi = 0; outer.l = 2; + outer.zl = 0L; outer.f = 2.2f; outer.d = 2.2d; outer.c = 2; @@ -3408,6 +3416,7 @@ private TestBinary binaryObject() { innerSimple.b = 1; innerSimple.s = 1; innerSimple.i = 1; + innerSimple.zi = 0; innerSimple.l = 1; innerSimple.zl = 0L; innerSimple.f = 1.1f; @@ -3448,6 +3457,7 @@ private TestBinary binaryObject() { innerBinary.b = 2; innerBinary.s = 2; innerBinary.i = 2; + innerBinary.zi = 0; innerBinary.l = 2; innerBinary.zl = 0L; innerBinary.f = 2.2f; @@ -3524,6 +3534,7 @@ private TestBinary binaryObject() { outer.b = 4; outer.s = 4; outer.i = 4; + outer.zi = 0; outer.l = 4; outer.zl = 0L; outer.f = 4.4f; @@ -3619,6 +3630,9 @@ private static class SimpleObject { /** */ private int i; + /** */ + private int zi; + /** */ private long l; @@ -3740,6 +3754,9 @@ private static class TestBinary implements Binarylizable { /** */ private int i; + /** */ + private int zi; + /** */ private int iRaw; @@ -3913,6 +3930,7 @@ private static class TestBinary implements Binarylizable { writer.writeByte("_b", b); writer.writeShort("_s", s); writer.writeInt("_i", i); + writer.writeInt("_zi", zi); writer.writeLong("_l", l); writer.writeLong("_zl", zl); 
writer.writeFloat("_f", f); @@ -3981,6 +3999,7 @@ private static class TestBinary implements Binarylizable { b = reader.readByte("_b"); s = reader.readShort("_s"); i = reader.readInt("_i"); + zi = reader.readInt("_zi"); l = reader.readLong("_l"); zl = reader.readLong("_zl"); f = reader.readFloat("_f"); From 66da1a640903c720f0d87272ce13900f861fbd10 Mon Sep 17 00:00:00 2001 From: "Andrey V. Mashenkov" Date: Thu, 22 Sep 2016 13:54:14 +0300 Subject: [PATCH 15/69] Minors --- .../binary/BinaryFieldsAbstractSelfTest.java | 19 +++++++++++++++++++ .../binary/BinaryMarshallerSelfTest.java | 11 +++++++++++ 2 files changed, 30 insertions(+) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java index e3ae3946e64fa..e16bf64618aef 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java @@ -182,6 +182,23 @@ public void testInt() throws Exception { check("fInt"); } + /** + * Test int field within compact mode on. + * Compact Integer zeroes should become default mode in Apache Ignite 2.0, so this test will be redundant. + * + * @deprecated Should be removed in Apache Ignite 2.0. + * @throws Exception If failed. + */ + @Deprecated + public void testOIntZero() throws Exception { + System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, "true"); + + check("fInt"); + check("fIntZero"); + + System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES); + } + /** * Test int array field. * @@ -591,6 +608,7 @@ public static class TestObject { public short fShort; public char fChar; public int fInt; + public int fIntZero; public long fLong; public long fLongZero; public float fFloat; @@ -644,6 +662,7 @@ public TestObject(int ignore) { fShort = 2; fChar = 3; fInt = 4; + fIntZero = 0; fLong = 5; fLongZero = 0; fFloat = 6.6f; diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java index 09f48c0c44325..2c0ead3b3dcc3 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java @@ -634,7 +634,9 @@ public void testSimpleObject() throws Exception { assertEquals(obj.b, (byte)po.field("b")); assertEquals(obj.s, (short)po.field("s")); assertEquals(obj.i, (int)po.field("i")); + assertEquals(obj.zi, (int)po.field("zi")); assertEquals(obj.l, (long)po.field("l")); + assertEquals(obj.zl, (long)po.field("zl")); assertEquals(obj.f, (float)po.field("f"), 0); assertEquals(obj.d, (double)po.field("d"), 0); assertEquals(obj.c, (char)po.field("c")); @@ -669,7 +671,9 @@ public void testSimpleObject() throws Exception { assertEquals(obj.inner.b, (byte)innerPo.field("b")); assertEquals(obj.inner.s, (short)innerPo.field("s")); assertEquals(obj.inner.i, (int)innerPo.field("i")); + assertEquals(obj.inner.zi, (int)innerPo.field("zi")); assertEquals(obj.inner.l, (long)innerPo.field("l")); + assertEquals(obj.inner.zl, (long)innerPo.field("zl")); assertEquals(obj.inner.f, (float)innerPo.field("f"), 0); assertEquals(obj.inner.d, (double)innerPo.field("d"), 0); assertEquals(obj.inner.c, (char)innerPo.field("c")); @@ -720,6 +724,7 @@ public 
void testBinary() throws Exception { assertEquals(obj.b, (byte)po.field("_b")); assertEquals(obj.s, (short)po.field("_s")); assertEquals(obj.i, (int)po.field("_i")); + assertEquals(obj.zi, (int)po.field("_zi")); assertEquals(obj.l, (long)po.field("_l")); assertEquals(obj.zl, (long)po.field("_zl")); assertEquals(obj.f, (float)po.field("_f"), 0); @@ -755,7 +760,9 @@ public void testBinary() throws Exception { assertEquals(obj.simple.b, (byte)simplePo.field("b")); assertEquals(obj.simple.s, (short)simplePo.field("s")); assertEquals(obj.simple.i, (int)simplePo.field("i")); + assertEquals(obj.simple.zi, (int)simplePo.field("zi")); assertEquals(obj.simple.l, (long)simplePo.field("l")); + assertEquals(obj.simple.zl, (long)simplePo.field("zl")); assertEquals(obj.simple.f, (float)simplePo.field("f"), 0); assertEquals(obj.simple.d, (double)simplePo.field("d"), 0); assertEquals(obj.simple.c, (char)simplePo.field("c")); @@ -793,7 +800,9 @@ public void testBinary() throws Exception { assertEquals(obj.binary.b, (byte)binaryPo.field("_b")); assertEquals(obj.binary.s, (short)binaryPo.field("_s")); assertEquals(obj.binary.i, (int)binaryPo.field("_i")); + assertEquals(obj.binary.zi, (int)binaryPo.field("_zi")); assertEquals(obj.binary.l, (long)binaryPo.field("_l")); + assertEquals(obj.binary.zl, (long)binaryPo.field("_zl")); assertEquals(obj.binary.f, (float)binaryPo.field("_f"), 0); assertEquals(obj.binary.d, (double)binaryPo.field("_d"), 0); assertEquals(obj.binary.c, (char)binaryPo.field("_c")); @@ -957,7 +966,9 @@ private void checkSimpleObjectData(SimpleObject obj, BinaryObject po) { assertEquals(obj.b, (byte)po.field("b")); assertEquals(obj.s, (short)po.field("s")); assertEquals(obj.i, (int)po.field("i")); + assertEquals(obj.zi, (int)po.field("zi")); assertEquals(obj.l, (long)po.field("l")); + assertEquals(obj.zl, (long)po.field("zl")); assertEquals(obj.f, (float)po.field("f"), 0); assertEquals(obj.d, (double)po.field("d"), 0); assertEquals(obj.c, (char)po.field("c")); From 9666dc49d704c32a4b0b4b7920af62b78af69298 Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Fri, 23 Sep 2016 10:18:15 +0700 Subject: [PATCH 16/69] IGNITE-3937 implemented support for unsigned types in MySQL. Minor fix. --- .../ignite/schema/parser/dialect/JdbcMetadataDialect.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java index f6c27f29fc60b..07361ba14527d 100644 --- a/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java +++ b/modules/schema-import-db/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java @@ -118,6 +118,8 @@ protected boolean useSchema() { Set sys = systemSchemas(); + Collection unsignedTypes = unsignedTypes(dbMeta); + if (schemas.isEmpty()) schemas.add(null); @@ -147,8 +149,6 @@ protected boolean useSchema() { Collection cols = new ArrayList<>(); - Collection unsignedTypes = unsignedTypes(dbMeta); - try (ResultSet colsRs = dbMeta.getColumns(tblCatalog, tblSchema, tblName, null)) { while (colsRs.next()) { String colName = colsRs.getString(COL_NAME_IDX); From 29f508b17eb383a1c910c72e7d5056564acc37c2 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Fri, 23 Sep 2016 10:59:55 +0300 Subject: [PATCH 17/69] IGNITE-3925: Output process ID to the log on node start. 
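
The PID is resolved through U.jvmPid(), which returns -1 when the identifier cannot be determined; the log statement added below prints "N/A" in that case. A minimal sketch of the conventional pre-Java-9 approach such a helper is assumed to follow (parsing the "pid@hostname" form of RuntimeMXBean#getName()); the JvmPid class name here is purely illustrative, not the actual Ignite utility:

    import java.lang.management.ManagementFactory;

    final class JvmPid {
        /** Returns the current JVM PID, or -1 if it cannot be parsed. */
        static int jvmPid() {
            // RuntimeMXBean#getName() conventionally returns "<pid>@<hostname>".
            String name = ManagementFactory.getRuntimeMXBean().getName();

            int idx = name.indexOf('@');

            if (idx <= 0)
                return -1;

            try {
                return Integer.parseInt(name.substring(0, idx));
            }
            catch (NumberFormatException ignored) {
                return -1;
            }
        }
    }

Returning -1 rather than throwing keeps node startup logging best-effort, which matches the "N/A" fallback used by the change below.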
--- .../main/java/org/apache/ignite/internal/IgniteKernal.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java index b85692e19d719..827b54a262e97 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java @@ -1781,6 +1781,11 @@ private void ackOsInfo() { if (log.isInfoEnabled()) { log.info("OS: " + U.osString()); log.info("OS user: " + System.getProperty("user.name")); + + int jvmPid = U.jvmPid(); + + if (log.isInfoEnabled()) + log.info("PID: " + (jvmPid == -1 ? "N/A" : jvmPid)); } } From 40f647258a10fa8b42ac6fda67d6fa4db71b8100 Mon Sep 17 00:00:00 2001 From: Anton Vinogradov Date: Thu, 8 Sep 2016 18:58:31 +0300 Subject: [PATCH 18/69] IGNITE-3874 sync putIfAbsent forever blocked on all nodes after one node is stopped --- .../processors/cache/transactions/IgniteTxManager.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxManager.java index a1580a58fa413..c72d7f781d938 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxManager.java @@ -374,7 +374,7 @@ private boolean salvageTx(IgniteInternalTx tx, boolean warn, IgniteInternalTx.Fi "crashed or left grid: " + CU.txString(tx)); } } - catch (IgniteTxOptimisticCheckedException ignore) { + catch (IgniteCheckedException ignore) { if (log.isDebugEnabled()) log.debug("Optimistic failure while invalidating transaction (will rollback): " + tx.xidVersion()); @@ -386,9 +386,6 @@ private boolean salvageTx(IgniteInternalTx tx, boolean warn, IgniteInternalTx.Fi U.error(log, "Failed to rollback transaction: " + tx.xidVersion(), e); } } - catch (IgniteCheckedException e) { - U.error(log, "Failed to invalidate transaction: " + tx, e); - } } else if (state == MARKED_ROLLBACK) { try { From 062ecd75908131084e27883f3f160f3504e92165 Mon Sep 17 00:00:00 2001 From: tledkov-gridgain Date: Fri, 23 Sep 2016 14:05:17 +0300 Subject: [PATCH 19/69] IGNITE-3931: IGFS: Test fixes for PROXY mode. This closes #1102. 
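
Besides the new PROXY-mode test classes, the local secondary file system's recursive delete is made tolerant of entries that vanish while the tree is being removed (the new deleteIfExists flag treats "delete failed but the file no longer exists" as success), and the time-update path in IgfsImpl now delegates setTimes() directly to the secondary file system in PROXY mode, failing with UnsupportedOperationException when the secondary does not implement IgfsSecondaryFileSystemV2. A minimal standalone illustration of the tolerant-delete idea (a sketch only, not the patched deleteRecursive(File, boolean), and without the symlink handling the real method keeps):

    import java.io.File;

    final class TolerantDelete {
        /** Deletes a tree, treating a concurrently removed entry as success. */
        static boolean delete(File f) {
            File[] entries = f.listFiles();

            if (entries != null) {
                for (File entry : entries) {
                    if (!delete(entry))
                        return false;
                }
            }

            // Either this call deleted the entry, or somebody else already did.
            return f.delete() || !f.exists();
        }
    }
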
--- .../local/LocalIgfsSecondaryFileSystem.java | 13 +- .../internal/processors/igfs/IgfsImpl.java | 16 +- .../igfs/IgfsAbstractBaseSelfTest.java | 2 +- .../processors/igfs/IgfsAbstractSelfTest.java | 26 +- ...condaryFileSystemDualAbstractSelfTest.java | 3 +- ...econdaryFileSystemProxyClientSelfTest.java | 28 ++ ...LocalSecondaryFileSystemProxySelfTest.java | 272 ++++++++++++++++++ .../processors/igfs/IgfsProxySelfTest.java | 2 +- .../testsuites/IgniteIgfsTestSuite.java | 8 + 9 files changed, 345 insertions(+), 25 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxyClientSelfTest.java create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java b/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java index c84fcf2adefb4..8dd4fdac14728 100644 --- a/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java +++ b/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java @@ -115,40 +115,41 @@ else if(!srcFile.renameTo(destFile)) if (!recursive) return f.delete(); else - return deleteRecursive(f); + return deleteRecursive(f, false); } /** * Delete directory recursively. * * @param f Directory. + * @param deleteIfExists Ignore delete errors if the file doesn't exist. * @return {@code true} if successful. */ - private boolean deleteRecursive(File f) { + private boolean deleteRecursive(File f, boolean deleteIfExists) { BasicFileAttributes attrs; try { attrs = Files.readAttributes(f.toPath(), BasicFileAttributes.class, LinkOption.NOFOLLOW_LINKS); } catch (IOException ignore) { - return false; + return deleteIfExists && !f.exists(); } if (!attrs.isDirectory() || attrs.isSymbolicLink()) - return f.delete(); + return f.delete() || (deleteIfExists && !f.exists()); File[] entries = f.listFiles(); if (entries != null) { for (File entry : entries) { - boolean res = deleteRecursive(entry); + boolean res = deleteRecursive(entry, true); if (!res) return false; } } - return f.delete(); + return f.delete() || (deleteIfExists && !f.exists()); } /** {@inheritDoc} */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java index bee9d9a72a53c..6ff1f8fa3eddb 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java @@ -1232,10 +1232,20 @@ private IgfsOutputStream create0( @Override public Void call() throws Exception { IgfsMode mode = resolveMode(path); - boolean useSecondary = IgfsUtils.isDualMode(mode) && secondaryFs instanceof IgfsSecondaryFileSystemV2; + if (mode == PROXY) { + if (secondaryFs instanceof IgfsSecondaryFileSystemV2) + ((IgfsSecondaryFileSystemV2)secondaryFs).setTimes(path, accessTime, modificationTime); + else + throw new UnsupportedOperationException("setTimes is not supported in PROXY mode for " + + "this secondary file system,"); + } + else { + boolean useSecondary = + IgfsUtils.isDualMode(mode) && secondaryFs instanceof IgfsSecondaryFileSystemV2; - meta.updateTimes(path, accessTime, modificationTime, - useSecondary ? 
(IgfsSecondaryFileSystemV2)secondaryFs : null); + meta.updateTimes(path, accessTime, modificationTime, + useSecondary ? (IgfsSecondaryFileSystemV2) secondaryFs : null); + } return null; } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java index 14a653bf83b68..58c4c50707656 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java @@ -713,7 +713,7 @@ protected void checkNotExist(IgfsImpl igfs, IgfsSecondaryFileSystemTestAdapter i throws Exception { checkNotExist(igfs, paths); - if (dual) + if (mode != PRIMARY) checkNotExist(igfsSecondary, paths); } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java index a115e03c7f981..e8d65f18e06ab 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java @@ -61,6 +61,9 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import static org.apache.ignite.igfs.IgfsMode.PRIMARY; +import static org.apache.ignite.igfs.IgfsMode.PROXY; + /** * Test fo regular igfs operations. */ @@ -657,6 +660,9 @@ public void testUpdatePathDoesNotExist() throws Exception { */ @SuppressWarnings("ConstantConditions") public void testFormat() throws Exception { + if (mode == PROXY) + return; + final GridCacheAdapter dataCache = getDataCache(igfs); assert dataCache != null; @@ -1010,7 +1016,7 @@ public void testCreateParentRoot() throws Exception { * @throws Exception If failed. */ public void testCreateNoClose() throws Exception { - if (dual) + if (mode != PRIMARY) return; create(igfs, paths(DIR, SUBDIR), null); @@ -1089,7 +1095,7 @@ public void testCreateRenameParentNoClose() throws Exception { * @throws Exception If failed. */ public void testCreateDeleteNoClose() throws Exception { - if (dual) + if (mode != PRIMARY) return; create(igfs, paths(DIR, SUBDIR), null); @@ -1143,7 +1149,7 @@ public void testCreateDeleteNoClose() throws Exception { * @throws Exception If failed. */ public void testCreateDeleteParentNoClose() throws Exception { - if (dual) + if (mode != PRIMARY) return; create(igfs, paths(DIR, SUBDIR), null); @@ -1541,7 +1547,7 @@ public void testAppendParentRoot() throws Exception { * @throws Exception If failed. */ public void testAppendNoClose() throws Exception { - if (dual) + if (mode != PRIMARY) return; if (appendSupported()) { @@ -1631,7 +1637,7 @@ public void testAppendRenameParentNoClose() throws Exception { * @throws Exception If failed. */ public void testAppendDeleteNoClose() throws Exception { - if (dual) + if (mode != PRIMARY) return; if (appendSupported()) { @@ -1686,7 +1692,7 @@ public boolean apply() { * @throws Exception If failed. 
*/ public void testAppendDeleteParentNoClose() throws Exception { - if (dual) + if (mode != PRIMARY) return; if (appendSupported()) { @@ -2154,9 +2160,7 @@ public void testConcurrentDeletes() throws Exception { U.awaitQuiet(barrier); try { - igfs.delete(SUBDIR, true); - - return true; + return igfs.delete(SUBDIR, true); } catch (IgniteException ignored) { return false; @@ -2169,9 +2173,7 @@ public void testConcurrentDeletes() throws Exception { U.awaitQuiet(barrier); try { - igfs.delete(SUBSUBDIR, true); - - return true; + return igfs.delete(SUBSUBDIR, true); } catch (IgniteException ignored) { return false; diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java index 8baaf4a4dff8d..0e6fc48c4a8f3 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java @@ -66,7 +66,7 @@ public abstract class IgfsLocalSecondaryFileSystemDualAbstractSelfTest extends I * * @param mode IGFS mode. */ - public IgfsLocalSecondaryFileSystemDualAbstractSelfTest(IgfsMode mode) { + protected IgfsLocalSecondaryFileSystemDualAbstractSelfTest(IgfsMode mode) { super(mode); } @@ -82,7 +82,6 @@ public IgfsLocalSecondaryFileSystemDualAbstractSelfTest(IgfsMode mode) { cleanDirectory(extDir); } - /** * Creates secondary filesystems. * @return IgfsSecondaryFileSystem diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxyClientSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxyClientSelfTest.java new file mode 100644 index 0000000000000..4e5d85f9dee35 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxyClientSelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +/** + * Tests for PROXY mode with local FS is configured as a secondary. 
+ */ +public class IgfsLocalSecondaryFileSystemProxyClientSelfTest extends IgfsLocalSecondaryFileSystemProxySelfTest { + /** {@inheritDoc} */ + @Override protected boolean client() { + return true; + } +} \ No newline at end of file diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java new file mode 100644 index 0000000000000..848abe2b825ae --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java @@ -0,0 +1,272 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Files; +import java.util.Collection; +import java.util.concurrent.atomic.AtomicLong; +import org.apache.ignite.igfs.IgfsFile; +import org.apache.ignite.igfs.IgfsPath; +import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem; +import org.apache.ignite.igfs.secondary.local.LocalIgfsSecondaryFileSystem; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.lang.IgniteBiInClosure; +import org.jetbrains.annotations.Nullable; + +/** + * Tests for PROXY mode with local FS is configured as a secondary. + */ +public class IgfsLocalSecondaryFileSystemProxySelfTest extends IgfsProxySelfTest { + /** Fs work directory. */ + private static final String FS_WORK_DIR = U.getIgniteHome() + File.separatorChar + "work" + + File.separatorChar + "fs"; + + /** */ + private static final String FS_EXT_DIR = U.getIgniteHome() + File.separatorChar + "work" + + File.separatorChar + "ext"; + + /** */ + private final File dirLinkDest = new File(FS_EXT_DIR + File.separatorChar + "extdir"); + + /** */ + private final File fileLinkDest = + new File(FS_EXT_DIR + File.separatorChar + "extdir" + File.separatorChar + "filedest"); + + /** */ + private final File dirLinkSrc = new File(FS_WORK_DIR + File.separatorChar + "dir"); + + /** */ + private final File fileLinkSrc = new File(FS_WORK_DIR + File.separatorChar + "file"); + + /** + * Creates secondary filesystems. + * @return IgfsSecondaryFileSystem + * @throws Exception On failure. 
+ */ + @Override protected IgfsSecondaryFileSystem createSecondaryFileSystemStack() throws Exception { + final File workDir = new File(FS_WORK_DIR); + + if (!workDir.exists()) + assert workDir.mkdirs(); + + LocalIgfsSecondaryFileSystem second = new LocalIgfsSecondaryFileSystem(); + + second.setWorkDirectory(workDir.getAbsolutePath()); + + igfsSecondary = new IgfsLocalSecondaryFileSystemTestAdapter(workDir); + + return second; + } + + /** {@inheritDoc} */ + @Override protected void beforeTest() throws Exception { + super.beforeTest(); + + final File extDir = new File(FS_EXT_DIR); + + if (!extDir.exists()) + assert extDir.mkdirs(); + else + cleanDirectory(extDir); + } + + /** {@inheritDoc} */ + @Override protected boolean permissionsSupported() { + return false; + } + + /** {@inheritDoc} */ + @Override protected boolean propertiesSupported() { + return false; + } + + /** {@inheritDoc} */ + @Override protected boolean timesSupported() { + return false; + } + + /** {@inheritDoc} */ + @Override public void testUpdatePathDoesNotExist() throws Exception { + fail("IGNITE-3645"); + } + + /** + * + * @throws Exception If failed. + */ + @SuppressWarnings("ConstantConditions") + public void testListPathForSymlink() throws Exception { + if (U.isWindows()) + return; + + createSymlinks(); + + assertTrue(igfs.info(DIR).isDirectory()); + + Collection pathes = igfs.listPaths(DIR); + Collection files = igfs.listFiles(DIR); + + assertEquals(1, pathes.size()); + assertEquals(1, files.size()); + + assertEquals("filedest", F.first(pathes).name()); + assertEquals("filedest", F.first(files).path().name()); + } + + /** + * + * @throws Exception If failed. + */ + public void testDeleteSymlinkDir() throws Exception { + if (U.isWindows()) + return; + + createSymlinks(); + + // Only symlink must be deleted. Destination content must be exist. + igfs.delete(DIR, true); + + assertTrue(fileLinkDest.exists()); + } + + /** + * + * @throws Exception If failed. + */ + public void testSymlinkToFile() throws Exception { + if (U.isWindows()) + return; + + createSymlinks(); + + checkFileContent(igfs, new IgfsPath("/file"), chunk); + } + + /** + * + * @throws Exception If failed. + */ + public void testMkdirsInsideSymlink() throws Exception { + if (U.isWindows()) + return; + + createSymlinks(); + + igfs.mkdirs(SUBSUBDIR); + + assertTrue(Files.isDirectory(dirLinkDest.toPath().resolve("subdir/subsubdir"))); + assertTrue(Files.isDirectory(dirLinkSrc.toPath().resolve("subdir/subsubdir"))); + } + + /** + * + * @throws Exception If failed. + */ + public void testUsedSpaceSize() throws Exception { + final int DIRS_COUNT = 5; + final int DIRS_MAX_DEEP = 3; + final int FILES_COUNT = 10; + final AtomicLong totalSize = new AtomicLong(); + + IgniteBiInClosure createHierarchy = new IgniteBiInClosure() { + @Override public void apply(Integer level, IgfsPath levelDir) { + try { + for (int i = 0; i < FILES_COUNT; ++i) { + IgfsPath filePath = new IgfsPath(levelDir, "file" + Integer.toString(i)); + + createFile(igfs, filePath, true, chunk); + + totalSize.getAndAdd(chunk.length); + } + + if (level < DIRS_MAX_DEEP) { + for (int dir = 0; dir < DIRS_COUNT; dir++) { + IgfsPath dirPath = new IgfsPath(levelDir, "dir" + Integer.toString(dir)); + + igfs.mkdirs(dirPath); + + apply(level + 1, dirPath); + } + } + } catch (Exception e) { + fail(e.getMessage()); + } + } + }; + + createHierarchy.apply(1, new IgfsPath("/dir")); + + assertEquals(totalSize.get(), igfs.metrics().secondarySpaceSize()); + } + + /** + * + * @throws Exception If failed. 
+ */ + private void createSymlinks() throws Exception { + assert dirLinkDest.mkdir(); + + createFile(fileLinkDest, true, chunk); + + Files.createSymbolicLink(dirLinkSrc.toPath(), dirLinkDest.toPath()); + Files.createSymbolicLink(fileLinkSrc.toPath(), fileLinkDest.toPath()); + } + + /** + * @param dir Directory to clean. + */ + private static void cleanDirectory(File dir){ + File[] entries = dir.listFiles(); + + if (entries != null) { + for (File entry : entries) { + if (entry.isDirectory()) { + cleanDirectory(entry); + + assert entry.delete(); + } + else + assert entry.delete(); + } + } + } + + /** + * @param f File object. + * @param overwrite Overwrite flag. + * @param chunks File content. + * @throws IOException If failed. + */ + private static void createFile(File f, boolean overwrite, @Nullable byte[]... chunks) throws IOException { + OutputStream os = null; + + try { + os = new FileOutputStream(f, overwrite); + + writeFileChunks(os, chunks); + } + finally { + U.closeQuiet(os); + } + } +} \ No newline at end of file diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsProxySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsProxySelfTest.java index 3b8c606592dac..73835a535fca9 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsProxySelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsProxySelfTest.java @@ -20,7 +20,7 @@ import static org.apache.ignite.igfs.IgfsMode.PROXY; /** - * Tests for PRIMARY mode. + * Tests for PROXY mode. */ public class IgfsProxySelfTest extends IgfsAbstractSelfTest { /** diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java index 0241068bcaac4..5a7e3d7456c9e 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java @@ -29,6 +29,7 @@ import org.apache.ignite.internal.processors.igfs.IgfsCacheSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsDualAsyncClientSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsDualSyncClientSelfTest; +import org.apache.ignite.internal.processors.igfs.IgfsLocalSecondaryFileSystemProxyClientSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsMaxSizeSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsPrimaryClientSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsDataManagerSelfTest; @@ -52,6 +53,8 @@ import org.apache.ignite.internal.processors.igfs.IgfsPrimarySelfTest; import org.apache.ignite.internal.processors.igfs.IgfsProcessorSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsProcessorValidationSelfTest; +import org.apache.ignite.internal.processors.igfs.IgfsProxySelfTest; +import org.apache.ignite.internal.processors.igfs.IgfsLocalSecondaryFileSystemProxySelfTest; import org.apache.ignite.internal.processors.igfs.IgfsServerManagerIpcEndpointRegistrationOnWindowsSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsSizeSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsStartCacheTest; @@ -151,6 +154,11 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(IgfsMaxSizeSelfTest.class); + + suite.addTestSuite(IgfsProxySelfTest.class); + suite.addTestSuite(IgfsLocalSecondaryFileSystemProxySelfTest.class); + 
suite.addTestSuite(IgfsLocalSecondaryFileSystemProxyClientSelfTest.class); + return suite; } } \ No newline at end of file From 14959f2efea43046a1a2e5c1978fc27a156f5a7a Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Mon, 26 Sep 2016 10:12:45 +0300 Subject: [PATCH 20/69] IGNITE-3925: Removed unnecessary "log.isInfoEnabled()" check during PID printout. --- .../src/main/java/org/apache/ignite/internal/IgniteKernal.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java index 827b54a262e97..c5d274848d7ee 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java @@ -1784,8 +1784,7 @@ private void ackOsInfo() { int jvmPid = U.jvmPid(); - if (log.isInfoEnabled()) - log.info("PID: " + (jvmPid == -1 ? "N/A" : jvmPid)); + log.info("PID: " + (jvmPid == -1 ? "N/A" : jvmPid)); } } From 38d1d0491ab9e6858a58b90d0e5d892d9fc0e284 Mon Sep 17 00:00:00 2001 From: Igor Sapego Date: Mon, 26 Sep 2016 11:04:56 +0300 Subject: [PATCH 21/69] IGNITE-3876: ODBC: Better handling of NULL values for output parameters. This closes #1099. --- modules/platforms/cpp/odbc-test/Makefile.am | 1 + .../cpp/odbc-test/include/test_utils.h | 8 + .../odbc-test/project/vs/odbc-test.vcxproj | 1 + .../project/vs/odbc-test.vcxproj.filters | 3 + .../cpp/odbc-test/src/api_robustness_test.cpp | 1006 +++++++++++++++++ .../odbc/include/ignite/odbc/common_types.h | 8 +- .../platforms/cpp/odbc/src/entry_points.cpp | 19 +- modules/platforms/cpp/odbc/src/odbc.cpp | 199 ++-- modules/platforms/cpp/odbc/src/statement.cpp | 2 +- 9 files changed, 1145 insertions(+), 102 deletions(-) create mode 100644 modules/platforms/cpp/odbc-test/src/api_robustness_test.cpp diff --git a/modules/platforms/cpp/odbc-test/Makefile.am b/modules/platforms/cpp/odbc-test/Makefile.am index a22e247c92f72..ccf1192445563 100644 --- a/modules/platforms/cpp/odbc-test/Makefile.am +++ b/modules/platforms/cpp/odbc-test/Makefile.am @@ -72,6 +72,7 @@ ignite_odbc_tests_SOURCES = \ src/sql_types_test.cpp \ src/sql_date_time_functions_test.cpp \ src/sql_outer_join_test.cpp \ + src/api_robustness_test.cpp \ ../odbc/src/cursor.cpp \ ../odbc/src/config/connection_info.cpp \ ../odbc/src/app/application_data_buffer.cpp \ diff --git a/modules/platforms/cpp/odbc-test/include/test_utils.h b/modules/platforms/cpp/odbc-test/include/test_utils.h index e8cd089287af1..e239f45ee12af 100644 --- a/modules/platforms/cpp/odbc-test/include/test_utils.h +++ b/modules/platforms/cpp/odbc-test/include/test_utils.h @@ -27,6 +27,14 @@ #include +#define ODBC_FAIL_ON_ERROR(ret, type, handle) \ + if (!SQL_SUCCEEDED(ret)) \ + { \ + Ignition::StopAll(true); \ + BOOST_FAIL(GetOdbcErrorMessage(type, handle)); \ + } + + namespace ignite { /** Read buffer size. 
*/ diff --git a/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj b/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj index 98a1e587c6553..91603dc5f01f9 100644 --- a/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj +++ b/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj @@ -161,6 +161,7 @@ + diff --git a/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj.filters b/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj.filters index f348ee7dd5ee6..eef6abbd60bfa 100644 --- a/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj.filters +++ b/modules/platforms/cpp/odbc-test/project/vs/odbc-test.vcxproj.filters @@ -112,6 +112,9 @@ Code + + Code + diff --git a/modules/platforms/cpp/odbc-test/src/api_robustness_test.cpp b/modules/platforms/cpp/odbc-test/src/api_robustness_test.cpp new file mode 100644 index 0000000000000..008cf25f26c7e --- /dev/null +++ b/modules/platforms/cpp/odbc-test/src/api_robustness_test.cpp @@ -0,0 +1,1006 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifdef _WIN32 +# include +#endif + +#include +#include + +#include +#include + +#ifndef _MSC_VER +# define BOOST_TEST_DYN_LINK +#endif + +#include + +#include "ignite/ignite.h" +#include "ignite/ignition.h" +#include "ignite/impl/binary/binary_utils.h" + +#include "test_type.h" +#include "test_utils.h" + +using namespace ignite; +using namespace ignite::cache; +using namespace ignite::cache::query; +using namespace ignite::common; + +using namespace boost::unit_test; + +using ignite::impl::binary::BinaryUtils; + +/** + * Test setup fixture. + */ +struct ApiRobustnessTestSuiteFixture +{ + void Prepare() + { + // Allocate an environment handle + SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &env); + + BOOST_REQUIRE(env != NULL); + + // We want ODBC 3 support + SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, reinterpret_cast(SQL_OV_ODBC3), 0); + + // Allocate a connection handle + SQLAllocHandle(SQL_HANDLE_DBC, env, &dbc); + + BOOST_REQUIRE(dbc != NULL); + } + + /** + * Establish connection to node. + * + * @param connectStr Connection string. + */ + void Connect(const std::string& connectStr) + { + Prepare(); + + // Connect string + std::vector connectStr0; + + connectStr0.reserve(connectStr.size() + 1); + std::copy(connectStr.begin(), connectStr.end(), std::back_inserter(connectStr0)); + + SQLCHAR outstr[ODBC_BUFFER_SIZE]; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. 
+ SQLRETURN ret = SQLDriverConnect(dbc, NULL, &connectStr0[0], static_cast(connectStr0.size()), + outstr, sizeof(outstr), &outstrlen, SQL_DRIVER_COMPLETE); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_DBC, dbc); + + // Allocate a statement handle + SQLAllocHandle(SQL_HANDLE_STMT, dbc, &stmt); + + BOOST_REQUIRE(stmt != NULL); + } + + void Disconnect() + { + // Releasing statement handle. + SQLFreeHandle(SQL_HANDLE_STMT, stmt); + + // Disconneting from the server. + SQLDisconnect(dbc); + + // Releasing allocated handles. + SQLFreeHandle(SQL_HANDLE_DBC, dbc); + SQLFreeHandle(SQL_HANDLE_ENV, env); + } + + static Ignite StartNode(const char* name, const char* config) + { + IgniteConfiguration cfg; + + cfg.jvmOpts.push_back("-Xdebug"); + cfg.jvmOpts.push_back("-Xnoagent"); + cfg.jvmOpts.push_back("-Djava.compiler=NONE"); + cfg.jvmOpts.push_back("-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005"); + cfg.jvmOpts.push_back("-XX:+HeapDumpOnOutOfMemoryError"); + cfg.jvmOpts.push_back("-Duser.timezone=GMT"); + +#ifdef IGNITE_TESTS_32 + cfg.jvmInitMem = 256; + cfg.jvmMaxMem = 768; +#else + cfg.jvmInitMem = 1024; + cfg.jvmMaxMem = 4096; +#endif + + char* cfgPath = getenv("IGNITE_NATIVE_TEST_ODBC_CONFIG_PATH"); + + BOOST_REQUIRE(cfgPath != 0); + + cfg.springCfgPath.assign(cfgPath).append("/").append(config); + + IgniteError err; + + return Ignition::Start(cfg, name); + } + + static Ignite StartAdditionalNode(const char* name) + { + return StartNode(name, "queries-test-noodbc.xml"); + } + + /** + * Constructor. + */ + ApiRobustnessTestSuiteFixture() : + testCache(0), + env(NULL), + dbc(NULL), + stmt(NULL) + { + grid = StartNode("NodeMain", "queries-test.xml"); + + testCache = grid.GetCache("cache"); + } + + /** + * Destructor. + */ + ~ApiRobustnessTestSuiteFixture() + { + Disconnect(); + + Ignition::StopAll(true); + } + + /** Node started during the test. */ + Ignite grid; + + /** Test cache instance. */ + Cache testCache; + + /** ODBC Environment. */ + SQLHENV env; + + /** ODBC Connect. */ + SQLHDBC dbc; + + /** ODBC Statement. */ + SQLHSTMT stmt; +}; + +BOOST_FIXTURE_TEST_SUITE(ApiRobustnessTestSuite, ApiRobustnessTestSuiteFixture) + +BOOST_AUTO_TEST_CASE(TestSQLDriverConnect) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Prepare(); + + SQLCHAR connectStr[] = "DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"; + + SQLCHAR outStr[ODBC_BUFFER_SIZE]; + SQLSMALLINT outStrLen; + + // Normal connect. + SQLRETURN ret = SQLDriverConnect(dbc, NULL, connectStr, sizeof(connectStr), + outStr, sizeof(outStr), &outStrLen, SQL_DRIVER_COMPLETE); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLDisconnect(dbc); + + // Null out string resulting length. + SQLDriverConnect(dbc, NULL, connectStr, sizeof(connectStr), outStr, sizeof(outStr), 0, SQL_DRIVER_COMPLETE); + + SQLDisconnect(dbc); + + // Null out string buffer length. + SQLDriverConnect(dbc, NULL, connectStr, sizeof(connectStr), outStr, 0, &outStrLen, SQL_DRIVER_COMPLETE); + + SQLDisconnect(dbc); + + // Null out string. + SQLDriverConnect(dbc, NULL, connectStr, sizeof(connectStr), 0, sizeof(outStr), &outStrLen, SQL_DRIVER_COMPLETE); + + SQLDisconnect(dbc); + + // Null all. 
+ SQLDriverConnect(dbc, NULL, connectStr, sizeof(connectStr), 0, 0, 0, SQL_DRIVER_COMPLETE); + + SQLDisconnect(dbc); +} + +BOOST_AUTO_TEST_CASE(TestSQLConnect) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR buffer[ODBC_BUFFER_SIZE]; + SQLSMALLINT resLen = 0; + + // Everyting is ok. + SQLRETURN ret = SQLGetInfo(dbc, SQL_DRIVER_NAME, buffer, ODBC_BUFFER_SIZE, &resLen); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Resulting length is null. + SQLGetInfo(dbc, SQL_DRIVER_NAME, buffer, ODBC_BUFFER_SIZE, 0); + + // Buffer length is null. + SQLGetInfo(dbc, SQL_DRIVER_NAME, buffer, 0, &resLen); + + // Buffer is null. + SQLGetInfo(dbc, SQL_DRIVER_NAME, 0, ODBC_BUFFER_SIZE, &resLen); + + // Unknown info. + SQLGetInfo(dbc, -1, buffer, ODBC_BUFFER_SIZE, &resLen); + + // All nulls. + SQLGetInfo(dbc, SQL_DRIVER_NAME, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLPrepare) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + // Everyting is ok. + SQLRETURN ret = SQLPrepare(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLCloseCursor(stmt); + + // Value length is null. + SQLPrepare(stmt, sql, 0); + + SQLCloseCursor(stmt); + + // Value is null. + SQLPrepare(stmt, 0, sizeof(sql)); + + SQLCloseCursor(stmt); + + // All nulls. + SQLPrepare(stmt, 0, 0); + + SQLCloseCursor(stmt); +} + +BOOST_AUTO_TEST_CASE(TestSQLExecDirect) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + // Everyting is ok. + SQLRETURN ret = SQLExecDirect(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLCloseCursor(stmt); + + // Value length is null. + SQLExecDirect(stmt, sql, 0); + + SQLCloseCursor(stmt); + + // Value is null. + SQLExecDirect(stmt, 0, sizeof(sql)); + + SQLCloseCursor(stmt); + + // All nulls. + SQLExecDirect(stmt, 0, 0); + + SQLCloseCursor(stmt); +} + +BOOST_AUTO_TEST_CASE(TestSQLExtendedFetch) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + for (int i = 0; i < 100; ++i) + { + TestType obj; + + obj.strField = LexicalCast(i); + + testCache.Put(i, obj); + } + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + SQLRETURN ret = SQLExecDirect(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLULEN rowCount; + SQLUSMALLINT rowStatus[16]; + + // Everyting is ok. + ret = SQLExtendedFetch(stmt, SQL_FETCH_NEXT, 0, &rowCount, rowStatus); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Row count is null. + SQLExtendedFetch(stmt, SQL_FETCH_NEXT, 0, 0, rowStatus); + + // Row statuses is null. + SQLExtendedFetch(stmt, SQL_FETCH_NEXT, 0, &rowCount, 0); + + // All nulls. 
+ SQLExtendedFetch(stmt, SQL_FETCH_NEXT, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLNumResultCols) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + for (int i = 0; i < 100; ++i) + { + TestType obj; + + obj.strField = LexicalCast(i); + + testCache.Put(i, obj); + } + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + SQLRETURN ret = SQLExecDirect(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLSMALLINT columnCount; + + // Everyting is ok. + ret = SQLNumResultCols(stmt, &columnCount); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Column count is null. + SQLNumResultCols(stmt, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLTables) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR catalogName[] = ""; + SQLCHAR schemaName[] = ""; + SQLCHAR tableName[] = ""; + SQLCHAR tableType[] = ""; + + // Everithing is ok. + SQLRETURN ret = SQLTables(stmt, catalogName, sizeof(catalogName), schemaName, + sizeof(schemaName), tableName, sizeof(tableName), tableType, sizeof(tableType)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Sizes are nulls. + SQLTables(dbc, catalogName, 0, schemaName, 0, tableName, 0, tableType, 0); + + // Values are nulls. + SQLTables(dbc, 0, sizeof(catalogName), 0, sizeof(schemaName), 0, sizeof(tableName), 0, sizeof(tableType)); + + // All nulls. + SQLTables(dbc, 0, 0, 0, 0, 0, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLColumns) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR catalogName[] = ""; + SQLCHAR schemaName[] = ""; + SQLCHAR tableName[] = ""; + SQLCHAR columnName[] = ""; + + // Everithing is ok. + SQLRETURN ret = SQLColumns(stmt, catalogName, sizeof(catalogName), schemaName, + sizeof(schemaName), tableName, sizeof(tableName), columnName, sizeof(columnName)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Sizes are nulls. + SQLColumns(dbc, catalogName, 0, schemaName, 0, tableName, 0, columnName, 0); + + // Values are nulls. + SQLColumns(dbc, 0, sizeof(catalogName), 0, sizeof(schemaName), 0, sizeof(tableName), 0, sizeof(columnName)); + + // All nulls. + SQLColumns(dbc, 0, 0, 0, 0, 0, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLBindCol) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLINTEGER ind1; + SQLLEN len1 = 0; + + // Everithing is ok. + SQLRETURN ret = SQLBindCol(stmt, 1, SQL_C_SLONG, &ind1, sizeof(ind1), &len1); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Size is null. + SQLBindCol(stmt, 1, SQL_C_SLONG, &ind1, 0, &len1); + + // Res size is null. + SQLBindCol(stmt, 2, SQL_C_SLONG, &ind1, sizeof(ind1), 0); + + // Value is null. + SQLBindCol(stmt, 3, SQL_C_SLONG, 0, sizeof(ind1), &len1); + + // All nulls. 
+ SQLBindCol(stmt, 4, SQL_C_SLONG, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLBindParameter) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLINTEGER ind1; + SQLLEN len1 = 0; + + // Everithing is ok. + SQLRETURN ret = SQLBindParameter(stmt, 1, SQL_PARAM_INPUT, + SQL_C_SLONG, SQL_INTEGER, 100, 100, &ind1, sizeof(ind1), &len1); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Size is null. + SQLBindParameter(stmt, 2, SQL_PARAM_INPUT, SQL_C_SLONG, SQL_INTEGER, 100, 100, &ind1, 0, &len1); + + // Res size is null. + SQLBindParameter(stmt, 3, SQL_PARAM_INPUT, SQL_C_SLONG, SQL_INTEGER, 100, 100, &ind1, sizeof(ind1), 0); + + // Value is null. + SQLBindParameter(stmt, 4, SQL_PARAM_INPUT, SQL_C_SLONG, SQL_INTEGER, 100, 100, 0, sizeof(ind1), &len1); + + // All nulls. + SQLBindParameter(stmt, 5, SQL_PARAM_INPUT, SQL_C_SLONG, SQL_INTEGER, 100, 100, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLNativeSql) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + SQLCHAR buffer[ODBC_BUFFER_SIZE]; + SQLINTEGER resLen = 0; + + // Everithing is ok. + SQLRETURN ret = SQLNativeSql(dbc, sql, sizeof(sql), buffer, sizeof(buffer), &resLen); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Value size is null. + SQLNativeSql(dbc, sql, 0, buffer, sizeof(buffer), &resLen); + + // Buffer size is null. + SQLNativeSql(dbc, sql, sizeof(sql), buffer, 0, &resLen); + + // Res size is null. + SQLNativeSql(dbc, sql, sizeof(sql), buffer, sizeof(buffer), 0); + + // Value is null. + SQLNativeSql(dbc, sql, 0, buffer, sizeof(buffer), &resLen); + + // Buffer is null. + SQLNativeSql(dbc, sql, sizeof(sql), 0, sizeof(buffer), &resLen); + + // All nulls. + SQLNativeSql(dbc, sql, 0, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLColAttribute) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + SQLRETURN ret = SQLExecDirect(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLCHAR buffer[ODBC_BUFFER_SIZE]; + SQLSMALLINT resLen = 0; + SQLLEN numericAttr = 0; + + // Everithing is ok. Character attribute. + ret = SQLColAttribute(stmt, 1, SQL_COLUMN_TABLE_NAME, buffer, sizeof(buffer), &resLen, &numericAttr); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + // Everithing is ok. Numeric attribute. 
+ ret = SQLColAttribute(stmt, 1, SQL_DESC_COUNT, buffer, sizeof(buffer), &resLen, &numericAttr); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLColAttribute(stmt, 1, SQL_COLUMN_TABLE_NAME, buffer, sizeof(buffer), &resLen, 0); + SQLColAttribute(stmt, 1, SQL_COLUMN_TABLE_NAME, buffer, sizeof(buffer), 0, &numericAttr); + SQLColAttribute(stmt, 1, SQL_COLUMN_TABLE_NAME, buffer, 0, &resLen, &numericAttr); + SQLColAttribute(stmt, 1, SQL_COLUMN_TABLE_NAME, 0, sizeof(buffer), &resLen, &numericAttr); + SQLColAttribute(stmt, 1, SQL_COLUMN_TABLE_NAME, 0, 0, 0, 0); + + SQLColAttribute(stmt, 1, SQL_DESC_COUNT, buffer, sizeof(buffer), &resLen, 0); + SQLColAttribute(stmt, 1, SQL_DESC_COUNT, buffer, sizeof(buffer), 0, &numericAttr); + SQLColAttribute(stmt, 1, SQL_DESC_COUNT, buffer, 0, &resLen, &numericAttr); + SQLColAttribute(stmt, 1, SQL_DESC_COUNT, 0, sizeof(buffer), &resLen, &numericAttr); + SQLColAttribute(stmt, 1, SQL_DESC_COUNT, 0, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLDescribeCol) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + SQLRETURN ret = SQLExecDirect(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLCHAR columnName[ODBC_BUFFER_SIZE]; + SQLSMALLINT columnNameLen = 0; + SQLSMALLINT dataType = 0; + SQLULEN columnSize = 0; + SQLSMALLINT decimalDigits = 0; + SQLSMALLINT nullable = 0; + + // Everithing is ok. + ret = SQLDescribeCol(stmt, 1, columnName, sizeof(columnName), + &columnNameLen, &dataType, &columnSize, &decimalDigits, &nullable); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLDescribeCol(stmt, 1, 0, sizeof(columnName), &columnNameLen, &dataType, &columnSize, &decimalDigits, &nullable); + SQLDescribeCol(stmt, 1, columnName, 0, &columnNameLen, &dataType, &columnSize, &decimalDigits, &nullable); + SQLDescribeCol(stmt, 1, columnName, sizeof(columnName), 0, &dataType, &columnSize, &decimalDigits, &nullable); + SQLDescribeCol(stmt, 1, columnName, sizeof(columnName), &columnNameLen, 0, &columnSize, &decimalDigits, &nullable); + SQLDescribeCol(stmt, 1, columnName, sizeof(columnName), &columnNameLen, &dataType, 0, &decimalDigits, &nullable); + SQLDescribeCol(stmt, 1, columnName, sizeof(columnName), &columnNameLen, &dataType, &columnSize, 0, &nullable); + SQLDescribeCol(stmt, 1, columnName, sizeof(columnName), &columnNameLen, &dataType, &columnSize, &decimalDigits, 0); + SQLDescribeCol(stmt, 1, 0, 0, 0, 0, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLRowCount) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + SQLRETURN ret = SQLExecDirect(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLLEN rows = 0; + + // Everithing is ok. + ret = SQLRowCount(stmt, &rows); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLRowCount(stmt, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLForeignKeys) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. 
+ + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR catalogName[] = ""; + SQLCHAR schemaName[] = "cache"; + SQLCHAR tableName[] = "TestType"; + + // Everithing is ok. + SQLRETURN ret = SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), + tableName, sizeof(tableName), catalogName, sizeof(catalogName), + schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, 0, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, 0, schemaName, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), 0, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, 0, tableName, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), 0, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, 0, catalogName, + sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName), + 0, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, 0, schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, sizeof(catalogName), 0, sizeof(schemaName), tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, 0, tableName, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), 0, sizeof(tableName)); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName), + catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, 0); + + SQLCloseCursor(stmt); + + SQLForeignKeys(stmt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + + SQLCloseCursor(stmt); +} + +BOOST_AUTO_TEST_CASE(TestSQLGetStmtAttr) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as 
they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR buffer[ODBC_BUFFER_SIZE]; + SQLINTEGER resLen = 0; + + // Everything is ok. + SQLRETURN ret = SQLGetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, buffer, sizeof(buffer), &resLen); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLGetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, 0, sizeof(buffer), &resLen); + SQLGetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, buffer, 0, &resLen); + SQLGetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, buffer, sizeof(buffer), 0); + SQLGetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLSetStmtAttr) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLULEN val = 1; + + // Everything is ok. + SQLRETURN ret = SQLSetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, reinterpret_cast<SQLPOINTER>(val), sizeof(val)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLSetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, 0, sizeof(val)); + SQLSetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, reinterpret_cast<SQLPOINTER>(val), 0); + SQLSetStmtAttr(stmt, SQL_ATTR_ROW_ARRAY_SIZE, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLPrimaryKeys) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR catalogName[] = ""; + SQLCHAR schemaName[] = "cache"; + SQLCHAR tableName[] = "TestType"; + + // Everything is ok. + SQLRETURN ret = SQLPrimaryKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), + tableName, sizeof(tableName)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLPrimaryKeys(stmt, 0, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + SQLPrimaryKeys(stmt, catalogName, 0, schemaName, sizeof(schemaName), tableName, sizeof(tableName)); + SQLPrimaryKeys(stmt, catalogName, sizeof(catalogName), 0, sizeof(schemaName), tableName, sizeof(tableName)); + SQLPrimaryKeys(stmt, catalogName, sizeof(catalogName), schemaName, 0, tableName, sizeof(tableName)); + SQLPrimaryKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), 0, sizeof(tableName)); + SQLPrimaryKeys(stmt, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, 0); + SQLPrimaryKeys(stmt, 0, 0, 0, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLNumParams) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + // Everything is ok. + SQLRETURN ret = SQLPrepare(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLSMALLINT params; + + // Everything is ok. + ret = SQLNumParams(stmt, &params); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLNumParams(stmt, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLGetDiagField) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + // Should fail.
+ SQLRETURN ret = SQLGetTypeInfo(stmt, SQL_INTERVAL_MONTH); + + BOOST_REQUIRE_EQUAL(ret, SQL_ERROR); + + SQLCHAR buffer[ODBC_BUFFER_SIZE]; + SQLSMALLINT resLen = 0; + + // Everithing is ok + ret = SQLGetDiagField(SQL_HANDLE_STMT, stmt, 1, SQL_DIAG_MESSAGE_TEXT, buffer, sizeof(buffer), &resLen); + + BOOST_REQUIRE_EQUAL(ret, SQL_SUCCESS); + + SQLGetDiagField(SQL_HANDLE_STMT, stmt, 1, SQL_DIAG_MESSAGE_TEXT, 0, sizeof(buffer), &resLen); + SQLGetDiagField(SQL_HANDLE_STMT, stmt, 1, SQL_DIAG_MESSAGE_TEXT, buffer, 0, &resLen); + SQLGetDiagField(SQL_HANDLE_STMT, stmt, 1, SQL_DIAG_MESSAGE_TEXT, buffer, sizeof(buffer), 0); + SQLGetDiagField(SQL_HANDLE_STMT, stmt, 1, SQL_DIAG_MESSAGE_TEXT, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLGetDiagRec) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + // Should fail. + SQLRETURN ret = SQLGetTypeInfo(stmt, SQL_INTERVAL_MONTH); + + BOOST_REQUIRE_EQUAL(ret, SQL_ERROR); + + SQLCHAR state[ODBC_BUFFER_SIZE]; + SQLINTEGER nativeError = 0; + SQLCHAR message[ODBC_BUFFER_SIZE]; + SQLSMALLINT messageLen = 0; + + // Everithing is ok + ret = SQLGetDiagRec(SQL_HANDLE_STMT, stmt, 1, state, &nativeError, message, sizeof(message), &messageLen); + + BOOST_REQUIRE_EQUAL(ret, SQL_SUCCESS); + + SQLGetDiagRec(SQL_HANDLE_STMT, stmt, 1, 0, &nativeError, message, sizeof(message), &messageLen); + SQLGetDiagRec(SQL_HANDLE_STMT, stmt, 1, state, 0, message, sizeof(message), &messageLen); + SQLGetDiagRec(SQL_HANDLE_STMT, stmt, 1, state, &nativeError, 0, sizeof(message), &messageLen); + SQLGetDiagRec(SQL_HANDLE_STMT, stmt, 1, state, &nativeError, message, 0, &messageLen); + SQLGetDiagRec(SQL_HANDLE_STMT, stmt, 1, state, &nativeError, message, sizeof(message), 0); + SQLGetDiagRec(SQL_HANDLE_STMT, stmt, 1, 0, 0, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLGetData) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + for (int i = 0; i < 100; ++i) + { + TestType obj; + + obj.strField = LexicalCast(i); + + testCache.Put(i, obj); + } + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR sql[] = "SELECT strField FROM TestType"; + + SQLRETURN ret = SQLExecDirect(stmt, sql, sizeof(sql)); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + ret = SQLFetch(stmt); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLCHAR buffer[ODBC_BUFFER_SIZE]; + SQLLEN resLen = 0; + + // Everything is ok. + ret = SQLGetData(stmt, 1, SQL_C_CHAR, buffer, sizeof(buffer), &resLen); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLFetch(stmt); + + SQLGetData(stmt, 1, SQL_C_CHAR, 0, sizeof(buffer), &resLen); + + SQLFetch(stmt); + + SQLGetData(stmt, 1, SQL_C_CHAR, buffer, 0, &resLen); + + SQLFetch(stmt); + + SQLGetData(stmt, 1, SQL_C_CHAR, buffer, sizeof(buffer), 0); + + SQLFetch(stmt); + + SQLGetData(stmt, 1, SQL_C_CHAR, 0, 0, 0); + + SQLFetch(stmt); +} + +BOOST_AUTO_TEST_CASE(TestSQLGetEnvAttr) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR buffer[ODBC_BUFFER_SIZE]; + SQLINTEGER resLen = 0; + + // Everything is ok. 
+ SQLRETURN ret = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, buffer, sizeof(buffer), &resLen); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_ENV, env); + + SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, 0, sizeof(buffer), &resLen); + SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, buffer, 0, &resLen); + SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, buffer, sizeof(buffer), 0); + SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, 0, 0, 0); +} + +BOOST_AUTO_TEST_CASE(TestSQLSpecialColumns) +{ + // There are no checks because we do not really care what is the result of these + // calls as long as they do not cause segmentation fault. + + Connect("DRIVER={Apache Ignite};address=127.0.0.1:11110;cache=cache"); + + SQLCHAR catalogName[] = ""; + SQLCHAR schemaName[] = "cache"; + SQLCHAR tableName[] = "TestType"; + + // Everything is ok. + SQLRETURN ret = SQLSpecialColumns(stmt, SQL_BEST_ROWID, catalogName, sizeof(catalogName), + schemaName, sizeof(schemaName), tableName, sizeof(tableName), SQL_SCOPE_CURROW, SQL_NO_NULLS); + + ODBC_FAIL_ON_ERROR(ret, SQL_HANDLE_STMT, stmt); + + SQLCloseCursor(stmt); + + SQLSpecialColumns(stmt, SQL_BEST_ROWID, 0, sizeof(catalogName), schemaName, sizeof(schemaName), tableName, + sizeof(tableName), SQL_SCOPE_CURROW, SQL_NO_NULLS); + + SQLCloseCursor(stmt); + + SQLSpecialColumns(stmt, SQL_BEST_ROWID, catalogName, 0, schemaName, sizeof(schemaName), tableName, + sizeof(tableName), SQL_SCOPE_CURROW, SQL_NO_NULLS); + + SQLCloseCursor(stmt); + + SQLSpecialColumns(stmt, SQL_BEST_ROWID, catalogName, sizeof(catalogName), 0, sizeof(schemaName), tableName, + sizeof(tableName), SQL_SCOPE_CURROW, SQL_NO_NULLS); + + SQLCloseCursor(stmt); + + SQLSpecialColumns(stmt, SQL_BEST_ROWID, catalogName, sizeof(catalogName), schemaName, 0, tableName, + sizeof(tableName), SQL_SCOPE_CURROW, SQL_NO_NULLS); + + SQLCloseCursor(stmt); + + SQLSpecialColumns(stmt, SQL_BEST_ROWID, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), + 0, sizeof(tableName), SQL_SCOPE_CURROW, SQL_NO_NULLS); + + SQLCloseCursor(stmt); + + SQLSpecialColumns(stmt, SQL_BEST_ROWID, catalogName, sizeof(catalogName), schemaName, sizeof(schemaName), + tableName, 0, SQL_SCOPE_CURROW, SQL_NO_NULLS); + + SQLCloseCursor(stmt); + + SQLSpecialColumns(stmt, SQL_BEST_ROWID, 0, 0, 0, 0, 0, 0, SQL_SCOPE_CURROW, SQL_NO_NULLS); + + SQLCloseCursor(stmt); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h b/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h index 250eaf2c16c0c..b01ec76fd1e1a 100644 --- a/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h +++ b/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h @@ -19,17 +19,13 @@ #define _IGNITE_ODBC_COMMON_TYPES #include +#include "system/odbc_constants.h" namespace ignite { namespace odbc { - -#ifdef _WIN64 - typedef long long SqlLen; -#else - typedef long SqlLen; -#endif + typedef SQLLEN SqlLen; /** * SQL result. 
diff --git a/modules/platforms/cpp/odbc/src/entry_points.cpp b/modules/platforms/cpp/odbc/src/entry_points.cpp index f6195e1168b44..c3d24bb960c9b 100644 --- a/modules/platforms/cpp/odbc/src/entry_points.cpp +++ b/modules/platforms/cpp/odbc/src/entry_points.cpp @@ -218,6 +218,8 @@ SQLRETURN SQL_API SQLNativeSql(SQLHDBC conn, outQueryBuffer, outQueryBufferLen, outQueryLen); } + +#if defined _WIN64 || !defined _WIN32 SQLRETURN SQL_API SQLColAttribute(SQLHSTMT stmt, SQLUSMALLINT columnNum, SQLUSMALLINT fieldId, @@ -225,9 +227,18 @@ SQLRETURN SQL_API SQLColAttribute(SQLHSTMT stmt, SQLSMALLINT bufferLen, SQLSMALLINT* strAttrLen, SQLLEN* numericAttr) +#else +SQLRETURN SQL_API SQLColAttribute(SQLHSTMT stmt, + SQLUSMALLINT columnNum, + SQLUSMALLINT fieldId, + SQLPOINTER strAttr, + SQLSMALLINT bufferLen, + SQLSMALLINT* strAttrLen, + SQLPOINTER numericAttr) +#endif { return ignite::SQLColAttribute(stmt, columnNum, fieldId, - strAttr, bufferLen, strAttrLen, numericAttr); + strAttr, bufferLen, strAttrLen, (SQLLEN*)numericAttr); } SQLRETURN SQL_API SQLDescribeCol(SQLHSTMT stmt, @@ -373,15 +384,15 @@ SQLRETURN SQL_API SQLGetEnvAttr(SQLHENV env, } SQLRETURN SQL_API SQLSpecialColumns(SQLHSTMT stmt, - SQLSMALLINT idType, + SQLUSMALLINT idType, SQLCHAR* catalogName, SQLSMALLINT catalogNameLen, SQLCHAR* schemaName, SQLSMALLINT schemaNameLen, SQLCHAR* tableName, SQLSMALLINT tableNameLen, - SQLSMALLINT scope, - SQLSMALLINT nullable) + SQLUSMALLINT scope, + SQLUSMALLINT nullable) { return ignite::SQLSpecialColumns(stmt, idType, catalogName, catalogNameLen, schemaName, schemaNameLen, tableName, diff --git a/modules/platforms/cpp/odbc/src/odbc.cpp b/modules/platforms/cpp/odbc/src/odbc.cpp index fd35cbab6875e..74d0f9d833f4d 100644 --- a/modules/platforms/cpp/odbc/src/odbc.cpp +++ b/modules/platforms/cpp/odbc/src/odbc.cpp @@ -39,8 +39,8 @@ namespace ignite SQLSMALLINT infoValueMax, SQLSMALLINT* length) { - using ignite::odbc::Connection; - using ignite::odbc::config::ConnectionInfo; + using odbc::Connection; + using odbc::config::ConnectionInfo; LOG_MSG("SQLGetInfo called: %d (%s), %p, %d, %p\n", infoType, ConnectionInfo::InfoTypeToString(infoType), @@ -81,7 +81,7 @@ namespace ignite SQLRETURN SQLAllocEnv(SQLHENV* env) { - using ignite::odbc::Environment; + using odbc::Environment; LOG_MSG("SQLAllocEnv called\n"); @@ -92,8 +92,8 @@ namespace ignite SQLRETURN SQLAllocConnect(SQLHENV env, SQLHDBC* conn) { - using ignite::odbc::Environment; - using ignite::odbc::Connection; + using odbc::Environment; + using odbc::Connection; LOG_MSG("SQLAllocConnect called\n"); @@ -116,8 +116,8 @@ namespace ignite SQLRETURN SQLAllocStmt(SQLHDBC conn, SQLHSTMT* stmt) { - using ignite::odbc::Connection; - using ignite::odbc::Statement; + using odbc::Connection; + using odbc::Statement; LOG_MSG("SQLAllocStmt called\n"); @@ -158,7 +158,7 @@ namespace ignite SQLRETURN SQLFreeEnv(SQLHENV env) { - using ignite::odbc::Environment; + using odbc::Environment; LOG_MSG("SQLFreeEnv called\n"); @@ -174,7 +174,7 @@ namespace ignite SQLRETURN SQLFreeConnect(SQLHDBC conn) { - using ignite::odbc::Connection; + using odbc::Connection; LOG_MSG("SQLFreeConnect called\n"); @@ -190,7 +190,7 @@ namespace ignite SQLRETURN SQLFreeStmt(SQLHSTMT stmt, SQLUSMALLINT option) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLFreeStmt called\n"); @@ -236,7 +236,7 @@ namespace ignite SQLRETURN SQLCloseCursor(SQLHSTMT stmt) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLCloseCursor called\n"); @@ -311,9 +311,9 @@ namespace 
ignite SQLCHAR* auth, SQLSMALLINT authLen) { - using ignite::odbc::Connection; - using ignite::odbc::config::Configuration; - using ignite::utility::SqlStringToString; + using odbc::Connection; + using odbc::config::Configuration; + using utility::SqlStringToString; LOG_MSG("SQLConnect called\n"); @@ -335,7 +335,7 @@ namespace ignite SQLRETURN SQLDisconnect(SQLHDBC conn) { - using ignite::odbc::Connection; + using odbc::Connection; LOG_MSG("SQLDisconnect called\n"); @@ -351,8 +351,8 @@ namespace ignite SQLRETURN SQLPrepare(SQLHSTMT stmt, SQLCHAR* query, SQLINTEGER queryLen) { - using ignite::odbc::Statement; - using ignite::utility::SqlStringToString; + using odbc::Statement; + using utility::SqlStringToString; LOG_MSG("SQLPrepare called\n"); @@ -372,7 +372,7 @@ namespace ignite SQLRETURN SQLExecute(SQLHSTMT stmt) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLExecute called\n"); @@ -388,8 +388,8 @@ namespace ignite SQLRETURN SQLExecDirect(SQLHSTMT stmt, SQLCHAR* query, SQLINTEGER queryLen) { - using ignite::odbc::Statement; - using ignite::utility::SqlStringToString; + using odbc::Statement; + using utility::SqlStringToString; LOG_MSG("SQLExecDirect called\n"); @@ -414,10 +414,10 @@ namespace ignite SQLLEN bufferLength, SQLLEN* strLengthOrIndicator) { - using namespace ignite::odbc::type_traits; + using namespace odbc::type_traits; - using ignite::odbc::Statement; - using ignite::odbc::app::ApplicationDataBuffer; + using odbc::Statement; + using odbc::app::ApplicationDataBuffer; LOG_MSG("SQLBindCol called: index=%d, type=%d\n", colNum, targetType); @@ -448,7 +448,7 @@ namespace ignite SQLRETURN SQLFetch(SQLHSTMT stmt) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLFetch called\n"); @@ -483,7 +483,7 @@ namespace ignite SQLRETURN res = SQLFetchScroll(stmt, orientation, offset); - if (res == SQL_SUCCESS || res == SQL_NO_DATA) + if (res == SQL_SUCCESS) { if (rowCount) *rowCount = 1; @@ -491,14 +491,16 @@ namespace ignite if (rowStatusArray) rowStatusArray[0] = SQL_ROW_SUCCESS; } + else if (res == SQL_NO_DATA && rowCount) + *rowCount = 0; return res; } SQLRETURN SQLNumResultCols(SQLHSTMT stmt, SQLSMALLINT *columnNum) { - using ignite::odbc::Statement; - using ignite::odbc::meta::ColumnMetaVector; + using odbc::Statement; + using odbc::meta::ColumnMetaVector; LOG_MSG("SQLNumResultCols called\n"); @@ -509,7 +511,8 @@ namespace ignite int32_t res = statement->GetColumnNumber(); - *columnNum = static_cast(res); + if (columnNum) + *columnNum = static_cast(res); LOG_MSG("columnNum: %d\n", *columnNum); @@ -526,8 +529,8 @@ namespace ignite SQLCHAR* tableType, SQLSMALLINT tableTypeLen) { - using ignite::odbc::Statement; - using ignite::utility::SqlStringToString; + using odbc::Statement; + using utility::SqlStringToString; LOG_MSG("SQLTables called\n"); @@ -561,8 +564,8 @@ namespace ignite SQLCHAR* columnName, SQLSMALLINT columnNameLen) { - using ignite::odbc::Statement; - using ignite::utility::SqlStringToString; + using odbc::Statement; + using utility::SqlStringToString; LOG_MSG("SQLColumns called\n"); @@ -588,7 +591,7 @@ namespace ignite SQLRETURN SQLMoreResults(SQLHSTMT stmt) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLMoreResults called\n"); @@ -611,12 +614,12 @@ namespace ignite SQLLEN bufferLen, SQLLEN* resLen) { - using namespace ignite::odbc::type_traits; + using namespace odbc::type_traits; - using ignite::odbc::Statement; - using ignite::odbc::app::ApplicationDataBuffer; - using ignite::odbc::app::Parameter; - 
using ignite::odbc::type_traits::IsSqlTypeSupported; + using odbc::Statement; + using odbc::app::ApplicationDataBuffer; + using odbc::app::Parameter; + using odbc::type_traits::IsSqlTypeSupported; LOG_MSG("SQLBindParameter called\n"); @@ -628,7 +631,7 @@ namespace ignite if (ioType != SQL_PARAM_INPUT) return SQL_ERROR; - if (*resLen == SQL_DATA_AT_EXEC || *resLen <= SQL_LEN_DATA_AT_EXEC_OFFSET) + if (resLen && (*resLen == SQL_DATA_AT_EXEC || *resLen <= SQL_LEN_DATA_AT_EXEC_OFFSET)) return SQL_ERROR; if (!IsSqlTypeSupported(paramSqlType)) @@ -660,7 +663,7 @@ namespace ignite SQLINTEGER outQueryBufferLen, SQLINTEGER* outQueryLen) { - using namespace ignite::utility; + using namespace utility; LOG_MSG("SQLNativeSql called\n"); @@ -669,7 +672,8 @@ namespace ignite CopyStringToBuffer(in, reinterpret_cast(outQueryBuffer), static_cast(outQueryBufferLen)); - *outQueryLen = std::min(outQueryBufferLen, static_cast(in.size())); + if (outQueryLen) + *outQueryLen = std::min(outQueryBufferLen, static_cast(in.size())); return SQL_SUCCESS; } @@ -682,9 +686,9 @@ namespace ignite SQLSMALLINT* strAttrLen, SQLLEN* numericAttr) { - using ignite::odbc::Statement; - using ignite::odbc::meta::ColumnMetaVector; - using ignite::odbc::meta::ColumnMeta; + using odbc::Statement; + using odbc::meta::ColumnMetaVector; + using odbc::meta::ColumnMeta; LOG_MSG("SQLColAttribute called: %d (%s)\n", fieldId, ColumnMeta::AttrIdToString(fieldId)); @@ -700,7 +704,7 @@ namespace ignite SQLRETURN res = SQLNumResultCols(stmt, &val); - if (res == SQL_SUCCESS) + if (numericAttr && res == SQL_SUCCESS) *numericAttr = val; return res; @@ -722,8 +726,8 @@ namespace ignite SQLSMALLINT* decimalDigits, SQLSMALLINT* nullable) { - using ignite::odbc::Statement; - using ignite::odbc::SqlLen; + using odbc::Statement; + using odbc::SqlLen; LOG_MSG("SQLDescribeCol called\n"); @@ -750,13 +754,20 @@ namespace ignite LOG_MSG("columnSizeRes: %lld\n", columnSizeRes); LOG_MSG("decimalDigitsRes: %lld\n", decimalDigitsRes); LOG_MSG("nullableRes: %lld\n", nullableRes); - LOG_MSG("columnNameBuf: %s\n", columnNameBuf); - LOG_MSG("columnNameLen: %d\n", *columnNameLen); + LOG_MSG("columnNameBuf: %s\n", columnNameBuf ? columnNameBuf : ""); + LOG_MSG("columnNameLen: %d\n", columnNameLen ? 
*columnNameLen : -1); + + if (dataType) + *dataType = static_cast(dataTypeRes); + + if (columnSize) + *columnSize = static_cast(columnSizeRes); - *dataType = static_cast(dataTypeRes); - *columnSize = static_cast(columnSizeRes); - *decimalDigits = static_cast(decimalDigitsRes); - *nullable = static_cast(nullableRes); + if (decimalDigits) + *decimalDigits = static_cast(decimalDigitsRes); + + if (nullable) + *nullable = static_cast(nullableRes); return statement->GetDiagnosticRecords().GetReturnCode(); } @@ -764,7 +775,7 @@ namespace ignite SQLRETURN SQLRowCount(SQLHSTMT stmt, SQLLEN* rowCnt) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLRowCount called\n"); @@ -775,7 +786,8 @@ namespace ignite int64_t res = statement->AffectedRows(); - *rowCnt = static_cast(res); + if (rowCnt) + *rowCnt = static_cast(res); return statement->GetDiagnosticRecords().GetReturnCode(); } @@ -794,8 +806,8 @@ namespace ignite SQLCHAR* foreignTableName, SQLSMALLINT foreignTableNameLen) { - using ignite::odbc::Statement; - using ignite::utility::SqlStringToString; + using odbc::Statement; + using utility::SqlStringToString; LOG_MSG("SQLForeignKeys called\n"); @@ -830,21 +842,24 @@ namespace ignite SQLINTEGER valueBufLen, SQLINTEGER* valueResLen) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLGetStmtAttr called"); - #ifdef ODBC_DEBUG - using ignite::odbc::type_traits::StatementAttrIdToString; +#ifdef ODBC_DEBUG + using odbc::type_traits::StatementAttrIdToString; LOG_MSG("Attr: %s (%d)\n", StatementAttrIdToString(attr), attr); - #endif //ODBC_DEBUG +#endif //ODBC_DEBUG Statement *statement = reinterpret_cast(stmt); if (!statement) return SQL_INVALID_HANDLE; + if (!valueBuf) + return SQL_ERROR; + switch (attr) { case SQL_ATTR_APP_ROW_DESC: @@ -916,15 +931,15 @@ namespace ignite SQLPOINTER value, SQLINTEGER valueLen) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLSetStmtAttr called"); - #ifdef ODBC_DEBUG - using ignite::odbc::type_traits::StatementAttrIdToString; +#ifdef ODBC_DEBUG + using odbc::type_traits::StatementAttrIdToString; LOG_MSG("Attr: %s (%d)\n", StatementAttrIdToString(attr), attr); - #endif //ODBC_DEBUG +#endif //ODBC_DEBUG Statement *statement = reinterpret_cast(stmt); @@ -988,8 +1003,8 @@ namespace ignite SQLCHAR* tableName, SQLSMALLINT tableNameLen) { - using ignite::odbc::Statement; - using ignite::utility::SqlStringToString; + using odbc::Statement; + using utility::SqlStringToString; LOG_MSG("SQLPrimaryKeys called\n"); @@ -1013,7 +1028,7 @@ namespace ignite SQLRETURN SQLNumParams(SQLHSTMT stmt, SQLSMALLINT* paramCnt) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLNumParams called\n"); @@ -1022,7 +1037,8 @@ namespace ignite if (!statement) return SQL_INVALID_HANDLE; - *paramCnt = static_cast(statement->GetParametersNumber()); + if (paramCnt) + *paramCnt = static_cast(statement->GetParametersNumber()); return statement->GetDiagnosticRecords().GetReturnCode(); } @@ -1035,11 +1051,11 @@ namespace ignite SQLSMALLINT bufferLen, SQLSMALLINT* resLen) { - using namespace ignite::odbc; - using namespace ignite::odbc::diagnostic; - using namespace ignite::odbc::type_traits; + using namespace odbc; + using namespace odbc::diagnostic; + using namespace odbc::type_traits; - using ignite::odbc::app::ApplicationDataBuffer; + using odbc::app::ApplicationDataBuffer; LOG_MSG("SQLGetDiagField called: %d\n", recNum); @@ -1070,7 +1086,7 @@ namespace ignite } } - if (result == SQL_RESULT_SUCCESS) + if (resLen && result == 
SQL_RESULT_SUCCESS) *resLen = static_cast<SQLSMALLINT>(outResLen); return SqlResultToReturnCode(result); @@ -1085,12 +1101,12 @@ namespace ignite SQLSMALLINT msgBufferLen, SQLSMALLINT* msgLen) { - using namespace ignite::utility; - using namespace ignite::odbc; - using namespace ignite::odbc::diagnostic; - using namespace ignite::odbc::type_traits; + using namespace utility; + using namespace odbc; + using namespace odbc::diagnostic; + using namespace odbc::type_traits; - using ignite::odbc::app::ApplicationDataBuffer; + using odbc::app::ApplicationDataBuffer; LOG_MSG("SQLGetDiagRec called\n"); @@ -1129,14 +1145,15 @@ namespace ignite outBuffer.PutString(record.GetMessageText()); - *msgLen = static_cast<SQLSMALLINT>(outResLen); + if (msgLen) + *msgLen = static_cast<SQLSMALLINT>(outResLen); return SQL_SUCCESS; } SQLRETURN SQLGetTypeInfo(SQLHSTMT stmt, SQLSMALLINT type) { - using ignite::odbc::Statement; + using odbc::Statement; LOG_MSG("SQLGetTypeInfo called\n"); @@ -1152,7 +1169,7 @@ namespace ignite SQLRETURN SQLEndTran(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT completionType) { - using namespace ignite::odbc; + using namespace odbc; LOG_MSG("SQLEndTran called\n"); @@ -1212,10 +1229,10 @@ namespace ignite SQLLEN bufferLength, SQLLEN* strLengthOrIndicator) { - using namespace ignite::odbc::type_traits; + using namespace odbc::type_traits; - using ignite::odbc::Statement; - using ignite::odbc::app::ApplicationDataBuffer; + using odbc::Statement; + using odbc::app::ApplicationDataBuffer; LOG_MSG("SQLGetData called\n"); @@ -1238,7 +1255,7 @@ namespace ignite SQLPOINTER value, SQLINTEGER valueLen) { - using ignite::odbc::Environment; + using odbc::Environment; LOG_MSG("SQLSetEnvAttr called\n"); @@ -1258,10 +1275,10 @@ namespace ignite SQLINTEGER valueBufLen, SQLINTEGER* valueResLen) { - using namespace ignite::odbc; - using namespace ignite::odbc::type_traits; + using namespace odbc; + using namespace odbc::type_traits; - using ignite::odbc::app::ApplicationDataBuffer; + using odbc::app::ApplicationDataBuffer; LOG_MSG("SQLGetEnvAttr called\n"); @@ -1293,9 +1310,9 @@ namespace ignite SQLSMALLINT scope, SQLSMALLINT nullable) { - using namespace ignite::odbc; + using namespace odbc; - using ignite::utility::SqlStringToString; + using utility::SqlStringToString; LOG_MSG("SQLSpecialColumns called\n"); diff --git a/modules/platforms/cpp/odbc/src/statement.cpp b/modules/platforms/cpp/odbc/src/statement.cpp index 96a63276bfd7f..8aae156dc7818 100644 --- a/modules/platforms/cpp/odbc/src/statement.cpp +++ b/modules/platforms/cpp/odbc/src/statement.cpp @@ -463,7 +463,7 @@ namespace ignite if (found && strbuf) outSize = utility::CopyStringToBuffer(out, strbuf, buflen); - if (found && strbuf) + if (found && reslen) *reslen = static_cast(outSize); } From 548fe6a3fe4d38caebe3b45366f3e812b035d006 Mon Sep 17 00:00:00 2001 From: tledkov-gridgain Date: Mon, 26 Sep 2016 12:05:07 +0300 Subject: [PATCH 22/69] IGNITE-3333: IGFS: Now it is possible to use ATOMIC cache for data blocks when fragmentizer is disabled. This closes #1108.
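
For reference, this is the kind of configuration the change enables. A minimal sketch only: the cache and IGFS names are illustrative, and it assumes the 1.x-era setters that appear in the diffs below (FileSystemConfiguration.setFragmentizerEnabled(), CacheConfiguration.setAtomicityMode()).

import org.apache.ignite.Ignition;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.FileSystemConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;

public class IgfsAtomicDataCacheExample {
    public static void main(String[] args) {
        // IGFS instance: the fragmentizer must be disabled for a non-transactional data cache.
        FileSystemConfiguration igfsCfg = new FileSystemConfiguration();

        igfsCfg.setName("igfs");
        igfsCfg.setDataCacheName("igfs-data");
        igfsCfg.setMetaCacheName("igfs-meta");
        igfsCfg.setFragmentizerEnabled(false);

        // Data cache may now be ATOMIC; before this patch IgfsProcessor rejected it at startup.
        CacheConfiguration dataCacheCfg = new CacheConfiguration("igfs-data");
        dataCacheCfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);

        // Metadata cache still has to be TRANSACTIONAL.
        CacheConfiguration metaCacheCfg = new CacheConfiguration("igfs-meta");
        metaCacheCfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

        IgniteConfiguration cfg = new IgniteConfiguration();

        cfg.setFileSystemConfiguration(igfsCfg);
        cfg.setCacheConfiguration(dataCacheCfg, metaCacheCfg);

        Ignition.start(cfg);
    }
}
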
--- .../FileSystemConfiguration.java | 2 +- .../processors/igfs/IgfsDataManager.java | 2 +- .../processors/igfs/IgfsProcessor.java | 17 +++++++- .../igfs/IgfsAbstractBaseSelfTest.java | 8 ++++ .../IgfsAtomicPrimaryMultiNodeSelfTest.java | 39 +++++++++++++++++++ ...gfsAtomicPrimaryOffheapTieredSelfTest.java | 39 +++++++++++++++++++ ...gfsAtomicPrimaryOffheapValuesSelfTest.java | 39 +++++++++++++++++++ .../igfs/IgfsAtomicPrimarySelfTest.java | 39 +++++++++++++++++++ .../testsuites/IgniteIgfsTestSuite.java | 10 ++++- 9 files changed, 190 insertions(+), 5 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryMultiNodeSelfTest.java create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapTieredSelfTest.java create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapValuesSelfTest.java create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimarySelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/configuration/FileSystemConfiguration.java b/modules/core/src/main/java/org/apache/ignite/configuration/FileSystemConfiguration.java index 6e0e5e78a0203..e665e843c0ccf 100644 --- a/modules/core/src/main/java/org/apache/ignite/configuration/FileSystemConfiguration.java +++ b/modules/core/src/main/java/org/apache/ignite/configuration/FileSystemConfiguration.java @@ -879,7 +879,7 @@ public void setInitializeDefaultPathModes(boolean initDfltPathModes) { *

* IGFS stores information about file system structure (metadata) inside a transactional cache configured through * {@link #getMetaCacheName()} property. Metadata updates caused by operations on IGFS usually require several - * intearnal keys to be updated. As IGFS metadata cache usually operates in {@link CacheMode#REPLICATED} mode, + * internal keys to be updated. As IGFS metadata cache usually operates in {@link CacheMode#REPLICATED} mode, * meaning that all nodes have all metadata locally, it makes sense to give a hint to Ignite to co-locate * ownership of all metadata keys on a single node. This will decrease amount of network trips required to update * metadata and hence could improve performance. diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java index 2f704aea515de..e534800efd7a6 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsDataManager.java @@ -609,7 +609,7 @@ public void cleanBlocks(IgfsEntryInfo fileInfo, IgfsFileAffinityRange range, boo } /** - * Moves all colocated blocks in range to non-colocated keys. + * Moves all collocated blocks in range to non-colocated keys. * @param fileInfo File info to move data for. * @param range Range to move. */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsProcessor.java index 85dcb1cd1cbc0..5c0e030b1b8c9 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsProcessor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsProcessor.java @@ -311,8 +311,9 @@ private void validateLocalIgfsConfigurations(FileSystemConfiguration[] cfgs) thr if (GridQueryProcessor.isEnabled(dataCacheCfg)) throw new IgniteCheckedException("IGFS data cache cannot start with enabled query indexing."); - if (dataCacheCfg.getAtomicityMode() != TRANSACTIONAL) - throw new IgniteCheckedException("Data cache should be transactional: " + cfg.getDataCacheName()); + if (dataCacheCfg.getAtomicityMode() != TRANSACTIONAL && cfg.isFragmentizerEnabled()) + throw new IgniteCheckedException("Data cache should be transactional: " + cfg.getDataCacheName() + + " when fragmentizer is enabled"); if (metaCacheCfg == null) throw new IgniteCheckedException("Metadata cache is not configured locally for IGFS: " + cfg); @@ -442,6 +443,18 @@ private void checkIgfsOnRemoteNode(ClusterNode rmtNode) throws IgniteCheckedExce } } + /** + * Check IGFS property equality on local and remote nodes. + * + * @param name Property human readable name. + * @param propName Property name/ + * @param rmtNodeId Remote node ID. + * @param rmtVal Remote value. + * @param locVal Local value. + * @param igfsName IGFS name. + * + * @throws IgniteCheckedException If failed. 
+ */ private void checkSame(String name, String propName, UUID rmtNodeId, Object rmtVal, Object locVal, String igfsName) throws IgniteCheckedException { if (!F.eq(rmtVal, locVal)) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java index 58c4c50707656..79dc57b630ee9 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java @@ -219,6 +219,13 @@ protected boolean relaxedConsistency() { return false; } + /** + * @return FragmentizerEnabled IGFS config flag. + */ + protected boolean fragmentizerEnabled() { + return true; + } + /** * @return Relaxed consistency flag. */ @@ -378,6 +385,7 @@ protected Ignite startGridWithIgfs(String gridName, String igfsName, IgfsMode mo igfsCfg.setPrefetchBlocks(PREFETCH_BLOCKS); igfsCfg.setSequentialReadsBeforePrefetch(SEQ_READS_BEFORE_PREFETCH); igfsCfg.setRelaxedConsistency(relaxedConsistency()); + igfsCfg.setFragmentizerEnabled(fragmentizerEnabled()); igfsCfg.setInitializeDefaultPathModes(initializeDefaultPathModes()); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryMultiNodeSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryMultiNodeSelfTest.java new file mode 100644 index 0000000000000..0e342a9fce8fc --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryMultiNodeSelfTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +import org.apache.ignite.cache.CacheAtomicityMode; +import org.apache.ignite.configuration.CacheConfiguration; + +/** + * Tests for PRIMARY mode. 
+ */ +public class IgfsAtomicPrimaryMultiNodeSelfTest extends IgfsPrimaryMultiNodeSelfTest { + /** {@inheritDoc} */ + @Override protected boolean fragmentizerEnabled() { + return false; + } + + /** {@inheritDoc} */ + @Override protected void prepareCacheConfigurations(CacheConfiguration dataCacheCfg, + CacheConfiguration metaCacheCfg) { + super.prepareCacheConfigurations(dataCacheCfg, metaCacheCfg); + + dataCacheCfg.setAtomicityMode(CacheAtomicityMode.ATOMIC); + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapTieredSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapTieredSelfTest.java new file mode 100644 index 0000000000000..09b4f9e184bf6 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapTieredSelfTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +import org.apache.ignite.cache.CacheAtomicityMode; +import org.apache.ignite.configuration.CacheConfiguration; + +/** + * Tests for PRIMARY mode. + */ +public class IgfsAtomicPrimaryOffheapTieredSelfTest extends IgfsPrimaryOffheapTieredSelfTest { + /** {@inheritDoc} */ + @Override protected boolean fragmentizerEnabled() { + return false; + } + + /** {@inheritDoc} */ + @Override protected void prepareCacheConfigurations(CacheConfiguration dataCacheCfg, + CacheConfiguration metaCacheCfg) { + super.prepareCacheConfigurations(dataCacheCfg, metaCacheCfg); + + dataCacheCfg.setAtomicityMode(CacheAtomicityMode.ATOMIC); + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapValuesSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapValuesSelfTest.java new file mode 100644 index 0000000000000..8e9965df1f179 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimaryOffheapValuesSelfTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +import org.apache.ignite.cache.CacheAtomicityMode; +import org.apache.ignite.configuration.CacheConfiguration; + +/** + * Tests for PRIMARY mode. + */ +public class IgfsAtomicPrimaryOffheapValuesSelfTest extends IgfsPrimaryOffheapValuesSelfTest { + /** {@inheritDoc} */ + @Override protected boolean fragmentizerEnabled() { + return false; + } + + /** {@inheritDoc} */ + @Override protected void prepareCacheConfigurations(CacheConfiguration dataCacheCfg, + CacheConfiguration metaCacheCfg) { + super.prepareCacheConfigurations(dataCacheCfg, metaCacheCfg); + + dataCacheCfg.setAtomicityMode(CacheAtomicityMode.ATOMIC); + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimarySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimarySelfTest.java new file mode 100644 index 0000000000000..1c28d6baa7883 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAtomicPrimarySelfTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs; + +import org.apache.ignite.cache.CacheAtomicityMode; +import org.apache.ignite.configuration.CacheConfiguration; + +/** + * Tests for PRIMARY mode. 
+ */ +public class IgfsAtomicPrimarySelfTest extends IgfsPrimarySelfTest { + /** {@inheritDoc} */ + @Override protected boolean fragmentizerEnabled() { + return false; + } + + /** {@inheritDoc} */ + @Override protected void prepareCacheConfigurations(CacheConfiguration dataCacheCfg, + CacheConfiguration metaCacheCfg) { + super.prepareCacheConfigurations(dataCacheCfg, metaCacheCfg); + + dataCacheCfg.setAtomicityMode(CacheAtomicityMode.ATOMIC); + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java index 5a7e3d7456c9e..775c2ceb11d69 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgfsTestSuite.java @@ -20,6 +20,10 @@ import junit.framework.TestSuite; import org.apache.ignite.igfs.IgfsFragmentizerSelfTest; import org.apache.ignite.igfs.IgfsFragmentizerTopologySelfTest; +import org.apache.ignite.internal.processors.igfs.IgfsAtomicPrimaryMultiNodeSelfTest; +import org.apache.ignite.internal.processors.igfs.IgfsAtomicPrimaryOffheapTieredSelfTest; +import org.apache.ignite.internal.processors.igfs.IgfsAtomicPrimaryOffheapValuesSelfTest; +import org.apache.ignite.internal.processors.igfs.IgfsAtomicPrimarySelfTest; import org.apache.ignite.internal.processors.igfs.IgfsAttributesSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsBackupsDualAsyncSelfTest; import org.apache.ignite.internal.processors.igfs.IgfsBackupsDualSyncSelfTest; @@ -154,11 +158,15 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(IgfsMaxSizeSelfTest.class); - suite.addTestSuite(IgfsProxySelfTest.class); suite.addTestSuite(IgfsLocalSecondaryFileSystemProxySelfTest.class); suite.addTestSuite(IgfsLocalSecondaryFileSystemProxyClientSelfTest.class); + suite.addTestSuite(IgfsAtomicPrimarySelfTest.class); + suite.addTestSuite(IgfsAtomicPrimaryMultiNodeSelfTest.class); + suite.addTestSuite(IgfsAtomicPrimaryOffheapTieredSelfTest.class); + suite.addTestSuite(IgfsAtomicPrimaryOffheapValuesSelfTest.class); + return suite; } } \ No newline at end of file From 8032fc2c8a7cf9f404eb75c65164bb2900aab79d Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Mon, 26 Sep 2016 12:31:30 +0300 Subject: [PATCH 23/69] IGNITE-3912: Hadoop: Implemented new class loading architecture for embedded execution mode. 
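
The bulk of this patch is the rename below: Hadoop-dependent code moves into impl/ and delegate packages inside the ignite-hadoop module, while ignite-core keeps only a thin facade (HadoopClassLoader, HadoopHelper, HadoopNoopHelper). A rough sketch of the shape of that split follows; the class names are taken from the file list, but the method signatures are illustrative assumptions, not the actual API.

// ignite-core side: a facade the kernal can always reference.
// (Hypothetical signatures, for illustration only.)
public interface HadoopHelper {
    /** Whether this is the no-op variant, i.e. the ignite-hadoop module is absent. */
    boolean isNoOp();

    /** Class loader that isolates Hadoop and user job classes from the core classpath. */
    ClassLoader commonClassLoader();
}

/** Fallback bound when ignite-hadoop is not on the classpath. */
public class HadoopNoopHelper implements HadoopHelper {
    @Override public boolean isNoOp() {
        return true;
    }

    @Override public ClassLoader commonClassLoader() {
        throw new UnsupportedOperationException("Hadoop module is not in classpath.");
    }
}

// ignite-hadoop side: HadoopHelperImpl supplies the HadoopClassLoader, and classes such as
// HadoopDelegateUtils presumably wire the delegate interfaces to their impl.delegate.*
// implementations, so Hadoop types never leak into the core class loader.
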
--- bin/include/setenv.bat | 8 - bin/include/setenv.sh | 31 - .../ignite/internal/GridKernalContext.java | 8 + .../internal/GridKernalContextImpl.java | 12 + .../ignite/internal/IgniteComponentType.java | 11 +- .../apache/ignite/internal/IgniteKernal.java | 8 +- .../processors/hadoop/HadoopClassLoader.java | 487 +++++++++ .../hadoop/HadoopClasspathUtils.java | 57 +- .../hadoop/HadoopDefaultJobInfo.java | 6 +- .../processors/hadoop/HadoopHelper.java | 55 + .../processors/hadoop/HadoopJobInfo.java | 4 +- .../processors/hadoop/HadoopLocations.java | 0 .../processors/hadoop/HadoopNoopHelper.java | 66 ++ .../hadoop/HadoopNoopProcessor.java | 4 +- .../internal/processors/igfs/IgfsImpl.java | 5 +- .../igfs/IgfsKernalContextAware.java} | 20 +- .../fs/BasicHadoopFileSystemFactory.java | 125 +-- .../fs/CachingHadoopFileSystemFactory.java | 54 +- .../hadoop/fs/HadoopFileSystemFactory.java | 11 +- .../IgniteHadoopFileSystemCounterWriter.java | 79 +- .../IgniteHadoopIgfsSecondaryFileSystem.java | 402 +------- .../fs/KerberosHadoopFileSystemFactory.java | 77 +- .../hadoop/fs/v1/IgniteHadoopFileSystem.java | 47 +- .../hadoop/fs/v2/IgniteHadoopFileSystem.java | 47 +- .../IgniteHadoopClientProtocolProvider.java | 11 +- .../IgniteHadoopMapReducePlanner.java | 22 +- .../IgniteHadoopWeightedMapReducePlanner.java | 4 +- .../ignite/hadoop/util/UserNameMapper.java | 4 +- .../processors/hadoop/HadoopClassLoader.java | 964 ------------------ .../hadoop/HadoopClasspathMain.java | 44 - .../processors/hadoop/HadoopCommonUtils.java | 154 +++ .../processors/hadoop/HadoopContext.java | 1 - .../hadoop/{v2 => }/HadoopExternalSplit.java | 3 +- .../processors/hadoop/HadoopHelperImpl.java | 120 +++ .../processors/hadoop/HadoopProcessor.java | 32 +- .../hadoop/{v2 => }/HadoopSplitWrapper.java | 4 +- .../hadoop/counter/HadoopCounterAdapter.java | 1 + .../counter/HadoopPerformanceCounter.java | 12 +- .../hadoop/delegate/HadoopDelegateUtils.java | 138 +++ ...adoopFileSystemCounterWriterDelegate.java} | 20 +- .../HadoopFileSystemFactoryDelegate.java} | 28 +- ...adoopIgfsSecondaryFileSystemDelegate.java} | 9 +- .../HadoopMapReduceCounterGroup.java | 9 +- .../{ => impl}/HadoopMapReduceCounters.java | 23 +- .../hadoop/{ => impl}/HadoopUtils.java | 155 +-- .../HadoopBasicFileSystemFactoryDelegate.java | 164 +++ ...adoopCachingFileSystemFactoryDelegate.java | 75 ++ ...adoopDefaultFileSystemFactoryDelegate.java | 62 ++ ...opFileSystemCounterWriterDelegateImpl.java | 108 ++ ...opIgfsSecondaryFileSystemDelegateImpl.java | 472 +++++++++ ...doopKerberosFileSystemFactoryDelegate.java | 112 ++ .../fs/HadoopFileSystemCacheUtils.java | 7 +- .../{ => impl}/fs/HadoopFileSystemsUtils.java | 2 +- .../fs/HadoopLazyConcurrentMap.java | 14 +- .../fs/HadoopLocalFileSystemV1.java | 5 +- .../fs/HadoopLocalFileSystemV2.java | 9 +- .../{ => impl}/fs/HadoopParameters.java | 2 +- .../fs/HadoopRawLocalFileSystem.java | 21 +- .../hadoop/{ => impl}/igfs/HadoopIgfs.java | 9 +- .../HadoopIgfsCommunicationException.java | 2 +- .../hadoop/{ => impl}/igfs/HadoopIgfsEx.java | 5 +- .../{ => impl}/igfs/HadoopIgfsFuture.java | 2 +- .../{ => impl}/igfs/HadoopIgfsInProc.java | 15 +- .../igfs/HadoopIgfsInputStream.java | 9 +- .../hadoop/{ => impl}/igfs/HadoopIgfsIo.java | 2 +- .../{ => impl}/igfs/HadoopIgfsIpcIo.java | 27 +- .../igfs/HadoopIgfsIpcIoListener.java | 2 +- .../{ => impl}/igfs/HadoopIgfsJclLogger.java | 2 +- .../{ => impl}/igfs/HadoopIgfsOutProc.java | 9 +- .../igfs/HadoopIgfsOutputStream.java | 7 +- .../{ => impl}/igfs/HadoopIgfsProperties.java | 5 +- 
.../igfs/HadoopIgfsProxyInputStream.java | 7 +- .../igfs/HadoopIgfsProxyOutputStream.java | 7 +- ...SecondaryFileSystemPositionedReadable.java | 5 +- .../igfs/HadoopIgfsStreamDelegate.java | 2 +- .../igfs/HadoopIgfsStreamEventListener.java | 2 +- .../{ => impl}/igfs/HadoopIgfsUtils.java | 7 +- .../{ => impl}/igfs/HadoopIgfsWrapper.java | 24 +- .../proto/HadoopClientProtocol.java | 27 +- .../{ => impl}/v1/HadoopV1CleanupTask.java | 7 +- .../hadoop/{ => impl}/v1/HadoopV1Counter.java | 11 +- .../hadoop/{ => impl}/v1/HadoopV1MapTask.java | 4 +- .../v1/HadoopV1OutputCollector.java | 5 +- .../{ => impl}/v1/HadoopV1Partitioner.java | 2 +- .../{ => impl}/v1/HadoopV1ReduceTask.java | 4 +- .../{ => impl}/v1/HadoopV1Reporter.java | 2 +- .../{ => impl}/v1/HadoopV1SetupTask.java | 7 +- .../{ => impl}/v1/HadoopV1Splitter.java | 11 +- .../hadoop/{ => impl}/v1/HadoopV1Task.java | 9 +- .../hadoop/{ => impl}/v2/HadoopDaemon.java | 2 +- .../v2/HadoopSerializationWrapper.java | 13 +- .../v2/HadoopShutdownHookManager.java | 2 +- .../{ => impl}/v2/HadoopV2CleanupTask.java | 5 +- .../hadoop/{ => impl}/v2/HadoopV2Context.java | 7 +- .../hadoop/{ => impl}/v2/HadoopV2Counter.java | 7 +- .../hadoop/{ => impl}/v2/HadoopV2Job.java | 83 +- .../v2/HadoopV2JobResourceManager.java | 33 +- .../hadoop/{ => impl}/v2/HadoopV2MapTask.java | 2 +- .../{ => impl}/v2/HadoopV2Partitioner.java | 2 +- .../{ => impl}/v2/HadoopV2ReduceTask.java | 2 +- .../{ => impl}/v2/HadoopV2SetupTask.java | 5 +- .../{ => impl}/v2/HadoopV2Splitter.java | 15 +- .../hadoop/{ => impl}/v2/HadoopV2Task.java | 5 +- .../{ => impl}/v2/HadoopV2TaskContext.java | 61 +- .../v2/HadoopWritableSerialization.java | 9 +- .../hadoop/jobtracker/HadoopJobTracker.java | 23 +- .../planner/HadoopDefaultMapReducePlan.java | 7 +- .../child/HadoopChildProcessRunner.java | 16 +- .../hadoop/HadoopClassLoaderTest.java | 110 -- .../processors/hadoop/HadoopSharedMap.java | 1 + .../hadoop/HadoopTestClassLoader.java | 106 ++ .../hadoop/deps/CircularWithoutHadoop.java | 27 - .../processors/hadoop/deps/WithCast.java | 41 - .../deps/WithConstructorInvocation.java | 31 - .../processors/hadoop/deps/WithExtends.java | 27 - .../processors/hadoop/deps/WithField.java | 29 - .../hadoop/deps/WithIndirectField.java | 27 - .../hadoop/deps/WithInnerClass.java | 31 - .../hadoop/deps/WithLocalVariable.java | 38 - .../hadoop/deps/WithMethodAnnotation.java | 32 - .../hadoop/deps/WithMethodArgument.java | 31 - .../deps/WithMethodCheckedException.java | 31 - .../hadoop/deps/WithMethodInvocation.java | 31 - .../hadoop/deps/WithMethodReturnType.java | 31 - .../deps/WithMethodRuntimeException.java | 31 - .../hadoop/deps/WithParameterAnnotation.java | 31 - .../hadoop/deps/WithStaticField.java | 29 - .../hadoop/deps/WithStaticInitializer.java | 34 - .../HadoopAbstractMapReduceTest.java | 13 +- .../{ => impl}/HadoopAbstractSelfTest.java | 4 +- .../HadoopAbstractWordCountTest.java | 2 +- .../{ => impl}/HadoopCommandLineTest.java | 6 +- ...HadoopDefaultMapReducePlannerSelfTest.java | 6 +- .../{ => impl}/HadoopErrorSimulator.java | 2 +- .../{ => impl}/HadoopFileSystemsTest.java | 4 +- .../hadoop/{ => impl}/HadoopGroupingTest.java | 41 +- .../{ => impl}/HadoopJobTrackerSelfTest.java | 29 +- .../HadoopMapReduceEmbeddedSelfTest.java | 16 +- .../HadoopMapReduceErrorResilienceTest.java | 4 +- .../{ => impl}/HadoopMapReduceTest.java | 4 +- .../HadoopNoHadoopMapReduceTest.java | 2 +- .../{ => impl}/HadoopPlannerMockJob.java | 11 +- .../{ => impl}/HadoopPopularWordsTest.java | 2 +- 
.../HadoopSerializationWrapperSelfTest.java | 5 +- .../HadoopSnappyFullMapReduceTest.java | 2 +- .../hadoop/{ => impl}/HadoopSnappyTest.java | 6 +- .../{ => impl}/HadoopSortingExternalTest.java | 2 +- .../hadoop/{ => impl}/HadoopSortingTest.java | 5 +- .../HadoopSplitWrapperSelfTest.java | 6 +- .../hadoop/{ => impl}/HadoopStartup.java | 2 +- .../HadoopTaskExecutionSelfTest.java | 39 +- .../HadoopTasksAllVersionsTest.java | 8 +- .../hadoop/{ => impl}/HadoopTasksV1Test.java | 14 +- .../hadoop/{ => impl}/HadoopTasksV2Test.java | 14 +- .../HadoopTestRoundRobinMrPlanner.java | 6 +- .../{ => impl}/HadoopTestTaskContext.java | 11 +- .../hadoop/{ => impl}/HadoopTestUtils.java | 2 +- .../hadoop/impl}/HadoopTxConfigCacheTest.java | 2 +- .../{ => impl}/HadoopUserLibsSelfTest.java | 3 +- .../{ => impl}/HadoopV2JobSelfTest.java | 22 +- .../{ => impl}/HadoopValidationSelfTest.java | 2 +- .../HadoopWeightedMapReducePlannerTest.java | 5 +- .../HadoopWeightedPlannerMapReduceTest.java | 2 +- .../{ => impl}/books/alice-in-wonderland.txt | 0 .../hadoop/{ => impl}/books/art-of-war.txt | 0 .../{ => impl}/books/huckleberry-finn.txt | 0 .../{ => impl}/books/sherlock-holmes.txt | 0 .../hadoop/{ => impl}/books/tom-sawyer.txt | 0 .../HadoopClientProtocolEmbeddedSelfTest.java | 2 +- .../client}/HadoopClientProtocolSelfTest.java | 8 +- .../{ => impl}/examples/HadoopWordCount1.java | 2 +- .../examples/HadoopWordCount1Map.java | 4 +- .../examples/HadoopWordCount1Reduce.java | 4 +- .../{ => impl}/examples/HadoopWordCount2.java | 2 +- .../examples/HadoopWordCount2Combiner.java | 4 +- .../examples/HadoopWordCount2Mapper.java | 4 +- .../examples/HadoopWordCount2Reducer.java | 4 +- ...rberosHadoopFileSystemFactorySelfTest.java | 9 +- .../impl}/igfs/Hadoop1DualAbstractTest.java | 15 +- .../igfs/Hadoop1OverIgfsDualAsyncTest.java | 4 +- .../igfs/Hadoop1OverIgfsDualSyncTest.java | 4 +- .../igfs/HadoopFIleSystemFactorySelfTest.java | 62 +- ...adoopIgfs20FileSystemAbstractSelfTest.java | 11 +- ...fs20FileSystemLoopbackPrimarySelfTest.java | 5 +- ...pIgfs20FileSystemShmemPrimarySelfTest.java | 5 +- .../igfs/HadoopIgfsDualAbstractSelfTest.java | 11 +- .../igfs/HadoopIgfsDualAsyncSelfTest.java | 2 +- .../igfs/HadoopIgfsDualSyncSelfTest.java | 2 +- ...oopIgfsSecondaryFileSystemTestAdapter.java | 14 +- ...pSecondaryFileSystemConfigurationTest.java | 22 +- .../impl}/igfs/IgfsEventsTestSuite.java | 6 +- .../igfs/IgfsNearOnlyMultiNodeSelfTest.java | 5 +- ...gniteHadoopFileSystemAbstractSelfTest.java | 13 +- .../IgniteHadoopFileSystemClientSelfTest.java | 14 +- ...niteHadoopFileSystemHandshakeSelfTest.java | 12 +- ...gniteHadoopFileSystemIpcCacheSelfTest.java | 7 +- .../IgniteHadoopFileSystemLoggerSelfTest.java | 3 +- ...teHadoopFileSystemLoggerStateSelfTest.java | 9 +- ...oopFileSystemLoopbackAbstractSelfTest.java | 6 +- ...stemLoopbackEmbeddedDualAsyncSelfTest.java | 2 +- ...ystemLoopbackEmbeddedDualSyncSelfTest.java | 2 +- ...SystemLoopbackEmbeddedPrimarySelfTest.java | 2 +- ...stemLoopbackEmbeddedSecondarySelfTest.java | 2 +- ...stemLoopbackExternalDualAsyncSelfTest.java | 2 +- ...ystemLoopbackExternalDualSyncSelfTest.java | 2 +- ...SystemLoopbackExternalPrimarySelfTest.java | 2 +- ...stemLoopbackExternalSecondarySelfTest.java | 2 +- ...ndaryFileSystemInitializationSelfTest.java | 5 +- ...HadoopFileSystemShmemAbstractSelfTest.java | 5 +- ...eSystemShmemEmbeddedDualAsyncSelfTest.java | 2 +- ...leSystemShmemEmbeddedDualSyncSelfTest.java | 2 +- ...ileSystemShmemEmbeddedPrimarySelfTest.java | 2 +- 
...eSystemShmemEmbeddedSecondarySelfTest.java | 2 +- ...eSystemShmemExternalDualAsyncSelfTest.java | 2 +- ...leSystemShmemExternalDualSyncSelfTest.java | 2 +- ...ileSystemShmemExternalPrimarySelfTest.java | 2 +- ...eSystemShmemExternalSecondarySelfTest.java | 2 +- .../collections/HadoopAbstractMapTest.java | 7 +- .../HadoopConcurrentHashMultimapSelftest.java | 4 +- .../collections/HadoopHashMapSelfTest.java | 4 +- .../collections/HadoopSkipListSelfTest.java | 8 +- .../streams/HadoopDataStreamSelfTest.java | 5 +- .../HadoopExecutorServiceTest.java | 3 +- .../HadoopExternalTaskExecutionSelfTest.java | 6 +- .../HadoopExternalCommunicationSelfTest.java | 4 +- .../util/BasicUserNameMapperSelfTest.java | 3 +- .../util/ChainedUserNameMapperSelfTest.java | 6 +- .../util/KerberosUserNameMapperSelfTest.java | 3 +- .../HadoopGroupingTestState.java} | 30 +- .../state/HadoopJobTrackerSelfTestState.java | 45 + ...HadoopMapReduceEmbeddedSelfTestState.java} | 21 +- .../HadoopTaskExecutionSelfTestValues.java | 51 + .../testsuites/IgniteHadoopTestSuite.java | 114 +-- .../IgniteIgfsLinuxAndMacOSTestSuite.java | 25 +- 234 files changed, 3377 insertions(+), 3305 deletions(-) create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java rename modules/{hadoop => core}/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java (89%) rename modules/{hadoop => core}/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java (96%) create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java rename modules/{hadoop => core}/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java (100%) create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java rename modules/{hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWIthHadoop.java => core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java} (69%) delete mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java delete mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopCommonUtils.java rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{v2 => }/HadoopExternalSplit.java (94%) create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{v2 => }/HadoopSplitWrapper.java (95%) create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java rename modules/hadoop/src/{test/java/org/apache/ignite/internal/processors/hadoop/deps/WithClassAnnotation.java => main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemCounterWriterDelegate.java} (56%) rename modules/hadoop/src/{test/java/org/apache/ignite/internal/processors/hadoop/deps/WithImplements.java => main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemFactoryDelegate.java} (62%) rename modules/hadoop/src/{test/java/org/apache/ignite/internal/processors/hadoop/deps/Without.java => 
main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopIgfsSecondaryFileSystemDelegate.java} (70%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopMapReduceCounterGroup.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopMapReduceCounters.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopUtils.java (73%) create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopBasicFileSystemFactoryDelegate.java create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopCachingFileSystemFactoryDelegate.java create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopDefaultFileSystemFactoryDelegate.java create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopFileSystemCounterWriterDelegateImpl.java create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java create mode 100644 modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopKerberosFileSystemFactoryDelegate.java rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/fs/HadoopFileSystemCacheUtils.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/fs/HadoopFileSystemsUtils.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/fs/HadoopLazyConcurrentMap.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/fs/HadoopLocalFileSystemV1.java (95%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/fs/HadoopLocalFileSystemV2.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/fs/HadoopParameters.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/fs/HadoopRawLocalFileSystem.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfs.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsCommunicationException.java (96%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsEx.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsFuture.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsInProc.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsInputStream.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsIo.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsIpcIo.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsIpcIoListener.java (94%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => 
impl}/igfs/HadoopIgfsJclLogger.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsOutProc.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsOutputStream.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsProperties.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsProxyInputStream.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsProxyOutputStream.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsSecondaryFileSystemPositionedReadable.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsStreamDelegate.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsStreamEventListener.java (95%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsUtils.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/igfs/HadoopIgfsWrapper.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/proto/HadoopClientProtocol.java (91%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1CleanupTask.java (93%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1Counter.java (95%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1MapTask.java (96%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1OutputCollector.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1Partitioner.java (96%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1ReduceTask.java (95%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1Reporter.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1SetupTask.java (92%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1Splitter.java (96%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v1/HadoopV1Task.java (95%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopDaemon.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopSerializationWrapper.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopShutdownHookManager.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2CleanupTask.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2Context.java (99%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2Counter.java (97%) rename 
modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2Job.java (88%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2JobResourceManager.java (96%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2MapTask.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2Partitioner.java (96%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2ReduceTask.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2SetupTask.java (97%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2Splitter.java (96%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2Task.java (98%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopV2TaskContext.java (89%) rename modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/v2/HadoopWritableSerialization.java (97%) delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoaderTest.java create mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestClassLoader.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWithoutHadoop.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithCast.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithConstructorInvocation.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithExtends.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithField.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithIndirectField.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithInnerClass.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithLocalVariable.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodAnnotation.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodArgument.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodCheckedException.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodInvocation.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodReturnType.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodRuntimeException.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithParameterAnnotation.java delete mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticField.java delete mode 100644 
modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticInitializer.java rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopAbstractMapReduceTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopAbstractSelfTest.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopAbstractWordCountTest.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopCommandLineTest.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopDefaultMapReducePlannerSelfTest.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopErrorSimulator.java (99%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopFileSystemsTest.java (97%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopGroupingTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopJobTrackerSelfTest.java (92%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopMapReduceEmbeddedSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopMapReduceErrorResilienceTest.java (97%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopMapReduceTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopNoHadoopMapReduceTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopPlannerMockJob.java (87%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopPopularWordsTest.java (99%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopSerializationWrapperSelfTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopSnappyFullMapReduceTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopSnappyTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopSortingExternalTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopSortingTest.java (97%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopSplitWrapperSelfTest.java (91%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopStartup.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopTaskExecutionSelfTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopTasksAllVersionsTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopTasksV1Test.java (75%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopTasksV2Test.java (80%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopTestRoundRobinMrPlanner.java 
(87%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopTestTaskContext.java (92%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopTestUtils.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/{hadoop/cache => internal/processors/hadoop/impl}/HadoopTxConfigCacheTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopUserLibsSelfTest.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopV2JobSelfTest.java (79%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopValidationSelfTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopWeightedMapReducePlannerTest.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/HadoopWeightedPlannerMapReduceTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/books/alice-in-wonderland.txt (100%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/books/art-of-war.txt (100%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/books/huckleberry-finn.txt (100%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/books/sherlock-holmes.txt (100%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/books/tom-sawyer.txt (100%) rename modules/hadoop/src/test/java/org/apache/ignite/{client/hadoop => internal/processors/hadoop/impl/client}/HadoopClientProtocolEmbeddedSelfTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/{client/hadoop => internal/processors/hadoop/impl/client}/HadoopClientProtocolSelfTest.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/examples/HadoopWordCount1.java (97%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/examples/HadoopWordCount1Map.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/examples/HadoopWordCount1Reduce.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/examples/HadoopWordCount2.java (98%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/examples/HadoopWordCount2Combiner.java (91%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/examples/HadoopWordCount2Mapper.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/examples/HadoopWordCount2Reducer.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/{hadoop => internal/processors/hadoop/impl}/fs/KerberosHadoopFileSystemFactorySelfTest.java (90%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/Hadoop1DualAbstractTest.java (88%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/Hadoop1OverIgfsDualAsyncTest.java (90%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/Hadoop1OverIgfsDualSyncTest.java (90%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => 
internal/processors/hadoop/impl}/igfs/HadoopFIleSystemFactorySelfTest.java (83%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java (99%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopIgfs20FileSystemLoopbackPrimarySelfTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopIgfs20FileSystemShmemPrimarySelfTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopIgfsDualAbstractSelfTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopIgfsDualAsyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopIgfsDualSyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopIgfsSecondaryFileSystemTestAdapter.java (89%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/HadoopSecondaryFileSystemConfigurationTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgfsEventsTestSuite.java (97%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgfsNearOnlyMultiNodeSelfTest.java (97%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemAbstractSelfTest.java (99%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemClientSelfTest.java (92%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemHandshakeSelfTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemIpcCacheSelfTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoggerSelfTest.java (99%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoggerStateSelfTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackAbstractSelfTest.java (88%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.java (94%) rename 
modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemAbstractSelfTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemExternalDualSyncSelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemExternalPrimarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/{ => internal/processors/hadoop/impl}/igfs/IgniteHadoopFileSystemShmemExternalSecondarySelfTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/shuffle/collections/HadoopAbstractMapTest.java (94%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/shuffle/collections/HadoopConcurrentHashMultimapSelftest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/shuffle/collections/HadoopHashMapSelfTest.java (93%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/shuffle/collections/HadoopSkipListSelfTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/shuffle/streams/HadoopDataStreamSelfTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/taskexecutor/HadoopExecutorServiceTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/taskexecutor/external/HadoopExternalTaskExecutionSelfTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{ => impl}/taskexecutor/external/communication/HadoopExternalCommunicationSelfTest.java (95%) rename modules/hadoop/src/test/java/org/apache/ignite/{hadoop => internal/processors/hadoop/impl}/util/BasicUserNameMapperSelfTest.java (97%) rename modules/hadoop/src/test/java/org/apache/ignite/{hadoop => internal/processors/hadoop/impl}/util/ChainedUserNameMapperSelfTest.java (92%) rename modules/hadoop/src/test/java/org/apache/ignite/{hadoop => 
internal/processors/hadoop/impl}/util/KerberosUserNameMapperSelfTest.java (96%) rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{deps/WithOuterClass.java => state/HadoopGroupingTestState.java} (57%) create mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopJobTrackerSelfTestState.java rename modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/{deps/WithInitializer.java => state/HadoopMapReduceEmbeddedSelfTestState.java} (64%) create mode 100644 modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopTaskExecutionSelfTestValues.java diff --git a/bin/include/setenv.bat b/bin/include/setenv.bat index 9d55521655623..82b8fcec5d6bf 100644 --- a/bin/include/setenv.bat +++ b/bin/include/setenv.bat @@ -43,16 +43,8 @@ set IGNITE_LIBS=%IGNITE_HOME%\libs\* for /D %%F in (%IGNITE_HOME%\libs\*) do if not "%%F" == "%IGNITE_HOME%\libs\optional" call :concat %%F\* -if exist %IGNITE_HOME%\libs\ignite-hadoop set HADOOP_EDITION=1 - if defined USER_LIBS set IGNITE_LIBS=%USER_LIBS%;%IGNITE_LIBS% -if "%HADOOP_EDITION%" == "1" FOR /F "delims=" %%i IN ('%JAVA_HOME%\bin\java.exe -cp %IGNITE_HOME%\libs\ignite-hadoop\* org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ";"' ) DO set IGNITE_HADOOP_CLASSPATH=%%i - -if "%IGNITE_HADOOP_CLASSPATH%" == "" goto :eof - -set IGNITE_LIBS=%IGNITE_LIBS%;%IGNITE_HADOOP_CLASSPATH% - goto :eof :concat diff --git a/bin/include/setenv.sh b/bin/include/setenv.sh index a85cba3004092..e088c0852cd4d 100755 --- a/bin/include/setenv.sh +++ b/bin/include/setenv.sh @@ -61,39 +61,8 @@ do if [ -d ${file} ] && [ "${file}" != "${IGNITE_HOME}"/libs/optional ]; then IGNITE_LIBS=${IGNITE_LIBS}${SEP}${file}/* fi - - if [ -d ${file} ] && [ "${file}" == "${IGNITE_HOME}"/libs/ignite-hadoop ]; then - HADOOP_EDITION=1 - fi done if [ "${USER_LIBS}" != "" ]; then IGNITE_LIBS=${USER_LIBS}${SEP}${IGNITE_LIBS} fi - -if [ "${HADOOP_EDITION}" == "1" ]; then - # Resolve constants. - HADOOP_DEFAULTS="/etc/default/hadoop" - - # - # Resolve the rest of Hadoop environment variables. 
- # - if [[ -z "${HADOOP_COMMON_HOME}" || -z "${HADOOP_HDFS_HOME}" || -z "${HADOOP_MAPRED_HOME}" ]]; then - if [ -f "$HADOOP_DEFAULTS" ]; then - source "$HADOOP_DEFAULTS" - fi - fi - - IGNITE_HADOOP_CLASSPATH=$( "$JAVA" -cp "${IGNITE_HOME}"/libs/ignite-hadoop/'*' \ - org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ":" ) - - statusCode=${?} - - if [ "${statusCode}" -ne 0 ]; then - exit ${statusCode} - fi - - unset statusCode - - IGNITE_LIBS=${IGNITE_LIBS}${SEP}${IGNITE_HADOOP_CLASSPATH} -fi diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java index 3eaef1e705cee..b123a4a49cc95 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java @@ -44,6 +44,7 @@ import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor; import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor; import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter; +import org.apache.ignite.internal.processors.hadoop.HadoopHelper; import org.apache.ignite.internal.processors.igfs.IgfsHelper; import org.apache.ignite.internal.processors.igfs.IgfsProcessorAdapter; import org.apache.ignite.internal.processors.job.GridJobProcessor; @@ -284,6 +285,13 @@ public interface GridKernalContext extends Iterable { */ public HadoopProcessorAdapter hadoop(); + /** + * Gets Hadoop helper. + * + * @return Hadoop helper. + */ + public HadoopHelper hadoopHelper(); + /** * Gets utility cache pool. * diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java index 1ff4543d1419c..eb214e884b5ff 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java @@ -60,6 +60,7 @@ import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor; import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor; import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter; +import org.apache.ignite.internal.processors.hadoop.HadoopHelper; import org.apache.ignite.internal.processors.igfs.IgfsHelper; import org.apache.ignite.internal.processors.igfs.IgfsProcessorAdapter; import org.apache.ignite.internal.processors.job.GridJobProcessor; @@ -236,6 +237,10 @@ public class GridKernalContextImpl implements GridKernalContext, Externalizable @GridToStringInclude private IgfsHelper igfsHelper; + /** */ + @GridToStringInclude + private HadoopHelper hadoopHelper; + /** */ @GridToStringInclude private GridSegmentationProcessor segProc; @@ -541,6 +546,8 @@ public void addHelper(Object helper) { if (helper instanceof IgfsHelper) igfsHelper = (IgfsHelper)helper; + else if (helper instanceof HadoopHelper) + hadoopHelper = (HadoopHelper)helper; else assert false : "Unknown helper class: " + helper.getClass(); } @@ -732,6 +739,11 @@ public void addHelper(Object helper) { return igfsHelper; } + /** {@inheritDoc} */ + @Override public HadoopHelper hadoopHelper() { + return hadoopHelper; + } + /** {@inheritDoc} */ @Override public GridContinuousProcessor continuous() { return contProc; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java 
b/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java index 76e495f354678..0cd2fc16d1e2a 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java @@ -41,6 +41,13 @@ public enum IgniteComponentType { "ignite-hadoop" ), + /** Hadoop Helper component. */ + HADOOP_HELPER( + "org.apache.ignite.internal.processors.hadoop.HadoopNoopHelper", + "org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl", + "ignite-hadoop" + ), + /** IGFS helper component. */ IGFS_HELPER( "org.apache.ignite.internal.processors.igfs.IgfsNoopHelper", @@ -160,7 +167,7 @@ public boolean inClassPath() { * @return Created component. * @throws IgniteCheckedException If failed. */ - public T create(GridKernalContext ctx, boolean noOp) throws IgniteCheckedException { + public T create(GridKernalContext ctx, boolean noOp) throws IgniteCheckedException { return create0(ctx, noOp ? noOpClsName : clsName); } @@ -172,7 +179,7 @@ public T create(GridKernalContext ctx, boolean noOp) t * @return Created component. * @throws IgniteCheckedException If failed. */ - public T createIfInClassPath(GridKernalContext ctx, boolean mandatory) + public T createIfInClassPath(GridKernalContext ctx, boolean mandatory) throws IgniteCheckedException { String cls = clsName; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java index c5d274848d7ee..6c5a6282420b9 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java @@ -115,6 +115,7 @@ import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor; import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor; import org.apache.ignite.internal.processors.hadoop.Hadoop; +import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader; import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter; import org.apache.ignite.internal.processors.job.GridJobProcessor; import org.apache.ignite.internal.processors.jobmetrics.GridJobMetricsProcessor; @@ -188,6 +189,7 @@ import static org.apache.ignite.internal.GridKernalState.STARTING; import static org.apache.ignite.internal.GridKernalState.STOPPED; import static org.apache.ignite.internal.GridKernalState.STOPPING; +import static org.apache.ignite.internal.IgniteComponentType.HADOOP_HELPER; import static org.apache.ignite.internal.IgniteComponentType.IGFS; import static org.apache.ignite.internal.IgniteComponentType.IGFS_HELPER; import static org.apache.ignite.internal.IgniteComponentType.SCHEDULE; @@ -821,6 +823,8 @@ public void start(final IgniteConfiguration cfg, addHelper(IGFS_HELPER.create(F.isEmpty(cfg.getFileSystemConfiguration()))); + addHelper(HADOOP_HELPER.createIfInClassPath(ctx, false)); + startProcessor(new IgnitePluginProcessor(ctx, cfg, plugins)); // Off-heap processor has no dependencies. 
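Note on the wiring above: HADOOP_HELPER reuses the same no-op/real component pattern already used for IGFS_HELPER, so ignite-core can always obtain a HadoopHelper while the heavy implementation stays in the optional ignite-hadoop module. The class below is only a self-contained sketch of that resolution pattern; the names (HelperResolutionSketch, RealHelper, NoopHelper) and the probe class are illustrative assumptions and are not part of this patch.

/**
 * Self-contained sketch (hypothetical names) of the no-op vs. real component pattern
 * used for HadoopHelper. It compiles and runs standalone; it is not Ignite code.
 */
public class HelperResolutionSketch {
    /** Mirrors the isNoOp() probe declared on HadoopHelper. */
    interface Helper {
        boolean isNoOp();
    }

    /** Stand-in for HadoopHelperImpl (shipped in the optional ignite-hadoop module). */
    static class RealHelper implements Helper {
        @Override public boolean isNoOp() {
            return false;
        }
    }

    /** Stand-in for HadoopNoopHelper (always available in ignite-core). */
    static class NoopHelper implements Helper {
        @Override public boolean isNoOp() {
            return true;
        }
    }

    /**
     * Mimics the idea behind IgniteComponentType.createIfInClassPath(): probe for a class
     * from the optional module and fall back to the no-op implementation when it is absent.
     */
    static Helper resolve(String probeClsName) {
        try {
            Class.forName(probeClsName);

            return new RealHelper();
        }
        catch (ClassNotFoundException ignore) {
            return new NoopHelper();
        }
    }

    public static void main(String[] args) {
        Helper helper = resolve("org.apache.hadoop.conf.Configuration");

        // Same kind of decision IgniteKernal.createHadoopComponent() makes with ctx.hadoopHelper().isNoOp().
        System.out.println(helper.isNoOp() ? "Hadoop is not on the classpath" : "Hadoop is on the classpath");
    }
}

Resolving the helper by classpath probing is what lets IgniteKernal decide at startup whether to start the real Hadoop processor or only warn (for example, when peer class loading is enabled), without a hard compile-time dependency on the ignite-hadoop module.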
@@ -881,7 +885,7 @@ public void start(final IgniteConfiguration cfg, startProcessor(new DataStreamProcessor(ctx)); startProcessor((GridProcessor)IGFS.create(ctx, F.isEmpty(cfg.getFileSystemConfiguration()))); startProcessor(new GridContinuousProcessor(ctx)); - startProcessor((GridProcessor)createHadoopComponent()); + startProcessor(createHadoopComponent()); startProcessor(new DataStructuresProcessor(ctx)); startProcessor(createComponent(PlatformProcessor.class, ctx)); @@ -1148,7 +1152,7 @@ private HadoopProcessorAdapter createHadoopComponent() throws IgniteCheckedExcep else { HadoopProcessorAdapter cmp = null; - if (IgniteComponentType.HADOOP.inClassPath() && cfg.isPeerClassLoadingEnabled()) { + if (!ctx.hadoopHelper().isNoOp() && cfg.isPeerClassLoadingEnabled()) { U.warn(log, "Hadoop module is found in classpath, but will not be started because peer class " + "loading is enabled (set IgniteConfiguration.peerClassLoadingEnabled to \"false\" if you want " + "to use Hadoop module)."); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java new file mode 100644 index 0000000000000..cd94c8978573d --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java @@ -0,0 +1,487 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteException; +import org.apache.ignite.internal.util.ClassCache; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.jetbrains.annotations.Nullable; +import org.jsr166.ConcurrentHashMap8; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Map; +import java.util.Vector; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +/** + * Class loader allowing explicitly load classes without delegation to parent class loader. + * Also supports class parsing for finding dependencies which contain transitive dependencies + * unavailable for parent. + */ +public class HadoopClassLoader extends URLClassLoader implements ClassCache { + /** Hadoop class name: Daemon. */ + public static final String CLS_DAEMON = "org.apache.hadoop.util.Daemon"; + + /** Hadoop class name: ShutdownHookManager. 
*/ + public static final String CLS_SHUTDOWN_HOOK_MANAGER = "org.apache.hadoop.util.ShutdownHookManager"; + + /** Hadoop class name: Daemon replacement. */ + public static final String CLS_DAEMON_REPLACE = "org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopDaemon"; + + /** Hadoop class name: ShutdownHookManager replacement. */ + public static final String CLS_SHUTDOWN_HOOK_MANAGER_REPLACE = + "org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopShutdownHookManager"; + + /** */ + private static final URLClassLoader APP_CLS_LDR = (URLClassLoader)HadoopClassLoader.class.getClassLoader(); + + /** */ + private static final Collection appJars = F.asList(APP_CLS_LDR.getURLs()); + + /** Mutex for native libraries initialization. */ + private static final Object LIBS_MUX = new Object(); + + /** Predefined native libraries to load. */ + private static final Collection PREDEFINED_NATIVE_LIBS; + + /** Native libraries. */ + private static Collection NATIVE_LIBS; + + /** */ + private static volatile Collection hadoopJars; + + /** */ + private static final Map bytesCache = new ConcurrentHashMap8<>(); + + /** Class cache. */ + private final ConcurrentMap cacheMap = new ConcurrentHashMap<>(); + + /** Diagnostic name of this class loader. */ + @SuppressWarnings({"FieldCanBeLocal", "UnusedDeclaration"}) + private final String name; + + /** Igfs Helper. */ + private final HadoopHelper helper; + + static { + // We are very parallel capable. + registerAsParallelCapable(); + + PREDEFINED_NATIVE_LIBS = new HashSet<>(); + + PREDEFINED_NATIVE_LIBS.add("hadoop"); + PREDEFINED_NATIVE_LIBS.add("MapRClient"); + } + + /** + * Gets name for the task class loader. Task class loader + * @param info The task info. + * @param prefix Get only prefix (without task type and number) + * @return The class loader name. + */ + public static String nameForTask(HadoopTaskInfo info, boolean prefix) { + if (prefix) + return "hadoop-task-" + info.jobId() + "-"; + else + return "hadoop-task-" + info.jobId() + "-" + info.type() + "-" + info.taskNumber(); + } + + /** + * Constructor. + * + * @param urls Urls. + * @param name Classloader name. + * @param libNames Optional additional native library names to be linked from parent classloader. + */ + public HadoopClassLoader(URL[] urls, String name, @Nullable String[] libNames, HadoopHelper helper) { + super(addHadoopUrls(urls), APP_CLS_LDR); + + assert !(getParent() instanceof HadoopClassLoader); + + this.name = name; + this.helper = helper; + + initializeNativeLibraries(libNames); + } + + /** + * Workaround to load native Hadoop libraries. Java doesn't allow native libraries to be loaded from different + * classloaders. But we load Hadoop classes many times and one of these classes - {@code NativeCodeLoader} - tries + * to load the same native library over and over again. + *

+ * To fix the problem, we force native library load in parent class loader and then "link" handle to this native + * library to our class loader. As a result, our class loader will think that the library is already loaded and will + * be able to link native methods. + * + * @see + * JNI specification + */ + @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter") + private void initializeNativeLibraries(@Nullable String[] usrLibs) { + Collection res; + + synchronized (LIBS_MUX) { + if (NATIVE_LIBS == null) { + LinkedList libs = new LinkedList<>(); + + for (String lib : PREDEFINED_NATIVE_LIBS) + libs.add(new NativeLibrary(lib, true)); + + if (!F.isEmpty(usrLibs)) { + for (String usrLib : usrLibs) + libs.add(new NativeLibrary(usrLib, false)); + } + + NATIVE_LIBS = initializeNativeLibraries0(libs); + } + + res = NATIVE_LIBS; + } + + // Link libraries to class loader. + Vector ldrLibs = nativeLibraries(this); + + synchronized (ldrLibs) { + ldrLibs.addAll(res); + } + } + + /** + * Initialize native libraries. + * + * @param libs Libraries to initialize. + * @return Initialized libraries. + */ + @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter") + private static Collection initializeNativeLibraries0(Collection libs) { + assert Thread.holdsLock(LIBS_MUX); + + Collection res = new HashSet<>(); + + for (NativeLibrary lib : libs) { + String libName = lib.name; + + File libFile = new File(libName); + + try { + // Load library. + if (libFile.isAbsolute()) + System.load(libName); + else + System.loadLibrary(libName); + + // Find library in class loader internals. + Object libObj = null; + + ClassLoader ldr = APP_CLS_LDR; + + while (ldr != null) { + Vector ldrLibObjs = nativeLibraries(ldr); + + synchronized (ldrLibObjs) { + for (Object ldrLibObj : ldrLibObjs) { + String name = nativeLibraryName(ldrLibObj); + + if (libFile.isAbsolute()) { + if (F.eq(name, libFile.getCanonicalPath())) { + libObj = ldrLibObj; + + break; + } + } else { + if (name.contains(libName)) { + libObj = ldrLibObj; + + break; + } + } + } + } + + if (libObj != null) + break; + + ldr = ldr.getParent(); + } + + if (libObj == null) + throw new IgniteException("Failed to find loaded library: " + libName); + + res.add(libObj); + } + catch (UnsatisfiedLinkError e) { + if (!lib.optional) + throw e; + } + catch (IOException e) { + throw new IgniteException("Failed to initialize native libraries due to unexpected exception.", e); + } + } + + return res; + } + + /** + * Get native libraries collection for the given class loader. + * + * @param ldr Class loaded. + * @return Native libraries. + */ + private static Vector nativeLibraries(ClassLoader ldr) { + assert ldr != null; + + return U.field(ldr, "nativeLibraries"); + } + + /** + * Get native library name. + * + * @param lib Library. + * @return Name. + */ + private static String nativeLibraryName(Object lib) { + assert lib != null; + + return U.field(lib, "name"); + } + + /** {@inheritDoc} */ + @Override protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + try { + // Always load Hadoop classes explicitly, since Hadoop can be available in App classpath. + if (name.equals(CLS_SHUTDOWN_HOOK_MANAGER)) // Dirty hack to get rid of Hadoop shutdown hooks. + return loadReplace(name, CLS_SHUTDOWN_HOOK_MANAGER_REPLACE); + else if (name.equals(CLS_DAEMON)) + // We replace this in order to be able to forcibly stop some daemon threads + // that otherwise never stop (e.g. 
PeerCache runnables): + return loadReplace(name, CLS_DAEMON_REPLACE); + + // For Ignite Hadoop and IGFS classes we have to check if they depend on Hadoop. + if (loadByCurrentClassloader(name)) + return loadClassExplicitly(name, resolve); + + return super.loadClass(name, resolve); + } + catch (NoClassDefFoundError | ClassNotFoundException e) { + throw new ClassNotFoundException("Failed to load class: " + name, e); + } + } + + /** + * Load a class replacing it with our own implementation. + * + * @param originalName Name. + * @param replaceName Replacement. + * @return Class. + */ + private Class loadReplace(final String originalName, final String replaceName) { + synchronized (getClassLoadingLock(originalName)) { + // First, check if the class has already been loaded + Class c = findLoadedClass(originalName); + + if (c != null) + return c; + + byte[] bytes = bytesCache.get(originalName); + + if (bytes == null) { + InputStream in = helper.loadClassBytes(this, replaceName); + + if (in == null) + throw new IgniteException("Failed to replace class [originalName=" + originalName + + ", replaceName=" + replaceName + ']'); + + bytes = helper.loadReplace(in, originalName, replaceName); + + bytesCache.put(originalName, bytes); + } + + return defineClass(originalName, bytes, 0, bytes.length); + } + } + + /** {@inheritDoc} */ + @Override public Class getFromCache(String clsName) throws ClassNotFoundException { + Class cls = cacheMap.get(clsName); + + if (cls == null) { + Class old = cacheMap.putIfAbsent(clsName, cls = Class.forName(clsName, true, this)); + + if (old != null) + cls = old; + } + + return cls; + } + + /** + * Check whether file must be loaded with current class loader, or normal delegation model should be used. + *

+ * Override is only necessary for Ignite classes which have direct or transitive dependencies on Hadoop classes. + * These are all classes from "org.apache.ignite.internal.processors.hadoop.impl" package, + * and these are several well-know classes from "org.apache.ignite.hadoop" package. + * + * @param clsName Class name. + * @return Whether class must be loaded by current classloader without delegation. + */ + @SuppressWarnings("RedundantIfStatement") + public static boolean loadByCurrentClassloader(String clsName) { + // All impl classes. + if (clsName.startsWith("org.apache.ignite.internal.processors.hadoop.impl")) + return true; + + // Several classes from public API. + if (clsName.startsWith("org.apache.ignite.hadoop")) { + // We use "contains" instead of "equals" to handle subclasses properly. + if (clsName.contains("org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem") || + clsName.contains("org.apache.ignite.hadoop.fs.v2.IgniteHadoopFileSystem") || + clsName.contains("org.apache.ignite.hadoop.mapreduce.IgniteHadoopClientProtocolProvider")) + return true; + } + + return false; + } + + /** + * @param name Class name. + * @param resolve Resolve class. + * @return Class. + * @throws ClassNotFoundException If failed. + */ + private Class loadClassExplicitly(String name, boolean resolve) throws ClassNotFoundException { + synchronized (getClassLoadingLock(name)) { + // First, check if the class has already been loaded + Class c = findLoadedClass(name); + + if (c == null) { + long t1 = System.nanoTime(); + + c = findClass(name); + + // this is the defining class loader; record the stats + sun.misc.PerfCounter.getFindClassTime().addElapsedTimeFrom(t1); + sun.misc.PerfCounter.getFindClasses().increment(); + } + + if (resolve) + resolveClass(c); + + return c; + } + } + + /** + * @param urls URLs. + * @return URLs. + */ + private static URL[] addHadoopUrls(URL[] urls) { + Collection hadoopJars; + + try { + hadoopJars = hadoopUrls(); + } + catch (IgniteCheckedException e) { + throw new RuntimeException(e); + } + + ArrayList list = new ArrayList<>(hadoopJars.size() + appJars.size() + (urls == null ? 0 : urls.length)); + + list.addAll(appJars); + list.addAll(hadoopJars); + + if (!F.isEmpty(urls)) + list.addAll(F.asList(urls)); + + return list.toArray(new URL[list.size()]); + } + + /** + * @return Collection of jar URLs. + * @throws IgniteCheckedException If failed. + */ + public static Collection hadoopUrls() throws IgniteCheckedException { + Collection hadoopUrls = hadoopJars; + + if (hadoopUrls != null) + return hadoopUrls; + + synchronized (HadoopClassLoader.class) { + hadoopUrls = hadoopJars; + + if (hadoopUrls != null) + return hadoopUrls; + + try { + hadoopUrls = HadoopClasspathUtils.classpathForClassLoader(); + } + catch (IOException e) { + throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations: " + e.getMessage(), e); + } + + hadoopJars = hadoopUrls; + + return hadoopUrls; + } + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(HadoopClassLoader.class, this); + } + + /** + * Getter for name field. + */ + public String name() { + return name; + } + + /** + * Native library abstraction. + */ + private static class NativeLibrary { + /** Library name. */ + private final String name; + + /** Whether library is optional. */ + private final boolean optional; + + /** + * Constructor. + * + * @param name Library name. + * @param optional Optional flag. 
+ */ + public NativeLibrary(String name, boolean optional) { + this.name = name; + this.optional = optional; + } + } +} \ No newline at end of file diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java similarity index 89% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java rename to modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java index f5c2814d0f3dd..7579ddb1d8e7f 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java @@ -55,31 +55,6 @@ public class HadoopClasspathUtils { /** Empty string. */ private static final String EMPTY_STR = ""; - /** - * Gets Hadoop class path as list of classpath elements for process. - * - * @return List of the class path elements. - * @throws IOException If failed. - */ - public static List classpathForProcess() throws IOException { - List res = new ArrayList<>(); - - for (final SearchDirectory dir : classpathDirectories()) { - File[] files = dir.files(); - - if (dir.useWildcard()) { - if (files.length > 0) - res.add(dir.absolutePath() + File.separator + '*'); - } - else { - for (File file : files) - res.add(file.getAbsolutePath()); - } - } - - return res; - } - /** * Gets Hadoop class path as a list of URLs (for in-process class loader usage). * @@ -190,6 +165,7 @@ private static Collection classpathDirectories() throws IOExcep Collection res = new ArrayList<>(); + // Add libraries from Hadoop distribution: res.add(new SearchDirectory(new File(loc.common(), "lib"), AcceptAllDirectoryFilter.INSTANCE)); res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), AcceptAllDirectoryFilter.INSTANCE)); res.add(new SearchDirectory(new File(loc.mapred(), "lib"), AcceptAllDirectoryFilter.INSTANCE)); @@ -204,6 +180,7 @@ private static Collection classpathDirectories() throws IOExcep res.add(new SearchDirectory(new File(loc.mapred()), new PrefixDirectoryFilter("hadoop-mapreduce-client-core"))); + // Add user provided libs: res.addAll(parseUserLibs()); return res; @@ -215,7 +192,7 @@ private static Collection classpathDirectories() throws IOExcep * @return Parsed libs search patterns. * @throws IOException If failed. */ - static Collection parseUserLibs() throws IOException { + public static Collection parseUserLibs() throws IOException { return parseUserLibs(systemOrEnv(HADOOP_USER_LIBS, null)); } @@ -226,7 +203,7 @@ static Collection parseUserLibs() throws IOException { * @return Result. * @throws IOException If failed. */ - static Collection parseUserLibs(String str) throws IOException { + public static Collection parseUserLibs(String str) throws IOException { Collection res = new LinkedList<>(); if (!isEmpty(str)) { @@ -314,7 +291,7 @@ private static String normalize(String str) { /** * Simple pair-like structure to hold directory name and a mask assigned to it. */ - static class SearchDirectory { + public static class SearchDirectory { /** File. */ private final File dir; @@ -352,17 +329,10 @@ private SearchDirectory(File dir, DirectoryFilter filter, boolean strict) throws throw new IOException("Directory cannot be read: " + dir.getAbsolutePath()); } - /** - * @return Absolute path. 
- */ - String absolutePath() { - return dir.getAbsolutePath(); - } - /** * @return Child files. */ - File[] files() throws IOException { + public File[] files() throws IOException { File[] files = dir.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return filter.test(name); @@ -378,19 +348,12 @@ File[] files() throws IOException { else return files; } - - /** - * @return {@code True} if wildcard can be used. - */ - boolean useWildcard() { - return filter instanceof AcceptAllDirectoryFilter; - } } /** * Directory filter interface. */ - static interface DirectoryFilter { + public static interface DirectoryFilter { /** * Test if file with this name should be included. * @@ -403,7 +366,7 @@ static interface DirectoryFilter { /** * Filter to accept all files. */ - static class AcceptAllDirectoryFilter implements DirectoryFilter { + public static class AcceptAllDirectoryFilter implements DirectoryFilter { /** Singleton instance. */ public static final AcceptAllDirectoryFilter INSTANCE = new AcceptAllDirectoryFilter(); @@ -416,7 +379,7 @@ static class AcceptAllDirectoryFilter implements DirectoryFilter { /** * Filter which uses prefix to filter files. */ - static class PrefixDirectoryFilter implements DirectoryFilter { + public static class PrefixDirectoryFilter implements DirectoryFilter { /** Prefix. */ private final String prefix; @@ -440,7 +403,7 @@ public PrefixDirectoryFilter(String prefix) { /** * Filter which uses exact comparison. */ - static class ExactDirectoryFilter implements DirectoryFilter { + public static class ExactDirectoryFilter implements DirectoryFilter { /** Name. */ private final String name; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java rename to modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java index 1382c1fff84c7..ae17ac813acff 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java @@ -83,14 +83,14 @@ public HadoopDefaultJobInfo(String jobName, String user, boolean hasCombiner, in /** {@inheritDoc} */ @Override public HadoopJob createJob(Class jobCls, HadoopJobId jobId, IgniteLogger log, - @Nullable String[] libNames) throws IgniteCheckedException { + @Nullable String[] libNames, HadoopHelper helper) throws IgniteCheckedException { assert jobCls != null; try { Constructor constructor = jobCls.getConstructor(HadoopJobId.class, - HadoopDefaultJobInfo.class, IgniteLogger.class, String[].class); + HadoopDefaultJobInfo.class, IgniteLogger.class, String[].class, HadoopHelper.class); - return constructor.newInstance(jobId, this, log, libNames); + return constructor.newInstance(jobId, this, log, libNames, helper); } catch (Throwable t) { if (t instanceof Error) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java new file mode 100644 index 0000000000000..a8fee79d3380e --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java @@ -0,0 +1,55 @@ +/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import java.io.InputStream; +import org.jetbrains.annotations.Nullable; + +/** + * IGFS utility processor adapter. + */ +public interface HadoopHelper { + /** + * @return Whether this is no-op implementation. + */ + public boolean isNoOp(); + + /** + * Get common Hadoop class loader. + * + * @return Common Hadoop class loader. + */ + public HadoopClassLoader commonClassLoader(); + + /** + * Load special replacement and impersonate. + * + * @param in Input stream. + * @param originalName Original class name. + * @param replaceName Replacer class name. + * @return Result. + */ + public byte[] loadReplace(InputStream in, final String originalName, final String replaceName); + + /** + * @param ldr Loader. + * @param clsName Class. + * @return Input stream. + */ + @Nullable public InputStream loadClassBytes(ClassLoader ldr, String clsName); +} \ No newline at end of file diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java index a3b1bb66b3960..853c63dd42509 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java @@ -59,11 +59,13 @@ public interface HadoopJobInfo extends Serializable { * @param jobId Job ID. * @param log Logger. * @param libNames Optional additional native library names. + * @param helper HadoopHelper. * @return Job. * @throws IgniteCheckedException If failed. */ public HadoopJob createJob(Class jobCls, - HadoopJobId jobId, IgniteLogger log, @Nullable String[] libNames) throws IgniteCheckedException; + HadoopJobId jobId, IgniteLogger log, @Nullable String[] libNames, HadoopHelper helper) + throws IgniteCheckedException; /** * @return Number of reducers configured for job. 
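For reference, a minimal sketch of how the extended createJob() contract above is intended to be used: the HadoopHelper comes from the kernal context (ctx.hadoopHelper(), as used later in this patch series) and is passed through to the job constructor reflectively. The method name createJobExample and the idea of handing in a concrete job class directly are illustrative assumptions, not part of the patch.

    // Hedged sketch, not patch content. Assumes jobCls declares a constructor
    // (HadoopJobId, HadoopDefaultJobInfo, IgniteLogger, String[], HadoopHelper);
    // otherwise the reflective getConstructor(...) lookup in
    // HadoopDefaultJobInfo.createJob() fails with NoSuchMethodException.
    static HadoopJob createJobExample(GridKernalContext ctx, HadoopDefaultJobInfo info,
        Class<? extends HadoopJob> jobCls, HadoopJobId jobId, IgniteLogger log)
        throws IgniteCheckedException {
        // Without the ignite-hadoop module on the classpath the helper is HadoopNoopHelper,
        // whose class-loading methods throw UnsupportedOperationException.
        HadoopHelper helper = ctx.hadoopHelper();

        return info.createJob(jobCls, jobId, log, null /* libNames */, helper);
    }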
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java similarity index 100% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java rename to modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java new file mode 100644 index 0000000000000..d3348caae33f4 --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import org.apache.ignite.internal.GridKernalContext; +import org.jetbrains.annotations.Nullable; + +import java.io.InputStream; + +/** + * Noop Hadoop Helper implementation. + */ +@SuppressWarnings("unused") +public class HadoopNoopHelper implements HadoopHelper { + /** + * Constructor. + * + * @param ctx Kernal context. + */ + @SuppressWarnings("UnusedParameters") + public HadoopNoopHelper(GridKernalContext ctx) { + // No-op. + } + + /** {@inheritDoc} */ + @Override public boolean isNoOp() { + return true; + } + + /** {@inheritDoc} */ + @Override public HadoopClassLoader commonClassLoader() { + throw unsupported(); + } + + /** {@inheritDoc} */ + @Override public byte[] loadReplace(InputStream in, String originalName, String replaceName) { + throw unsupported(); + } + + /** {@inheritDoc} */ + @Nullable @Override public InputStream loadClassBytes(ClassLoader ldr, String clsName) { + throw unsupported(); + } + + /** + * @return Exception. + */ + private static UnsupportedOperationException unsupported() { + throw new UnsupportedOperationException("Operation is unsupported (Hadoop module is not in the classpath)."); + } +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java index 501870a31040e..fa4ab47087ee5 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java @@ -79,8 +79,8 @@ public HadoopNoopProcessor(GridKernalContext ctx) { * Creates an exception to be uniformly thrown from all the methods. 
*/ private IllegalStateException createException() { - return new IllegalStateException("Hadoop module is not loaded (please ensure that ignite-hadoop.jar is in " + - "classpath and IgniteConfiguration.peerClassLoadingEnabled is set to false)."); + return new IllegalStateException("Hadoop module is not loaded (please ensure that ignite-hadoop.jar is " + + "in libs and IgniteConfiguration.peerClassLoadingEnabled is set to false)."); } /** {@inheritDoc} */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java index 6ff1f8fa3eddb..1dd12d913a590 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java @@ -183,8 +183,11 @@ public final class IgfsImpl implements IgfsEx { data = igfsCtx.data(); secondaryFs = cfg.getSecondaryFileSystem(); + if (secondaryFs instanceof IgfsKernalContextAware) + ((IgfsKernalContextAware)secondaryFs).setKernalContext(igfsCtx.kernalContext()); + if (secondaryFs instanceof LifecycleAware) - ((LifecycleAware) secondaryFs).start(); + ((LifecycleAware)secondaryFs).start(); /* Default IGFS mode. */ IgfsMode dfltMode; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWIthHadoop.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java similarity index 69% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWIthHadoop.java rename to modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java index c3aa7d9a760b1..7f59db4dc2089 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWIthHadoop.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java @@ -15,18 +15,18 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.deps; +package org.apache.ignite.internal.processors.igfs; -import org.apache.hadoop.mapreduce.Job; +import org.apache.ignite.internal.GridKernalContext; /** - * Class has a direct Hadoop dependency and a circular dependency on another class. + * Indicates whether particular file system accepts kernal context. */ -@SuppressWarnings("unused") -public class CircularWIthHadoop { - /** */ - private Job[][] jobs = new Job[4][4]; - - /** */ - private CircularWithoutHadoop y; +public interface IgfsKernalContextAware { + /** + * Set kernal context. + * + * @param ctx Kernal context. 
+ */ + public void setKernalContext(GridKernalContext ctx); } diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/BasicHadoopFileSystemFactory.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/BasicHadoopFileSystemFactory.java index a01bfaf690d40..89b802806515e 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/BasicHadoopFileSystemFactory.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/BasicHadoopFileSystemFactory.java @@ -17,37 +17,26 @@ package org.apache.ignite.hadoop.fs; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.ignite.IgniteException; -import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; import org.apache.ignite.hadoop.util.KerberosUserNameMapper; import org.apache.ignite.hadoop.util.UserNameMapper; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; -import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.util.typedef.internal.U; -import org.apache.ignite.lifecycle.LifecycleAware; import org.jetbrains.annotations.Nullable; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.Arrays; /** * Simple Hadoop file system factory which delegates to {@code FileSystem.get()} on each call. *

* If {@code "fs.[prefix].impl.disable.cache"} is set to {@code true}, file system instances will be cached by Hadoop. */ -public class BasicHadoopFileSystemFactory implements HadoopFileSystemFactory, Externalizable, LifecycleAware { +public class BasicHadoopFileSystemFactory implements HadoopFileSystemFactory, Externalizable { /** */ private static final long serialVersionUID = 0L; - /** File system URI. */ + /** File system URI. */ private String uri; /** File system config paths. */ @@ -56,12 +45,6 @@ public class BasicHadoopFileSystemFactory implements HadoopFileSystemFactory, Ex /** User name mapper. */ private UserNameMapper usrNameMapper; - /** Configuration of the secondary filesystem, never null. */ - protected transient Configuration cfg; - - /** Resulting URI. */ - protected transient URI fullUri; - /** * Constructor. */ @@ -70,64 +53,17 @@ public BasicHadoopFileSystemFactory() { } /** {@inheritDoc} */ - @Override public final FileSystem get(String name) throws IOException { - String name0 = IgfsUtils.fixUserName(name); - - if (usrNameMapper != null) - name0 = IgfsUtils.fixUserName(usrNameMapper.map(name0)); - - return getWithMappedName(name0); - } - - /** - * Internal file system create routine. - * - * @param usrName User name. - * @return File system. - * @throws IOException If failed. - */ - protected FileSystem getWithMappedName(String usrName) throws IOException { - assert cfg != null; - - try { - // FileSystem.get() might delegate to ServiceLoader to get the list of file system implementation. - // And ServiceLoader is known to be sensitive to context classloader. Therefore, we change context - // classloader to classloader of current class to avoid strange class-cast-exceptions. - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(getClass().getClassLoader()); - - try { - return create(usrName); - } - finally { - HadoopUtils.restoreContextClassLoader(oldLdr); - } - } - catch (InterruptedException e) { - Thread.currentThread().interrupt(); - - throw new IOException("Failed to create file system due to interrupt.", e); - } - } - - /** - * Internal file system creation routine, invoked in correct class loader context. - * - * @param usrName User name. - * @return File system. - * @throws IOException If failed. - * @throws InterruptedException if the current thread is interrupted. - */ - protected FileSystem create(String usrName) throws IOException, InterruptedException { - return FileSystem.get(fullUri, cfg, usrName); + @Override public final Object get(String name) throws IOException { + throw new UnsupportedOperationException("Method should not be called directly."); } /** * Gets file system URI. *

- * This URI will be used as a first argument when calling {@link FileSystem#get(URI, Configuration, String)}. + * This URI will be used as a first argument when calling {@code FileSystem.get(URI, Configuration, String)}. *

* If not set, default URI will be picked from file system configuration using - * {@link FileSystem#getDefaultUri(Configuration)} method. + * {@code FileSystem.getDefaultUri(Configuration)} method. * * @return File system URI. */ @@ -149,11 +85,8 @@ public void setUri(@Nullable String uri) { *

* Path could be either absolute or relative to {@code IGNITE_HOME} environment variable. *

- * All provided paths will be loaded in the order they provided and then applied to {@link Configuration}. It means
+ * All provided paths will be loaded in the order they are provided and then applied to {@code Configuration}. It means
  * that path order might be important in some cases.
- *

- * NOTE! Factory can be serialized and transferred to other machines where instance of - * {@link IgniteHadoopFileSystem} resides. Corresponding paths must exist on these machines as well. * * @return Paths to file system configuration files. */ @@ -197,50 +130,6 @@ public void setUserNameMapper(@Nullable UserNameMapper usrNameMapper) { this.usrNameMapper = usrNameMapper; } - /** {@inheritDoc} */ - @Override public void start() throws IgniteException { - cfg = HadoopUtils.safeCreateConfiguration(); - - if (cfgPaths != null) { - for (String cfgPath : cfgPaths) { - if (cfgPath == null) - throw new NullPointerException("Configuration path cannot be null: " + Arrays.toString(cfgPaths)); - else { - URL url = U.resolveIgniteUrl(cfgPath); - - if (url == null) { - // If secConfPath is given, it should be resolvable: - throw new IgniteException("Failed to resolve secondary file system configuration path " + - "(ensure that it exists locally and you have read access to it): " + cfgPath); - } - - cfg.addResource(url); - } - } - } - - // If secondary fs URI is not given explicitly, try to get it from the configuration: - if (uri == null) - fullUri = FileSystem.getDefaultUri(cfg); - else { - try { - fullUri = new URI(uri); - } - catch (URISyntaxException use) { - throw new IgniteException("Failed to resolve secondary file system URI: " + uri); - } - } - - if (usrNameMapper != null && usrNameMapper instanceof LifecycleAware) - ((LifecycleAware)usrNameMapper).start(); - } - - /** {@inheritDoc} */ - @Override public void stop() throws IgniteException { - if (usrNameMapper != null && usrNameMapper instanceof LifecycleAware) - ((LifecycleAware)usrNameMapper).stop(); - } - /** {@inheritDoc} */ @Override public void writeExternal(ObjectOutput out) throws IOException { U.writeString(out, uri); diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/CachingHadoopFileSystemFactory.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/CachingHadoopFileSystemFactory.java index bcbb08236d6e7..b90777c4d173a 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/CachingHadoopFileSystemFactory.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/CachingHadoopFileSystemFactory.java @@ -17,24 +17,14 @@ package org.apache.ignite.hadoop.fs; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.IgniteException; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemsUtils; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopLazyConcurrentMap; - -import java.io.IOException; -import java.net.URI; - /** - * Caching Hadoop file system factory. Caches {@link FileSystem} instances on per-user basis. Doesn't rely on + * Caching Hadoop file system factory. Caches {@code FileSystem} instances on per-user basis. Doesn't rely on * built-in Hadoop {@code FileSystem} caching mechanics. Separate {@code FileSystem} instance is created for each * user instead. *

- * This makes cache instance resistant to concurrent calls to {@link FileSystem#close()} in other parts of the user + * This makes cache instance resistant to concurrent calls to {@code FileSystem.close()} in other parts of the user * code. On the other hand, this might cause problems on some environments. E.g. if Kerberos is enabled, a call to - * {@link FileSystem#get(URI, Configuration, String)} will refresh Kerberos token. But this factory implementation + * {@code FileSystem.get(URI, Configuration, String)} will refresh Kerberos token. But this factory implementation * calls this method only once per user what may lead to token expiration. In such cases it makes sense to either * use {@link BasicHadoopFileSystemFactory} or implement your own factory. */ @@ -42,44 +32,10 @@ public class CachingHadoopFileSystemFactory extends BasicHadoopFileSystemFactory /** */ private static final long serialVersionUID = 0L; - /** Per-user file system cache. */ - private final transient HadoopLazyConcurrentMap cache = new HadoopLazyConcurrentMap<>( - new HadoopLazyConcurrentMap.ValueFactory() { - @Override public FileSystem createValue(String key) throws IOException { - return CachingHadoopFileSystemFactory.super.getWithMappedName(key); - } - } - ); - /** - * Public non-arg constructor. + * Constructor. */ public CachingHadoopFileSystemFactory() { - // noop - } - - /** {@inheritDoc} */ - @Override public FileSystem getWithMappedName(String name) throws IOException { - return cache.getOrCreate(name); - } - - /** {@inheritDoc} */ - @Override public void start() throws IgniteException { - super.start(); - - // Disable caching. - cfg.setBoolean(HadoopFileSystemsUtils.disableFsCachePropertyName(fullUri.getScheme()), true); - } - - /** {@inheritDoc} */ - @Override public void stop() throws IgniteException { - super.stop(); - - try { - cache.close(); - } - catch (IgniteCheckedException ice) { - throw new IgniteException(ice); - } + // No-op. } } diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/HadoopFileSystemFactory.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/HadoopFileSystemFactory.java index 5ad08abd5ee9b..214328fefdd1b 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/HadoopFileSystemFactory.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/HadoopFileSystemFactory.java @@ -17,16 +17,13 @@ package org.apache.ignite.hadoop.fs; -import org.apache.hadoop.fs.FileSystem; -import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; -import org.apache.ignite.igfs.IgfsMode; import org.apache.ignite.lifecycle.LifecycleAware; import java.io.IOException; import java.io.Serializable; /** - * Factory for Hadoop {@link FileSystem} used by {@link IgniteHadoopIgfsSecondaryFileSystem}. + * Factory for Hadoop {@code FileSystem} used by {@link IgniteHadoopIgfsSecondaryFileSystem}. *

* {@link #get(String)} method will be used whenever a call to a target {@code FileSystem} is required. *

@@ -35,10 +32,6 @@ *

* Concrete factory may implement {@link LifecycleAware} interface. In this case start and stop callbacks will be * performed by Ignite. You may want to implement some initialization or cleanup there. - *

- * Note that factory extends {@link Serializable} interface as it might be necessary to transfer factories over the - * wire to {@link IgniteHadoopFileSystem} if {@link IgfsMode#PROXY} is enabled for some file - * system paths. */ public interface HadoopFileSystemFactory extends Serializable { /** @@ -48,5 +41,5 @@ public interface HadoopFileSystemFactory extends Serializable { * @return File system. * @throws IOException In case of error. */ - public FileSystem get(String usrName) throws IOException; + public Object get(String usrName) throws IOException; } diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java index 80858269eac25..f1c1b16524466 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java @@ -17,25 +17,12 @@ package org.apache.ignite.hadoop.fs; -import java.io.IOException; -import java.io.PrintStream; -import java.util.Map; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; import org.apache.ignite.internal.processors.hadoop.HadoopJob; -import org.apache.ignite.internal.processors.hadoop.HadoopJobId; -import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounterWriter; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; -import org.apache.ignite.internal.processors.hadoop.counter.HadoopPerformanceCounter; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job; -import org.apache.ignite.internal.processors.igfs.IgfsUtils; -import org.apache.ignite.internal.util.typedef.T2; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemCounterWriterDelegate; /** * Statistic writer implementation that writes info into any Hadoop file system. @@ -47,57 +34,39 @@ public class IgniteHadoopFileSystemCounterWriter implements HadoopCounterWriter /** */ public static final String COUNTER_WRITER_DIR_PROPERTY = "ignite.counters.fswriter.directory"; - /** */ - private static final String USER_MACRO = "${USER}"; + /** Mutex. */ + private final Object mux = new Object(); - /** */ - private static final String DEFAULT_COUNTER_WRITER_DIR = "/user/" + USER_MACRO; + /** Delegate. 
*/ + private volatile HadoopFileSystemCounterWriterDelegate delegate; /** {@inheritDoc} */ @Override public void write(HadoopJob job, HadoopCounters cntrs) throws IgniteCheckedException { + delegate(job).write(job, cntrs); + } - Configuration hadoopCfg = HadoopUtils.safeCreateConfiguration(); - - final HadoopJobInfo jobInfo = job.info(); - - final HadoopJobId jobId = job.id(); - - for (Map.Entry e : ((HadoopDefaultJobInfo)jobInfo).properties().entrySet()) - hadoopCfg.set(e.getKey(), e.getValue()); - - String user = jobInfo.user(); - - user = IgfsUtils.fixUserName(user); - - String dir = jobInfo.property(COUNTER_WRITER_DIR_PROPERTY); - - if (dir == null) - dir = DEFAULT_COUNTER_WRITER_DIR; - - Path jobStatPath = new Path(new Path(dir.replace(USER_MACRO, user)), jobId.toString()); - - HadoopPerformanceCounter perfCntr = HadoopPerformanceCounter.getCounter(cntrs, null); - - try { - hadoopCfg.set(MRJobConfig.USER_NAME, user); + /** + * Get delegate creating it if needed. + * + * @param job Job. + * @return Delegate. + */ + private HadoopFileSystemCounterWriterDelegate delegate(HadoopJob job) { + HadoopFileSystemCounterWriterDelegate delegate0 = delegate; - FileSystem fs = ((HadoopV2Job)job).fileSystem(jobStatPath.toUri(), hadoopCfg); + if (delegate0 == null) { + synchronized (mux) { + delegate0 = delegate; - fs.mkdirs(jobStatPath); + if (delegate0 == null) { + delegate0 = HadoopDelegateUtils.counterWriterDelegate(job.getClass().getClassLoader(), this); - try (PrintStream out = new PrintStream(fs.create(new Path(jobStatPath, PERFORMANCE_COUNTER_FILE_NAME)))) { - for (T2 evt : perfCntr.evts()) { - out.print(evt.get1()); - out.print(':'); - out.println(evt.get2().toString()); + delegate = delegate0; } - - out.flush(); } } - catch (IOException e) { - throw new IgniteCheckedException(e); - } + + return delegate0; } } \ No newline at end of file diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java index 6b5c7762f9ff7..c9d08c5be4a2c 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java @@ -17,62 +17,48 @@ package org.apache.ignite.hadoop.fs; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.ParentNotDirectoryException; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.PathExistsException; -import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteFileSystem; -import org.apache.ignite.igfs.IgfsDirectoryNotEmptyException; -import org.apache.ignite.igfs.IgfsException; import org.apache.ignite.igfs.IgfsFile; -import org.apache.ignite.igfs.IgfsParentNotDirectoryException; import org.apache.ignite.igfs.IgfsPath; -import org.apache.ignite.igfs.IgfsPathAlreadyExistsException; -import org.apache.ignite.igfs.IgfsPathNotFoundException; import org.apache.ignite.igfs.IgfsUserContext; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystemPositionedReadable; +import org.apache.ignite.internal.GridKernalContext; +import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader; +import 
org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; import org.apache.ignite.internal.processors.hadoop.HadoopPayloadAware; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsProperties; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsSecondaryFileSystemPositionedReadable; -import org.apache.ignite.internal.processors.igfs.IgfsEntryInfo; -import org.apache.ignite.internal.processors.igfs.IgfsFileImpl; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopIgfsSecondaryFileSystemDelegate; +import org.apache.ignite.internal.processors.igfs.IgfsKernalContextAware; import org.apache.ignite.internal.processors.igfs.IgfsSecondaryFileSystemV2; -import org.apache.ignite.internal.processors.igfs.IgfsUtils; -import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.lang.IgniteOutClosure; -import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.lifecycle.LifecycleAware; import org.jetbrains.annotations.Nullable; -import java.io.FileNotFoundException; -import java.io.IOException; import java.io.OutputStream; -import java.net.URI; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; import java.util.Map; import java.util.concurrent.Callable; /** - * Secondary file system which delegates calls to an instance of Hadoop {@link FileSystem}. + * Secondary file system which delegates calls to Hadoop {@code org.apache.hadoop.fs.FileSystem}. *

* Target {@code FileSystem}'s are created on per-user basis using passed {@link HadoopFileSystemFactory}. */ -public class IgniteHadoopIgfsSecondaryFileSystem implements IgfsSecondaryFileSystemV2, LifecycleAware, - HadoopPayloadAware { +public class IgniteHadoopIgfsSecondaryFileSystem implements IgfsSecondaryFileSystemV2, IgfsKernalContextAware, + LifecycleAware, HadoopPayloadAware { /** The default user name. It is used if no user context is set. */ private String dfltUsrName; /** Factory. */ - private HadoopFileSystemFactory fsFactory; + private HadoopFileSystemFactory factory; + + /** Kernal context. */ + private volatile GridKernalContext ctx; + + /** Target. */ + private volatile HadoopIgfsSecondaryFileSystemDelegate target; /** * Default constructor for Spring. @@ -135,7 +121,7 @@ public IgniteHadoopIgfsSecondaryFileSystem(@Nullable String uri, @Nullable Strin * Gets default user name. *

* Defines user name which will be used during file system invocation in case no user name is defined explicitly - * through {@link FileSystem#get(URI, Configuration, String)}. + * through {@code FileSystem.get(URI, Configuration, String)}. *

* Also this name will be used if you manipulate {@link IgniteFileSystem} directly and do not set user name * explicitly using {@link IgfsUserContext#doAs(String, IgniteOutClosure)} or @@ -162,14 +148,14 @@ public void setDefaultUserName(@Nullable String dfltUsrName) { /** * Gets secondary file system factory. *

- * This factory will be used whenever a call to a target {@link FileSystem} is required. + * This factory will be used whenever a call to a target {@code FileSystem} is required. *

* If not set, {@link CachingHadoopFileSystemFactory} will be used. * * @return Secondary file system factory. */ public HadoopFileSystemFactory getFileSystemFactory() { - return fsFactory; + return factory; } /** @@ -178,403 +164,115 @@ public HadoopFileSystemFactory getFileSystemFactory() { * @param factory Secondary file system factory. */ public void setFileSystemFactory(HadoopFileSystemFactory factory) { - this.fsFactory = factory; - } - - /** - * Convert IGFS path into Hadoop path. - * - * @param path IGFS path. - * @return Hadoop path. - */ - private Path convert(IgfsPath path) { - URI uri = fileSystemForUser().getUri(); - - return new Path(uri.getScheme(), uri.getAuthority(), path.toString()); - } - - /** - * Heuristically checks if exception was caused by invalid HDFS version and returns appropriate exception. - * - * @param e Exception to check. - * @param detailMsg Detailed error message. - * @return Appropriate exception. - */ - private IgfsException handleSecondaryFsError(IOException e, String detailMsg) { - return cast(detailMsg, e); - } - - /** - * Cast IO exception to IGFS exception. - * - * @param e IO exception. - * @return IGFS exception. - */ - public static IgfsException cast(String msg, IOException e) { - if (e instanceof FileNotFoundException) - return new IgfsPathNotFoundException(e); - else if (e instanceof ParentNotDirectoryException) - return new IgfsParentNotDirectoryException(msg, e); - else if (e instanceof PathIsNotEmptyDirectoryException) - return new IgfsDirectoryNotEmptyException(e); - else if (e instanceof PathExistsException) - return new IgfsPathAlreadyExistsException(msg, e); - else - return new IgfsException(msg, e); - } - - /** - * Convert Hadoop FileStatus properties to map. - * - * @param status File status. - * @return IGFS attributes. - */ - private static Map properties(FileStatus status) { - FsPermission perm = status.getPermission(); - - if (perm == null) - perm = FsPermission.getDefault(); - - HashMap res = new HashMap<>(3); - - res.put(IgfsUtils.PROP_PERMISSION, String.format("%04o", perm.toShort())); - res.put(IgfsUtils.PROP_USER_NAME, status.getOwner()); - res.put(IgfsUtils.PROP_GROUP_NAME, status.getGroup()); - - return res; + this.factory = factory; } /** {@inheritDoc} */ @Override public boolean exists(IgfsPath path) { - try { - return fileSystemForUser().exists(convert(path)); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to check file existence [path=" + path + "]"); - } + return target.exists(path); } /** {@inheritDoc} */ @Nullable @Override public IgfsFile update(IgfsPath path, Map props) { - HadoopIgfsProperties props0 = new HadoopIgfsProperties(props); - - final FileSystem fileSys = fileSystemForUser(); - - try { - if (props0.userName() != null || props0.groupName() != null) - fileSys.setOwner(convert(path), props0.userName(), props0.groupName()); - - if (props0.permission() != null) - fileSys.setPermission(convert(path), props0.permission()); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to update file properties [path=" + path + "]"); - } - - //Result is not used in case of secondary FS. - return null; + return target.update(path, props); } /** {@inheritDoc} */ @Override public void rename(IgfsPath src, IgfsPath dest) { - // Delegate to the secondary file system. 
- try { - if (!fileSystemForUser().rename(convert(src), convert(dest))) - throw new IgfsException("Failed to rename (secondary file system returned false) " + - "[src=" + src + ", dest=" + dest + ']'); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to rename file [src=" + src + ", dest=" + dest + ']'); - } + target.rename(src, dest); } /** {@inheritDoc} */ @Override public boolean delete(IgfsPath path, boolean recursive) { - try { - return fileSystemForUser().delete(convert(path), recursive); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to delete file [path=" + path + ", recursive=" + recursive + "]"); - } + return target.delete(path, recursive); } /** {@inheritDoc} */ @Override public void mkdirs(IgfsPath path) { - try { - if (!fileSystemForUser().mkdirs(convert(path))) - throw new IgniteException("Failed to make directories [path=" + path + "]"); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to make directories [path=" + path + "]"); - } + target.mkdirs(path); } /** {@inheritDoc} */ @Override public void mkdirs(IgfsPath path, @Nullable Map props) { - try { - if (!fileSystemForUser().mkdirs(convert(path), new HadoopIgfsProperties(props).permission())) - throw new IgniteException("Failed to make directories [path=" + path + ", props=" + props + "]"); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to make directories [path=" + path + ", props=" + props + "]"); - } + target.mkdirs(path, props); } /** {@inheritDoc} */ @Override public Collection listPaths(IgfsPath path) { - try { - FileStatus[] statuses = fileSystemForUser().listStatus(convert(path)); - - if (statuses == null) - throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); - - Collection res = new ArrayList<>(statuses.length); - - for (FileStatus status : statuses) - res.add(new IgfsPath(path, status.getPath().getName())); - - return res; - } - catch (FileNotFoundException ignored) { - throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to list statuses due to secondary file system exception: " + path); - } + return target.listPaths(path); } /** {@inheritDoc} */ @Override public Collection listFiles(IgfsPath path) { - try { - FileStatus[] statuses = fileSystemForUser().listStatus(convert(path)); - - if (statuses == null) - throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); - - Collection res = new ArrayList<>(statuses.length); - - for (FileStatus s : statuses) { - IgfsEntryInfo fsInfo = s.isDirectory() ? 
- IgfsUtils.createDirectory( - IgniteUuid.randomUuid(), - null, - properties(s), - s.getAccessTime(), - s.getModificationTime() - ) : - IgfsUtils.createFile( - IgniteUuid.randomUuid(), - (int)s.getBlockSize(), - s.getLen(), - null, - null, - false, - properties(s), - s.getAccessTime(), - s.getModificationTime() - ); - - res.add(new IgfsFileImpl(new IgfsPath(path, s.getPath().getName()), fsInfo, 1)); - } - - return res; - } - catch (FileNotFoundException ignored) { - throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to list statuses due to secondary file system exception: " + path); - } + return target.listFiles(path); } /** {@inheritDoc} */ @Override public IgfsSecondaryFileSystemPositionedReadable open(IgfsPath path, int bufSize) { - return new HadoopIgfsSecondaryFileSystemPositionedReadable(fileSystemForUser(), convert(path), bufSize); + return target.open(path, bufSize); } /** {@inheritDoc} */ @Override public OutputStream create(IgfsPath path, boolean overwrite) { - try { - return fileSystemForUser().create(convert(path), overwrite); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to create file [path=" + path + ", overwrite=" + overwrite + "]"); - } + return target.create(path, overwrite); } /** {@inheritDoc} */ @Override public OutputStream create(IgfsPath path, int bufSize, boolean overwrite, int replication, long blockSize, @Nullable Map props) { - HadoopIgfsProperties props0 = - new HadoopIgfsProperties(props != null ? props : Collections.emptyMap()); - - try { - return fileSystemForUser().create(convert(path), props0.permission(), overwrite, bufSize, - (short) replication, blockSize, null); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to create file [path=" + path + ", props=" + props + - ", overwrite=" + overwrite + ", bufSize=" + bufSize + ", replication=" + replication + - ", blockSize=" + blockSize + "]"); - } + return target.create(path, bufSize, overwrite, replication, blockSize, props); } /** {@inheritDoc} */ @Override public OutputStream append(IgfsPath path, int bufSize, boolean create, @Nullable Map props) { - try { - return fileSystemForUser().append(convert(path), bufSize); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to append file [path=" + path + ", bufSize=" + bufSize + "]"); - } + return target.append(path, bufSize, create, props); } /** {@inheritDoc} */ @Override public IgfsFile info(final IgfsPath path) { - try { - final FileStatus status = fileSystemForUser().getFileStatus(convert(path)); - - if (status == null) - return null; - - final Map props = properties(status); - - return new IgfsFile() { - @Override public IgfsPath path() { - return path; - } - - @Override public boolean isFile() { - return status.isFile(); - } - - @Override public boolean isDirectory() { - return status.isDirectory(); - } - - @Override public int blockSize() { - // By convention directory has blockSize == 0, while file has blockSize > 0: - return isDirectory() ? 
0 : (int)status.getBlockSize(); - } - - @Override public long groupBlockSize() { - return status.getBlockSize(); - } - - @Override public long accessTime() { - return status.getAccessTime(); - } - - @Override public long modificationTime() { - return status.getModificationTime(); - } - - @Override public String property(String name) throws IllegalArgumentException { - String val = props.get(name); - - if (val == null) - throw new IllegalArgumentException("File property not found [path=" + path + ", name=" + name + ']'); - - return val; - } - - @Nullable @Override public String property(String name, @Nullable String dfltVal) { - String val = props.get(name); - - return val == null ? dfltVal : val; - } - - @Override public long length() { - return status.getLen(); - } - - /** {@inheritDoc} */ - @Override public Map properties() { - return props; - } - }; - } - catch (FileNotFoundException ignore) { - return null; - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to get file status [path=" + path + "]"); - } + return target.info(path); } /** {@inheritDoc} */ @Override public long usedSpaceSize() { - try { - // We don't use FileSystem#getUsed() since it counts only the files - // in the filesystem root, not all the files recursively. - return fileSystemForUser().getContentSummary(new Path("/")).getSpaceConsumed(); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed to get used space size of file system."); - } + return target.usedSpaceSize(); } /** {@inheritDoc} */ @Override public void setTimes(IgfsPath path, long accessTime, long modificationTime) throws IgniteException { - try { - // We don't use FileSystem#getUsed() since it counts only the files - // in the filesystem root, not all the files recursively. - fileSystemForUser().setTimes(convert(path), modificationTime, accessTime); - } - catch (IOException e) { - throw handleSecondaryFsError(e, "Failed set times for path: " + path); - } + target.setTimes(path, accessTime, modificationTime); } - /** - * Gets the underlying {@link FileSystem}. - * This method is used solely for testing. - * @return the underlying Hadoop {@link FileSystem}. - */ - public FileSystem fileSystem() { - return fileSystemForUser(); + /** {@inheritDoc} */ + @Override public void setKernalContext(GridKernalContext ctx) { + this.ctx = ctx; } - /** - * Gets the FileSystem for the current context user. - * @return the FileSystem instance, never null. 
- */ - private FileSystem fileSystemForUser() { - String user = IgfsUserContext.currentUser(); - - if (F.isEmpty(user)) - user = IgfsUtils.fixUserName(dfltUsrName); + /** {@inheritDoc} */ + @Override public void start() throws IgniteException { + HadoopClassLoader ldr = ctx.hadoopHelper().commonClassLoader(); - assert !F.isEmpty(user); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(ldr); try { - return fsFactory.get(user); + target = HadoopDelegateUtils.secondaryFileSystemDelegate(ldr, this); + + target.start(); } - catch (IOException ioe) { - throw new IgniteException(ioe); + finally { + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } - /** {@inheritDoc} */ - @Override public void start() throws IgniteException { - dfltUsrName = IgfsUtils.fixUserName(dfltUsrName); - - if (fsFactory == null) - fsFactory = new CachingHadoopFileSystemFactory(); - - if (fsFactory instanceof LifecycleAware) - ((LifecycleAware) fsFactory).start(); - } - /** {@inheritDoc} */ @Override public void stop() throws IgniteException { - if (fsFactory instanceof LifecycleAware) - ((LifecycleAware)fsFactory).stop(); + if (target != null) + target.stop(); } /** {@inheritDoc} */ @Override public HadoopFileSystemFactory getPayload() { - return fsFactory; + return factory; } } \ No newline at end of file diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactory.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactory.java index bbfbc59100d62..46d626bfb61fe 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactory.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactory.java @@ -17,19 +17,12 @@ package org.apache.ignite.hadoop.fs; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.ignite.IgniteException; -import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; -import org.apache.ignite.internal.util.typedef.F; -import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; -import java.security.PrivilegedExceptionAction; /** * Secure Hadoop file system factory that can work with underlying file system protected with Kerberos. @@ -57,9 +50,6 @@ public class KerberosHadoopFileSystemFactory extends BasicHadoopFileSystemFactor /** The re-login interval. See {@link #getReloginInterval()} for more information. */ private long reloginInterval = DFLT_RELOGIN_INTERVAL; - /** Time of last re-login attempt, in system milliseconds. */ - private transient volatile long lastReloginTime; - /** * Constructor. */ @@ -67,25 +57,6 @@ public KerberosHadoopFileSystemFactory() { // No-op. 
} - /** {@inheritDoc} */ - @Override public FileSystem getWithMappedName(String name) throws IOException { - reloginIfNeeded(); - - return super.getWithMappedName(name); - } - - /** {@inheritDoc} */ - @Override protected FileSystem create(String usrName) throws IOException, InterruptedException { - UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(usrName, - UserGroupInformation.getLoginUser()); - - return proxyUgi.doAs(new PrivilegedExceptionAction() { - @Override public FileSystem run() throws Exception { - return FileSystem.get(fullUri, cfg); - } - }); - } - /** * Gets the key tab principal short name (e.g. "hdfs"). * @@ -106,9 +77,6 @@ public void setKeyTabPrincipal(@Nullable String keyTabPrincipal) { /** * Gets the key tab full file name (e.g. "/etc/security/keytabs/hdfs.headless.keytab" or "/etc/krb5.keytab"). - *

- * NOTE! Factory can be serialized and transferred to other machines where instance of - * {@link IgniteHadoopFileSystem} resides. Corresponding path must exist on these machines as well. * * @return The key tab file name. */ @@ -136,10 +104,8 @@ public void setKeyTab(@Nullable String keyTab) { * Negative values are not allowed. * *

Note, however, that it does not make sense to make this value small, because Hadoop does not allow to - * login if less than {@link org.apache.hadoop.security.UserGroupInformation#MIN_TIME_BEFORE_RELOGIN} milliseconds + * login if less than {@code org.apache.hadoop.security.UserGroupInformation.MIN_TIME_BEFORE_RELOGIN} milliseconds * have passed since the time of the previous login. - * See {@link org.apache.hadoop.security.UserGroupInformation#hasSufficientTimeElapsed(long)} and its usages for - * more detail. * * @return The re-login interval, in milliseconds. */ @@ -156,47 +122,6 @@ public void setReloginInterval(long reloginInterval) { this.reloginInterval = reloginInterval; } - /** {@inheritDoc} */ - @Override public void start() throws IgniteException { - A.ensure(!F.isEmpty(keyTab), "keyTab cannot not be empty."); - A.ensure(!F.isEmpty(keyTabPrincipal), "keyTabPrincipal cannot not be empty."); - A.ensure(reloginInterval >= 0, "reloginInterval cannot not be negative."); - - super.start(); - - try { - UserGroupInformation.setConfiguration(cfg); - UserGroupInformation.loginUserFromKeytab(keyTabPrincipal, keyTab); - } - catch (IOException ioe) { - throw new IgniteException("Failed login from keytab [keyTab=" + keyTab + - ", keyTabPrincipal=" + keyTabPrincipal + ']', ioe); - } - } - - /** - * Re-logins the user if needed. - * First, the re-login interval defined in factory is checked. The re-login attempts will be not more - * frequent than one attempt per {@code reloginInterval}. - * Second, {@link UserGroupInformation#checkTGTAndReloginFromKeytab()} method invoked that gets existing - * TGT and checks its validity. If the TGT is expired or is close to expiry, it performs re-login. - * - *

This operation expected to be called upon each operation with the file system created with the factory. - * As long as {@link #get(String)} operation is invoked upon each file {@link IgniteHadoopFileSystem}, there - * is no need to invoke it otherwise specially. - * - * @throws IOException If login fails. - */ - private void reloginIfNeeded() throws IOException { - long now = System.currentTimeMillis(); - - if (now >= lastReloginTime + reloginInterval) { - UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab(); - - lastReloginTime = now; - } - } - /** {@inheritDoc} */ @Override public void writeExternal(ObjectOutput out) throws IOException { super.writeExternal(out); diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java index a06129e43fc63..7133c087b9dbe 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java @@ -42,12 +42,14 @@ import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.igfs.IgfsPathSummary; import org.apache.ignite.internal.igfs.common.IgfsLogger; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsInputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsOutputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsProxyInputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsProxyOutputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsStreamDelegate; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsWrapper; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsInputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsOutputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsProxyInputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsProxyOutputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsStreamDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsWrapper; import org.apache.ignite.internal.processors.igfs.IgfsHandshakeResponse; import org.apache.ignite.internal.processors.igfs.IgfsModeResolver; import org.apache.ignite.internal.processors.igfs.IgfsPaths; @@ -58,7 +60,6 @@ import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; -import org.apache.ignite.lifecycle.LifecycleAware; import org.jetbrains.annotations.Nullable; import java.io.BufferedOutputStream; @@ -78,13 +79,13 @@ import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_BATCH_SIZE; import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_DIR; import static org.apache.ignite.igfs.IgfsMode.PROXY; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_COLOCATED_WRITES; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_BATCH_SIZE; -import static 
org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.parameter; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_COLOCATED_WRITES; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_BATCH_SIZE; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.parameter; import static org.apache.ignite.internal.processors.igfs.IgfsEx.IGFS_SCHEME; /** @@ -165,7 +166,7 @@ public class IgniteHadoopFileSystem extends FileSystem { private IgfsModeResolver modeRslvr; /** The secondary file system factory. */ - private HadoopFileSystemFactory factory; + private HadoopFileSystemFactoryDelegate factory; /** Management connection flag. */ private boolean mgmt; @@ -332,7 +333,10 @@ public void colocateFileWrites(boolean colocateFileWrites) { if (initSecondary) { try { - factory = (HadoopFileSystemFactory) paths.getPayload(getClass().getClassLoader()); + HadoopFileSystemFactory factory0 = + (HadoopFileSystemFactory)paths.getPayload(getClass().getClassLoader()); + + factory = HadoopDelegateUtils.fileSystemFactoryDelegate(factory0); } catch (IgniteCheckedException e) { throw new IOException("Failed to get secondary file system factory.", e); @@ -343,11 +347,10 @@ public void colocateFileWrites(boolean colocateFileWrites) { IgniteHadoopIgfsSecondaryFileSystem.class.getName() + " as \"secondaryFIleSystem\" in " + FileSystemConfiguration.class.getName() + "?)"); - if (factory instanceof LifecycleAware) - ((LifecycleAware) factory).start(); + factory.start(); try { - FileSystem secFs = factory.get(user); + FileSystem secFs = (FileSystem)factory.get(user); secondaryUri = secFs.getUri(); @@ -423,8 +426,8 @@ private void close0() throws IOException { if (clientLog.isLogEnabled()) clientLog.close(); - if (factory instanceof LifecycleAware) - ((LifecycleAware) factory).stop(); + if (factory != null) + factory.stop(); // Reset initialized resources. 
uri = null; @@ -1359,6 +1362,6 @@ public String user() { if (factory == null) return null; - return factory.get(user); + return (FileSystem)factory.get(user); } } \ No newline at end of file diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v2/IgniteHadoopFileSystem.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v2/IgniteHadoopFileSystem.java index bd8ed2db35c93..18b8bf9fc7e19 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v2/IgniteHadoopFileSystem.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v2/IgniteHadoopFileSystem.java @@ -46,13 +46,15 @@ import org.apache.ignite.igfs.IgfsMode; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.igfs.common.IgfsLogger; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsEndpoint; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsInputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsOutputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsProxyInputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsProxyOutputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsStreamDelegate; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsWrapper; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsInputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsOutputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsProxyInputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsProxyOutputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsStreamDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsWrapper; import org.apache.ignite.internal.processors.igfs.IgfsHandshakeResponse; import org.apache.ignite.internal.processors.igfs.IgfsModeResolver; import org.apache.ignite.internal.processors.igfs.IgfsPaths; @@ -63,7 +65,6 @@ import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; -import org.apache.ignite.lifecycle.LifecycleAware; import org.jetbrains.annotations.Nullable; import java.io.BufferedOutputStream; @@ -86,13 +87,13 @@ import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_DIR; import static org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem.getFsHadoopUser; import static org.apache.ignite.igfs.IgfsMode.PROXY; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_COLOCATED_WRITES; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_BATCH_SIZE; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; -import static 
org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.parameter; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_COLOCATED_WRITES; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_BATCH_SIZE; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.parameter; import static org.apache.ignite.internal.processors.igfs.IgfsEx.IGFS_SCHEME; /** @@ -169,7 +170,7 @@ public class IgniteHadoopFileSystem extends AbstractFileSystem implements Closea private IgfsModeResolver modeRslvr; /** The secondary file system factory. */ - private HadoopFileSystemFactory factory; + private HadoopFileSystemFactoryDelegate factory; /** Whether custom sequential reads before prefetch value is provided. */ private boolean seqReadsBeforePrefetchOverride; @@ -341,7 +342,10 @@ private void initialize(URI name, Configuration cfg) throws IOException { if (initSecondary) { try { - factory = (HadoopFileSystemFactory) paths.getPayload(getClass().getClassLoader()); + HadoopFileSystemFactory factory0 = + (HadoopFileSystemFactory) paths.getPayload(getClass().getClassLoader()); + + factory = HadoopDelegateUtils.fileSystemFactoryDelegate(factory0); } catch (IgniteCheckedException e) { throw new IOException("Failed to get secondary file system factory.", e); @@ -354,11 +358,10 @@ private void initialize(URI name, Configuration cfg) throws IOException { assert factory != null; - if (factory instanceof LifecycleAware) - ((LifecycleAware) factory).start(); + factory.start(); try { - FileSystem secFs = factory.get(user); + FileSystem secFs = (FileSystem)factory.get(user); secondaryUri = secFs.getUri(); @@ -385,8 +388,8 @@ private void initialize(URI name, Configuration cfg) throws IOException { if (clientLog.isLogEnabled()) clientLog.close(); - if (factory instanceof LifecycleAware) - ((LifecycleAware) factory).stop(); + if (factory != null) + factory.stop(); // Reset initialized resources. 
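For orientation, both file system implementations above now talk to the user-provided factory only through HadoopFileSystemFactoryDelegate, so the LifecycleAware checks move behind the delegate. The following is a minimal sketch of what such a delegate can look like; it only assumes the interfaces visible in this patch and is not the actual HadoopDefaultFileSystemFactoryDelegate implementation.

import org.apache.ignite.IgniteException;
import org.apache.ignite.hadoop.fs.HadoopFileSystemFactory;
import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate;
import org.apache.ignite.lifecycle.LifecycleAware;

import java.io.IOException;

/**
 * Illustrative sketch only: wraps an arbitrary file system factory and keeps the
 * LifecycleAware handling in one place, so callers can simply invoke start()/stop().
 */
public class SketchFileSystemFactoryDelegate implements HadoopFileSystemFactoryDelegate {
    /** Wrapped user-facing factory. */
    private final HadoopFileSystemFactory factory;

    /** @param factory Factory to wrap. */
    public SketchFileSystemFactoryDelegate(HadoopFileSystemFactory factory) {
        this.factory = factory;
    }

    /** {@inheritDoc} */
    @Override public Object get(String usrName) throws IOException {
        return factory.get(usrName);
    }

    /** {@inheritDoc} */
    @Override public void start() throws IgniteException {
        if (factory instanceof LifecycleAware)
            ((LifecycleAware)factory).start();
    }

    /** {@inheritDoc} */
    @Override public void stop() throws IgniteException {
        if (factory instanceof LifecycleAware)
            ((LifecycleAware)factory).stop();
    }
}

Keeping the instanceof check inside the delegate is what allows the file system code above to shrink to plain factory.start()/factory.stop() calls.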
rmtClient = null; @@ -1071,6 +1074,6 @@ public String user() { private FileSystem secondaryFileSystem() throws IOException{ assert factory != null; - return factory.get(user); + return (FileSystem)factory.get(user); } } \ No newline at end of file diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopClientProtocolProvider.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopClientProtocolProvider.java index 583af35e58c2a..343b5edbb4182 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopClientProtocolProvider.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopClientProtocolProvider.java @@ -17,10 +17,6 @@ package org.apache.ignite.hadoop.mapreduce; -import java.io.IOException; -import java.net.InetSocketAddress; -import java.util.Collections; -import java.util.concurrent.ConcurrentHashMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.protocol.ClientProtocol; @@ -32,10 +28,15 @@ import org.apache.ignite.internal.client.GridClientException; import org.apache.ignite.internal.client.GridClientFactory; import org.apache.ignite.internal.client.marshaller.jdk.GridClientJdkMarshaller; -import org.apache.ignite.internal.processors.hadoop.proto.HadoopClientProtocol; +import org.apache.ignite.internal.processors.hadoop.impl.proto.HadoopClientProtocol; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.typedef.F; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.util.Collections; +import java.util.concurrent.ConcurrentHashMap; + import static org.apache.ignite.internal.client.GridClientProtocol.TCP; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopMapReducePlanner.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopMapReducePlanner.java index d4a44fa9e1be4..e1101c5a0e8e4 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopMapReducePlanner.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopMapReducePlanner.java @@ -17,16 +17,6 @@ package org.apache.ignite.hadoop.mapreduce; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.ListIterator; -import java.util.Map; -import java.util.UUID; - import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.cluster.ClusterNode; @@ -38,13 +28,23 @@ import org.apache.ignite.internal.processors.hadoop.HadoopJob; import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan; import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsEndpoint; -import org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan; import org.apache.ignite.internal.processors.hadoop.planner.HadoopAbstractMapReducePlanner; +import org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan; import org.apache.ignite.internal.processors.igfs.IgfsEx; import org.apache.ignite.internal.util.typedef.F; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import 
java.util.HashSet; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.UUID; + import static org.apache.ignite.IgniteFileSystem.IGFS_SCHEME; /** diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopWeightedMapReducePlanner.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopWeightedMapReducePlanner.java index 27ffc19f9d105..2d1ac0b2ffa8c 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopWeightedMapReducePlanner.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/mapreduce/IgniteHadoopWeightedMapReducePlanner.java @@ -24,11 +24,11 @@ import org.apache.ignite.igfs.IgfsBlockLocation; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteEx; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; import org.apache.ignite.internal.processors.hadoop.HadoopJob; import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsEndpoint; import org.apache.ignite.internal.processors.hadoop.planner.HadoopAbstractMapReducePlanner; import org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan; @@ -116,7 +116,7 @@ public class IgniteHadoopWeightedMapReducePlanner extends HadoopAbstractMapReduc /** {@inheritDoc} */ @Override public HadoopMapReducePlan preparePlan(HadoopJob job, Collection nodes, @Nullable HadoopMapReducePlan oldPlan) throws IgniteCheckedException { - List splits = HadoopUtils.sortInputSplits(job.input()); + List splits = HadoopCommonUtils.sortInputSplits(job.input()); int reducerCnt = job.info().reducers(); if (reducerCnt < 0) diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/util/UserNameMapper.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/util/UserNameMapper.java index 26dc4b2f86673..12669aa3f2573 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/util/UserNameMapper.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/util/UserNameMapper.java @@ -17,14 +17,12 @@ package org.apache.ignite.hadoop.util; -import org.apache.ignite.hadoop.fs.HadoopFileSystemFactory; import org.jetbrains.annotations.Nullable; import java.io.Serializable; /** - * Hadoop file system name mapper. Used by {@link HadoopFileSystemFactory} implementation to pass proper user names - * to the underlying Hadoop file system. + * Hadoop file system name mapper. Ensures that correct user name is passed to the underlying Hadoop file system. */ public interface UserNameMapper extends Serializable { /** diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java deleted file mode 100644 index 2e0e271b78b5d..0000000000000 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java +++ /dev/null @@ -1,964 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop; - -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.Vector; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import org.apache.hadoop.util.NativeCodeLoader; -import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopDaemon; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopShutdownHookManager; -import org.apache.ignite.internal.util.ClassCache; -import org.apache.ignite.internal.util.typedef.F; -import org.apache.ignite.internal.util.typedef.internal.S; -import org.apache.ignite.internal.util.typedef.internal.U; -import org.jetbrains.annotations.Nullable; -import org.jsr166.ConcurrentHashMap8; -import org.objectweb.asm.AnnotationVisitor; -import org.objectweb.asm.Attribute; -import org.objectweb.asm.ClassReader; -import org.objectweb.asm.ClassVisitor; -import org.objectweb.asm.ClassWriter; -import org.objectweb.asm.FieldVisitor; -import org.objectweb.asm.Handle; -import org.objectweb.asm.Label; -import org.objectweb.asm.MethodVisitor; -import org.objectweb.asm.Opcodes; -import org.objectweb.asm.Type; -import org.objectweb.asm.commons.Remapper; -import org.objectweb.asm.commons.RemappingClassAdapter; - -/** - * Class loader allowing explicitly load classes without delegation to parent class loader. - * Also supports class parsing for finding dependencies which contain transitive dependencies - * unavailable for parent. - */ -public class HadoopClassLoader extends URLClassLoader implements ClassCache { - static { - // We are very parallel capable. - registerAsParallelCapable(); - } - - /** Name of the Hadoop daemon class. */ - public static final String HADOOP_DAEMON_CLASS_NAME = "org.apache.hadoop.util.Daemon"; - - /** Name of libhadoop library. */ - private static final String LIBHADOOP = "hadoop."; - - /** */ - private static final URLClassLoader APP_CLS_LDR = (URLClassLoader)HadoopClassLoader.class.getClassLoader(); - - /** */ - private static final Collection appJars = F.asList(APP_CLS_LDR.getURLs()); - - /** */ - private static volatile Collection hadoopJars; - - /** */ - private static final Map cache = new ConcurrentHashMap8<>(); - - /** */ - private static final Map bytesCache = new ConcurrentHashMap8<>(); - - /** Class cache. */ - private final ConcurrentMap cacheMap = new ConcurrentHashMap<>(); - - /** Diagnostic name of this class loader. */ - @SuppressWarnings({"FieldCanBeLocal", "UnusedDeclaration"}) - private final String name; - - /** Native library names. */ - private final String[] libNames; - - /** - * Gets name for Job class loader. The name is specific for local node id. 
- * @param locNodeId The local node id. - * @return The class loader name. - */ - public static String nameForJob(UUID locNodeId) { - return "hadoop-job-node-" + locNodeId.toString(); - } - - /** - * Gets name for the task class loader. Task class loader - * @param info The task info. - * @param prefix Get only prefix (without task type and number) - * @return The class loader name. - */ - public static String nameForTask(HadoopTaskInfo info, boolean prefix) { - if (prefix) - return "hadoop-task-" + info.jobId() + "-"; - else - return "hadoop-task-" + info.jobId() + "-" + info.type() + "-" + info.taskNumber(); - } - - /** - * Constructor. - * - * @param urls Urls. - * @param name Classloader name. - * @param libNames Optional additional native library names to be linked from parent classloader. - */ - public HadoopClassLoader(URL[] urls, String name, @Nullable String[] libNames) { - super(addHadoopUrls(urls), APP_CLS_LDR); - - assert !(getParent() instanceof HadoopClassLoader); - - this.name = name; - this.libNames = libNames; - - initializeNativeLibraries(); - } - - /** - * Workaround to load native Hadoop libraries. Java doesn't allow native libraries to be loaded from different - * classloaders. But we load Hadoop classes many times and one of these classes - {@code NativeCodeLoader} - tries - * to load the same native library over and over again. - *
- * To fix the problem, we force native library load in parent class loader and then "link" handle to this native - * library to our class loader. As a result, our class loader will think that the library is already loaded and will - * be able to link native methods. - * - * @see - * JNI specification - */ - private void initializeNativeLibraries() { - try { - // This must trigger native library load. - Class.forName(NativeCodeLoader.class.getName(), true, APP_CLS_LDR); - - final Vector curVector = U.field(this, "nativeLibraries"); - - ClassLoader ldr = APP_CLS_LDR; - - while (ldr != null) { - Vector vector = U.field(ldr, "nativeLibraries"); - - for (Object lib : vector) { - String name = U.field(lib, "name"); - - boolean add = name.contains(LIBHADOOP); - - if (!add && libNames != null) { - for (String libName : libNames) { - if (libName != null && name.contains(libName)) { - add = true; - - break; - } - } - } - - if (add) { - curVector.add(lib); - - return; - } - } - - ldr = ldr.getParent(); - } - } - catch (Exception e) { - U.quietAndWarn(null, "Failed to initialize Hadoop native library " + - "(native Hadoop methods might not work properly): " + e); - } - } - - /** - * Need to parse only Ignite Hadoop and IGFS classes. - * - * @param cls Class name. - * @return {@code true} if we need to check this class. - */ - private static boolean isHadoopIgfs(String cls) { - String ignitePkgPrefix = "org.apache.ignite"; - - int len = ignitePkgPrefix.length(); - - return cls.startsWith(ignitePkgPrefix) && ( - cls.indexOf("igfs.", len) != -1 || - cls.indexOf(".fs.", len) != -1 || - cls.indexOf("hadoop.", len) != -1); - } - - /** - * @param cls Class name. - * @return {@code true} If this is Hadoop class. - */ - private static boolean isHadoop(String cls) { - return cls.startsWith("org.apache.hadoop."); - } - - /** {@inheritDoc} */ - @Override protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - try { - if (isHadoop(name)) { // Always load Hadoop classes explicitly, since Hadoop can be available in App classpath. - if (name.endsWith(".util.ShutdownHookManager")) // Dirty hack to get rid of Hadoop shutdown hooks. - return loadFromBytes(name, HadoopShutdownHookManager.class.getName()); - else if (name.equals(HADOOP_DAEMON_CLASS_NAME)) - // We replace this in order to be able to forcibly stop some daemon threads - // that otherwise never stop (e.g. PeerCache runnables): - return loadFromBytes(name, HadoopDaemon.class.getName()); - - return loadClassExplicitly(name, resolve); - } - - if (isHadoopIgfs(name)) { // For Ignite Hadoop and IGFS classes we have to check if they depend on Hadoop. - Boolean hasDeps = cache.get(name); - - if (hasDeps == null) { - hasDeps = hasExternalDependencies(name); - - cache.put(name, hasDeps); - } - - if (hasDeps) - return loadClassExplicitly(name, resolve); - } - - return super.loadClass(name, resolve); - } - catch (NoClassDefFoundError | ClassNotFoundException e) { - throw new ClassNotFoundException("Failed to load class: " + name, e); - } - } - - /** - * @param name Name. - * @param replace Replacement. - * @return Class. 
- */ - private Class loadFromBytes(final String name, final String replace) { - synchronized (getClassLoadingLock(name)) { - // First, check if the class has already been loaded - Class c = findLoadedClass(name); - - if (c != null) - return c; - - byte[] bytes = bytesCache.get(name); - - if (bytes == null) { - InputStream in = loadClassBytes(getParent(), replace); - - ClassReader rdr; - - try { - rdr = new ClassReader(in); - } - catch (IOException e) { - throw new RuntimeException(e); - } - - ClassWriter w = new ClassWriter(Opcodes.ASM4); - - rdr.accept(new RemappingClassAdapter(w, new Remapper() { - /** */ - String replaceType = replace.replace('.', '/'); - - /** */ - String nameType = name.replace('.', '/'); - - @Override public String map(String type) { - if (type.equals(replaceType)) - return nameType; - - return type; - } - }), ClassReader.EXPAND_FRAMES); - - bytes = w.toByteArray(); - - bytesCache.put(name, bytes); - } - - return defineClass(name, bytes, 0, bytes.length); - } - } - - /** {@inheritDoc} */ - @Override public Class getFromCache(String clsName) throws ClassNotFoundException { - Class cls = cacheMap.get(clsName); - - if (cls == null) { - Class old = cacheMap.putIfAbsent(clsName, cls = Class.forName(clsName, true, this)); - - if (old != null) - cls = old; - } - - return cls; - } - - /** - * @param name Class name. - * @param resolve Resolve class. - * @return Class. - * @throws ClassNotFoundException If failed. - */ - private Class loadClassExplicitly(String name, boolean resolve) throws ClassNotFoundException { - synchronized (getClassLoadingLock(name)) { - // First, check if the class has already been loaded - Class c = findLoadedClass(name); - - if (c == null) { - long t1 = System.nanoTime(); - - c = findClass(name); - - // this is the defining class loader; record the stats - sun.misc.PerfCounter.getFindClassTime().addElapsedTimeFrom(t1); - sun.misc.PerfCounter.getFindClasses().increment(); - } - - if (resolve) - resolveClass(c); - - return c; - } - } - - /** - * @param ldr Loader. - * @param clsName Class. - * @return Input stream. - */ - @Nullable private InputStream loadClassBytes(ClassLoader ldr, String clsName) { - return ldr.getResourceAsStream(clsName.replace('.', '/') + ".class"); - } - - /** - * Check whether class has external dependencies on Hadoop. - * - * @param clsName Class name. - * @return {@code True} if class has external dependencies. - */ - boolean hasExternalDependencies(String clsName) { - CollectingContext ctx = new CollectingContext(); - - ctx.annVisitor = new CollectingAnnotationVisitor(ctx); - ctx.mthdVisitor = new CollectingMethodVisitor(ctx, ctx.annVisitor); - ctx.fldVisitor = new CollectingFieldVisitor(ctx, ctx.annVisitor); - ctx.clsVisitor = new CollectingClassVisitor(ctx, ctx.annVisitor, ctx.mthdVisitor, ctx.fldVisitor); - - return hasExternalDependencies(clsName, ctx); - } - - /** - * Check whether class has external dependencies on Hadoop. - * - * @param clsName Class name. - * @param ctx Context. - * @return {@code true} If the class has external dependencies. - */ - boolean hasExternalDependencies(String clsName, CollectingContext ctx) { - if (isHadoop(clsName)) // Hadoop must not be in classpath but Idea sucks, so filtering explicitly as external. - return true; - - // Try to get from parent to check if the type accessible. - InputStream in = loadClassBytes(getParent(), clsName); - - if (in == null) // The class is external itself, it must be loaded from this class loader. 
- return true; - - if (!isHadoopIgfs(clsName)) // Other classes should not have external dependencies. - return false; - - final ClassReader rdr; - - try { - rdr = new ClassReader(in); - } - catch (IOException e) { - throw new RuntimeException("Failed to read class: " + clsName, e); - } - - ctx.visited.add(clsName); - - rdr.accept(ctx.clsVisitor, 0); - - if (ctx.found) // We already know that we have dependencies, no need to check parent. - return true; - - // Here we are known to not have any dependencies but possibly we have a parent which has them. - int idx = clsName.lastIndexOf('$'); - - if (idx == -1) // No parent class. - return false; - - String parentCls = clsName.substring(0, idx); - - if (ctx.visited.contains(parentCls)) - return false; - - Boolean res = cache.get(parentCls); - - if (res == null) - res = hasExternalDependencies(parentCls, ctx); - - return res; - } - - /** - * @param name Class name. - * @return {@code true} If this is a valid class name. - */ - private static boolean validateClassName(String name) { - int len = name.length(); - - if (len <= 1) - return false; - - if (!Character.isJavaIdentifierStart(name.charAt(0))) - return false; - - boolean hasDot = false; - - for (int i = 1; i < len; i++) { - char c = name.charAt(i); - - if (c == '.') - hasDot = true; - else if (!Character.isJavaIdentifierPart(c)) - return false; - } - - return hasDot; - } - - /** - * @param urls URLs. - * @return URLs. - */ - private static URL[] addHadoopUrls(URL[] urls) { - Collection hadoopJars; - - try { - hadoopJars = hadoopUrls(); - } - catch (IgniteCheckedException e) { - throw new RuntimeException(e); - } - - ArrayList list = new ArrayList<>(hadoopJars.size() + appJars.size() + (urls == null ? 0 : urls.length)); - - list.addAll(appJars); - list.addAll(hadoopJars); - - if (!F.isEmpty(urls)) - list.addAll(F.asList(urls)); - - return list.toArray(new URL[list.size()]); - } - - /** - * @return Collection of jar URLs. - * @throws IgniteCheckedException If failed. - */ - public static Collection hadoopUrls() throws IgniteCheckedException { - Collection hadoopUrls = hadoopJars; - - if (hadoopUrls != null) - return hadoopUrls; - - synchronized (HadoopClassLoader.class) { - hadoopUrls = hadoopJars; - - if (hadoopUrls != null) - return hadoopUrls; - - try { - hadoopUrls = HadoopClasspathUtils.classpathForClassLoader(); - } - catch (IOException e) { - throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations: " + e.getMessage(), e); - } - - hadoopJars = hadoopUrls; - - return hadoopUrls; - } - } - - /** {@inheritDoc} */ - @Override public String toString() { - return S.toString(HadoopClassLoader.class, this); - } - - /** - * Getter for name field. - */ - public String name() { - return name; - } - - /** - * Context for dependencies collection. - */ - private class CollectingContext { - /** Visited classes. */ - private final Set visited = new HashSet<>(); - - /** Whether dependency found. */ - private boolean found; - - /** Annotation visitor. */ - private AnnotationVisitor annVisitor; - - /** Method visitor. */ - private MethodVisitor mthdVisitor; - - /** Field visitor. */ - private FieldVisitor fldVisitor; - - /** Class visitor. */ - private ClassVisitor clsVisitor; - - /** - * Processes a method descriptor - * @param methDesc The method desc String. 
- */ - void onMethodsDesc(final String methDesc) { - // Process method return type: - onType(Type.getReturnType(methDesc)); - - if (found) - return; - - // Process method argument types: - for (Type t: Type.getArgumentTypes(methDesc)) { - onType(t); - - if (found) - return; - } - } - - /** - * Processes dependencies of a class. - * - * @param depCls The class name as dot-notated FQN. - */ - void onClass(final String depCls) { - assert depCls.indexOf('/') == -1 : depCls; // class name should be fully converted to dot notation. - assert depCls.charAt(0) != 'L' : depCls; - assert validateClassName(depCls) : depCls; - - if (depCls.startsWith("java.") || depCls.startsWith("javax.")) // Filter out platform classes. - return; - - if (visited.contains(depCls)) - return; - - Boolean res = cache.get(depCls); - - if (res == Boolean.TRUE || (res == null && hasExternalDependencies(depCls, this))) - found = true; - } - - /** - * Analyses dependencies of given type. - * - * @param t The type to process. - */ - void onType(Type t) { - if (t == null) - return; - - int sort = t.getSort(); - - switch (sort) { - case Type.ARRAY: - onType(t.getElementType()); - - break; - - case Type.OBJECT: - onClass(t.getClassName()); - - break; - } - } - - /** - * Analyses dependencies of given object type. - * - * @param objType The object type to process. - */ - void onInternalTypeName(String objType) { - if (objType == null) - return; - - assert objType.length() > 1 : objType; - - if (objType.charAt(0) == '[') - // handle array. In this case this is a type descriptor notation, like "[Ljava/lang/Object;" - onType(objType); - else { - assert objType.indexOf('.') == -1 : objType; // Must be slash-separated FQN. - - String clsName = objType.replace('/', '.'); // Convert it to dot notation. - - onClass(clsName); // Process. - } - } - - /** - * Type description analyser. - * - * @param desc The description. - */ - void onType(String desc) { - if (!F.isEmpty(desc)) { - if (desc.length() <= 1) - return; // Optimization: filter out primitive types in early stage. - - Type t = Type.getType(desc); - - onType(t); - } - } - } - - /** - * Annotation visitor. - */ - private static class CollectingAnnotationVisitor extends AnnotationVisitor { - /** */ - final CollectingContext ctx; - - /** - * Annotation visitor. - * - * @param ctx The collector. - */ - CollectingAnnotationVisitor(CollectingContext ctx) { - super(Opcodes.ASM4); - - this.ctx = ctx; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String name, String desc) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return this; - } - - /** {@inheritDoc} */ - @Override public void visitEnum(String name, String desc, String val) { - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitArray(String name) { - return ctx.found ? null : this; - } - - /** {@inheritDoc} */ - @Override public void visit(String name, Object val) { - if (ctx.found) - return; - - if (val instanceof Type) - ctx.onType((Type)val); - } - - /** {@inheritDoc} */ - @Override public void visitEnd() { - // No-op. - } - } - - /** - * Field visitor. - */ - private static class CollectingFieldVisitor extends FieldVisitor { - /** Collector. */ - private final CollectingContext ctx; - - /** Annotation visitor. */ - private final AnnotationVisitor av; - - /** - * Constructor. 
- */ - CollectingFieldVisitor(CollectingContext ctx, AnnotationVisitor av) { - super(Opcodes.ASM4); - - this.ctx = ctx; - this.av = av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public void visitAttribute(Attribute attr) { - // No-op. - } - - /** {@inheritDoc} */ - @Override public void visitEnd() { - // No-op. - } - } - - /** - * Class visitor. - */ - private static class CollectingClassVisitor extends ClassVisitor { - /** Collector. */ - private final CollectingContext ctx; - - /** Annotation visitor. */ - private final AnnotationVisitor av; - - /** Method visitor. */ - private final MethodVisitor mv; - - /** Field visitor. */ - private final FieldVisitor fv; - - /** - * Constructor. - * - * @param ctx Collector. - * @param av Annotation visitor. - * @param mv Method visitor. - * @param fv Field visitor. - */ - CollectingClassVisitor(CollectingContext ctx, AnnotationVisitor av, MethodVisitor mv, FieldVisitor fv) { - super(Opcodes.ASM4); - - this.ctx = ctx; - this.av = av; - this.mv = mv; - this.fv = fv; - } - - /** {@inheritDoc} */ - @Override public void visit(int i, int i2, String name, String signature, String superName, String[] ifaces) { - if (ctx.found) - return; - - ctx.onInternalTypeName(superName); - - if (ctx.found) - return; - - if (ifaces != null) { - for (String iface : ifaces) { - ctx.onInternalTypeName(iface); - - if (ctx.found) - return; - } - } - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public void visitInnerClass(String name, String outerName, String innerName, int i) { - if (ctx.found) - return; - - ctx.onInternalTypeName(name); - } - - /** {@inheritDoc} */ - @Override public FieldVisitor visitField(int i, String name, String desc, String signature, Object val) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : fv; - } - - /** {@inheritDoc} */ - @Override public MethodVisitor visitMethod(int i, String name, String desc, String signature, - String[] exceptions) { - if (ctx.found) - return null; - - ctx.onMethodsDesc(desc); - - // Process declared method exceptions: - if (exceptions != null) { - for (String e : exceptions) - ctx.onInternalTypeName(e); - } - - return ctx.found ? null : mv; - } - } - - /** - * Method visitor. - */ - private static class CollectingMethodVisitor extends MethodVisitor { - /** Collector. */ - private final CollectingContext ctx; - - /** Annotation visitor. */ - private final AnnotationVisitor av; - - /** - * Constructor. - * - * @param ctx Collector. - * @param av Annotation visitor. - */ - private CollectingMethodVisitor(CollectingContext ctx, AnnotationVisitor av) { - super(Opcodes.ASM4); - - this.ctx = ctx; - this.av = av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitParameterAnnotation(int i, String desc, boolean b) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? 
null : av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotationDefault() { - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public void visitFieldInsn(int opcode, String owner, String name, String desc) { - if (ctx.found) - return; - - ctx.onInternalTypeName(owner); - - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) { - // No-op. - } - - /** {@inheritDoc} */ - @Override public void visitFrame(int type, int nLoc, Object[] locTypes, int nStack, Object[] stackTypes) { - // No-op. - } - - /** {@inheritDoc} */ - @Override public void visitLocalVariable(String name, String desc, String signature, Label lb, - Label lb2, int i) { - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public void visitMethodInsn(int i, String owner, String name, String desc) { - if (ctx.found) - return; - - ctx.onInternalTypeName(owner); - - if (ctx.found) - return; - - ctx.onMethodsDesc(desc); - } - - /** {@inheritDoc} */ - @Override public void visitMultiANewArrayInsn(String desc, int dim) { - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public void visitTryCatchBlock(Label start, Label end, Label hndl, String typeStr) { - if (ctx.found) - return; - - ctx.onInternalTypeName(typeStr); - } - - /** {@inheritDoc} */ - @Override public void visitTypeInsn(int opcode, String type) { - if (ctx.found) - return; - - ctx.onInternalTypeName(type); - } - } -} \ No newline at end of file diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java deleted file mode 100644 index 40694967a278a..0000000000000 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop; - -/** - * Main class to compose Hadoop classpath depending on the environment. - * This class is designed to be independent on any Ignite classes as possible. - * Please make sure to pass the path separator character as the 1st parameter to the main method. - */ -public class HadoopClasspathMain { - /** - * Main method to be executed from scripts. It prints the classpath to the standard output. - * - * @param args The 1st argument should be the path separator character (":" on Linux, ";" on Windows). 
- */ - public static void main(String[] args) throws Exception { - if (args.length < 1) - throw new IllegalArgumentException("Path separator must be passed as the first argument."); - - String separator = args[0]; - - StringBuilder sb = new StringBuilder(); - - for (String path : HadoopClasspathUtils.classpathForProcess()) - sb.append(path).append(separator); - - System.out.println(sb); - } -} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopCommonUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopCommonUtils.java new file mode 100644 index 0000000000000..37af14737507f --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopCommonUtils.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import org.jetbrains.annotations.Nullable; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.TreeSet; + +/** + * Common Hadoop utility methods which do not depend on Hadoop API. + */ +public class HadoopCommonUtils { + /** Job class name. */ + public static final String JOB_CLS_NAME = "org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Job"; + + /** Property to store timestamp of new job id request. */ + public static final String REQ_NEW_JOBID_TS_PROPERTY = "ignite.job.requestNewIdTs"; + + /** Property to store timestamp of response of new job id request. */ + public static final String RESPONSE_NEW_JOBID_TS_PROPERTY = "ignite.job.responseNewIdTs"; + + /** Property to store timestamp of job submission. */ + public static final String JOB_SUBMISSION_START_TS_PROPERTY = "ignite.job.submissionStartTs"; + + /** Property to set custom writer of job statistics. */ + public static final String JOB_COUNTER_WRITER_PROPERTY = "ignite.counters.writer"; + + /** + * Sort input splits by length. + * + * @param splits Splits. + * @return Sorted splits. + */ + public static List sortInputSplits(Collection splits) { + int id = 0; + + TreeSet sortedSplits = new TreeSet<>(); + + for (HadoopInputSplit split : splits) { + long len = split instanceof HadoopFileBlock ? ((HadoopFileBlock)split).length() : 0; + + sortedSplits.add(new SplitSortWrapper(id++, split, len)); + } + + ArrayList res = new ArrayList<>(sortedSplits.size()); + + for (SplitSortWrapper sortedSplit : sortedSplits) + res.add(sortedSplit.split); + + return res; + } + + /** + * Set context class loader. + * + * @param newLdr New class loader. + * @return Old class loader. 
+ */ + @Nullable public static ClassLoader setContextClassLoader(@Nullable ClassLoader newLdr) { + ClassLoader oldLdr = Thread.currentThread().getContextClassLoader(); + + if (newLdr != oldLdr) + Thread.currentThread().setContextClassLoader(newLdr); + + return oldLdr; + } + + /** + * Restore context class loader. + * + * @param oldLdr Original class loader. + */ + public static void restoreContextClassLoader(@Nullable ClassLoader oldLdr) { + ClassLoader newLdr = Thread.currentThread().getContextClassLoader(); + + if (newLdr != oldLdr) + Thread.currentThread().setContextClassLoader(oldLdr); + } + + /** + * Split wrapper for sorting. + */ + private static class SplitSortWrapper implements Comparable { + /** Unique ID. */ + private final int id; + + /** Split. */ + private final HadoopInputSplit split; + + /** Split length. */ + private final long len; + + /** + * Constructor. + * + * @param id Unique ID. + * @param split Split. + * @param len Split length. + */ + public SplitSortWrapper(int id, HadoopInputSplit split, long len) { + this.id = id; + this.split = split; + this.len = len; + } + + /** {@inheritDoc} */ + @SuppressWarnings("NullableProblems") + @Override public int compareTo(SplitSortWrapper other) { + long res = len - other.len; + + if (res > 0) + return -1; + else if (res < 0) + return 1; + else + return id - other.id; + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + return id; + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { + return obj instanceof SplitSortWrapper && id == ((SplitSortWrapper)obj).id; + } + } + + /** + * Private constructor. + */ + private HadoopCommonUtils() { + // No-op. + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopContext.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopContext.java index 42a3d726896ce..4326ad282ad17 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopContext.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopContext.java @@ -24,7 +24,6 @@ import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.HadoopConfiguration; import org.apache.ignite.internal.GridKernalContext; -import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobMetadata; import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobTracker; import org.apache.ignite.internal.processors.hadoop.shuffle.HadoopShuffle; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopExternalSplit.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopExternalSplit.java similarity index 94% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopExternalSplit.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopExternalSplit.java index c7e8a0a23f9b2..bd767b368dba1 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopExternalSplit.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopExternalSplit.java @@ -15,13 +15,12 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; -import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; /** * Split serialized in external file. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java new file mode 100644 index 0000000000000..71bb8a4cafded --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ignite.internal.processors.hadoop; + +import org.apache.ignite.internal.GridKernalContext; +import org.jetbrains.annotations.Nullable; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.commons.Remapper; +import org.objectweb.asm.commons.RemappingClassAdapter; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Utility methods for Hadoop classloader required to avoid direct 3rd-party dependencies in class loader. + */ +public class HadoopHelperImpl implements HadoopHelper { + /** Kernal context. */ + private final GridKernalContext ctx; + + /** Common class loader. */ + private volatile HadoopClassLoader ldr; + + /** + * Default constructor. + */ + public HadoopHelperImpl() { + this(null); + } + + /** + * Constructor. + * + * @param ctx Kernal context. 
+ */ + public HadoopHelperImpl(GridKernalContext ctx) { + this.ctx = ctx; + } + + /** {@inheritDoc} */ + @Override public boolean isNoOp() { + return false; + } + + /** {@inheritDoc} */ + @Override public HadoopClassLoader commonClassLoader() { + HadoopClassLoader res = ldr; + + if (res == null) { + synchronized (this) { + res = ldr; + + if (res == null) { + String[] libNames = null; + + if (ctx != null && ctx.config().getHadoopConfiguration() != null) + libNames = ctx.config().getHadoopConfiguration().getNativeLibraryNames(); + + res = new HadoopClassLoader(null, "hadoop-common", libNames, this); + + ldr = res; + } + } + } + + return res; + } + + /** {@inheritDoc} */ + @Override public byte[] loadReplace(InputStream in, final String originalName, final String replaceName) { + ClassReader rdr; + + try { + rdr = new ClassReader(in); + } + catch (IOException e) { + throw new RuntimeException(e); + } + + ClassWriter w = new ClassWriter(Opcodes.ASM4); + + rdr.accept(new RemappingClassAdapter(w, new Remapper() { + /** */ + String replaceType = replaceName.replace('.', '/'); + + /** */ + String nameType = originalName.replace('.', '/'); + + @Override public String map(String type) { + if (type.equals(replaceType)) + return nameType; + + return type; + } + }), ClassReader.EXPAND_FRAMES); + + return w.toByteArray(); + } + + /** {@inheritDoc} */ + @Override @Nullable public InputStream loadClassBytes(ClassLoader ldr, String clsName) { + return ldr.getResourceAsStream(clsName.replace('.', '/') + ".class"); + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java index b9c20c30a023f..f0df1e90c7f6b 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java @@ -40,6 +40,9 @@ * Hadoop processor. */ public class HadoopProcessor extends HadoopProcessorAdapter { + /** Class to probe for Hadoop libraries in Ignite classpath. */ + private static final String HADOOP_PROBE_CLS = "org.apache.hadoop.conf.Configuration"; + /** Job ID counter. */ private final AtomicInteger idCtr = new AtomicInteger(); @@ -164,7 +167,14 @@ public HadoopContext context() { /** {@inheritDoc} */ @Override public IgniteInternalFuture submit(HadoopJobId jobId, HadoopJobInfo jobInfo) { - return hctx.jobTracker().submit(jobId, jobInfo); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader()); + + try { + return hctx.jobTracker().submit(jobId, jobInfo); + } + finally { + HadoopCommonUtils.restoreContextClassLoader(oldLdr); + } } /** {@inheritDoc} */ @@ -203,6 +213,26 @@ public HadoopContext context() { throw new IgniteCheckedException(ioe.getMessage(), ioe); } + // Check if Hadoop is in parent class loader classpath. 
+ try { + Class cls = Class.forName(HADOOP_PROBE_CLS, false, getClass().getClassLoader()); + + try { + String path = cls.getProtectionDomain().getCodeSource().getLocation().toString(); + + U.warn(log, "Hadoop libraries are found in Ignite classpath, this could lead to class loading " + + "errors (please remove all Hadoop libraries from Ignite classpath) [path=" + path + ']'); + } + catch (Throwable ignore) { + U.warn(log, "Hadoop libraries are found in Ignite classpath, this could lead to class loading " + + "errors (please remove all Hadoop libraries from Ignite classpath)"); + } + } + catch (Throwable ignore) { + // All is fine. + } + + // Try assembling Hadoop URLs. HadoopClassLoader.hadoopUrls(); } diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopSplitWrapper.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopSplitWrapper.java similarity index 95% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopSplitWrapper.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopSplitWrapper.java index df77adbdac81c..511aa5a2ad265 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopSplitWrapper.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopSplitWrapper.java @@ -15,12 +15,12 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; -import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; + import org.apache.ignite.internal.util.typedef.internal.U; /** diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java index 3f682d37cf5e0..ee61a823a0c2d 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; + import org.apache.ignite.internal.util.typedef.internal.S; import org.jetbrains.annotations.Nullable; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java index dedc6b372e149..9baedc2850e93 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java @@ -24,6 +24,8 @@ import java.util.ArrayList; import java.util.Collection; import java.util.UUID; + +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; import org.apache.ignite.internal.processors.hadoop.HadoopTaskType; @@ -32,10 +34,6 @@ import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; -import static 
org.apache.ignite.internal.processors.hadoop.HadoopUtils.JOB_SUBMISSION_START_TS_PROPERTY; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.REQ_NEW_JOBID_TS_PROPERTY; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.RESPONSE_NEW_JOBID_TS_PROPERTY; - /** * Counter for the job statistics accumulation. */ @@ -221,9 +219,9 @@ public void onJobStart(long ts) { public void clientSubmissionEvents(HadoopJobInfo info) { assert nodeId != null; - addEventFromProperty("JOB requestId", info, REQ_NEW_JOBID_TS_PROPERTY); - addEventFromProperty("JOB responseId", info, RESPONSE_NEW_JOBID_TS_PROPERTY); - addEventFromProperty("JOB submit", info, JOB_SUBMISSION_START_TS_PROPERTY); + addEventFromProperty("JOB requestId", info, HadoopCommonUtils.REQ_NEW_JOBID_TS_PROPERTY); + addEventFromProperty("JOB responseId", info, HadoopCommonUtils.RESPONSE_NEW_JOBID_TS_PROPERTY); + addEventFromProperty("JOB submit", info, HadoopCommonUtils.JOB_SUBMISSION_START_TS_PROPERTY); } /** diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java new file mode 100644 index 0000000000000..76d9bff5a712d --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.delegate; + +import org.apache.ignite.IgniteException; +import org.apache.ignite.hadoop.fs.BasicHadoopFileSystemFactory; +import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory; +import org.apache.ignite.hadoop.fs.IgniteHadoopFileSystemCounterWriter; +import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; +import org.apache.ignite.hadoop.fs.KerberosHadoopFileSystemFactory; +import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader; +import org.jetbrains.annotations.Nullable; + +import java.lang.reflect.Constructor; +import java.util.HashMap; +import java.util.Map; + +/** + * Utility methods for Hadoop delegates. + */ +public class HadoopDelegateUtils { + /** Secondary file system delegate class. */ + private static final String SECONDARY_FILE_SYSTEM_CLS = + "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopIgfsSecondaryFileSystemDelegateImpl"; + + /** Default file system factory class. */ + private static final String DFLT_FACTORY_CLS = + "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopDefaultFileSystemFactoryDelegate"; + + /** Factory proxy to delegate class name mapping. 
*/ + private static final Map FACTORY_CLS_MAP; + + /** Counter writer delegate implementation. */ + private static final String COUNTER_WRITER_DELEGATE_CLS = + "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopFileSystemCounterWriterDelegateImpl"; + + static { + FACTORY_CLS_MAP = new HashMap<>(); + + FACTORY_CLS_MAP.put(BasicHadoopFileSystemFactory.class.getName(), + "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopBasicFileSystemFactoryDelegate"); + + FACTORY_CLS_MAP.put(CachingHadoopFileSystemFactory.class.getName(), + "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopCachingFileSystemFactoryDelegate"); + + FACTORY_CLS_MAP.put(KerberosHadoopFileSystemFactory.class.getName(), + "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopKerberosFileSystemFactoryDelegate"); + } + + /** + * Create delegate for secondary file system. + * + * @param ldr Hadoop class loader. + * @param proxy Proxy. + * @return Delegate. + */ + public static HadoopIgfsSecondaryFileSystemDelegate secondaryFileSystemDelegate(HadoopClassLoader ldr, + IgniteHadoopIgfsSecondaryFileSystem proxy) { + return newInstance(SECONDARY_FILE_SYSTEM_CLS, ldr, proxy); + } + + /** + * Create delegate for certain file system factory. + * + * @param proxy Proxy. + * @return Delegate. + */ + @SuppressWarnings("unchecked") + public static HadoopFileSystemFactoryDelegate fileSystemFactoryDelegate(Object proxy) { + String clsName = FACTORY_CLS_MAP.get(proxy.getClass().getName()); + + if (clsName == null) + clsName = DFLT_FACTORY_CLS; + + return newInstance(clsName, null, proxy); + } + + /** + * Create delegate for Hadoop counter writer. + * + * @param ldr Class loader. + * @param proxy Proxy. + * @return Delegate. + */ + public static HadoopFileSystemCounterWriterDelegate counterWriterDelegate(ClassLoader ldr, + IgniteHadoopFileSystemCounterWriter proxy) { + return newInstance(COUNTER_WRITER_DELEGATE_CLS, ldr, proxy); + } + + /** + * Get new delegate instance. + * + * @param clsName Class name. + * @param ldr Optional class loader. + * @param proxy Proxy. + * @return Instance. + */ + @SuppressWarnings("unchecked") + private static T newInstance(String clsName, @Nullable ClassLoader ldr, Object proxy) { + try { + Class delegateCls = ldr == null ? Class.forName(clsName) : Class.forName(clsName, true, ldr); + + Constructor[] ctors = delegateCls.getConstructors(); + + assert ctors.length == 1; + + Object res = ctors[0].newInstance(proxy); + + return (T)res; + } + catch (ReflectiveOperationException e) { + throw new IgniteException("Failed to instantiate delegate for proxy [proxy=" + proxy + + ", delegateClsName=" + clsName + ']', e); + } + } + + /** + * Private constructor. + */ + private HadoopDelegateUtils() { + // No-op. 
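For reference, a usage sketch (assumed, not part of the patch) of how a caller wires these pieces together: the user-facing factory is wrapped into a delegate via HadoopDelegateUtils and then driven through its LifecycleAware contract. The URI and user name below are placeholders, and the example assumes the ignite-hadoop impl classes are on the classpath so the reflective delegate lookup succeeds.

import org.apache.hadoop.fs.FileSystem;
import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory;
import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils;
import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate;

import java.io.IOException;

/**
 * Assumed usage sketch for the delegate machinery introduced above; not code from the patch.
 */
public class FactoryDelegateUsageSketch {
    public static void main(String[] args) throws IOException {
        CachingHadoopFileSystemFactory factory = new CachingHadoopFileSystemFactory();

        // Placeholder secondary file system URI.
        factory.setUri("hdfs://localhost:9000/");

        // Resolve the delegate for this factory type (unknown factory implementations
        // fall back to the default delegate).
        HadoopFileSystemFactoryDelegate delegate = HadoopDelegateUtils.fileSystemFactoryDelegate(factory);

        delegate.start();

        try {
            // The delegate returns Object to keep Hadoop types out of the caller's API,
            // so the result is cast to the Hadoop FileSystem, as in the patch.
            FileSystem fs = (FileSystem)delegate.get("anonymous");

            System.out.println("Secondary file system URI: " + fs.getUri());
        }
        finally {
            delegate.stop();
        }
    }
}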
+ } +} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithClassAnnotation.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemCounterWriterDelegate.java similarity index 56% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithClassAnnotation.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemCounterWriterDelegate.java index a9ecae0bf10e3..541cf808e3efb 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithClassAnnotation.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemCounterWriterDelegate.java @@ -15,14 +15,22 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.deps; +package org.apache.ignite.internal.processors.hadoop.delegate; -import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; /** - * Class has Hadoop annotation. + * Counter writer delegate interface. */ -@SuppressWarnings("unused") -@InterfaceAudience.Public -public class WithClassAnnotation { +public interface HadoopFileSystemCounterWriterDelegate { + /** + * Writes counters of given job to some statistics storage. + * + * @param job The job. + * @param cntrs Counters. + * @throws IgniteCheckedException If failed. + */ + public void write(HadoopJob job, HadoopCounters cntrs) throws IgniteCheckedException; } diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithImplements.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemFactoryDelegate.java similarity index 62% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithImplements.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemFactoryDelegate.java index c2d8e5bd039aa..f051d620c3ef7 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithImplements.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopFileSystemFactoryDelegate.java @@ -15,22 +15,22 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.deps; +package org.apache.ignite.internal.processors.hadoop.delegate; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; +import org.apache.ignite.lifecycle.LifecycleAware; + +import java.io.IOException; /** - * Implements a Hadoop interface. + * Hadoop file system factory delegate. */ -public class WithImplements implements Configurable { - /** {@inheritDoc} */ - @Override public void setConf(Configuration conf) { - // noop - } - - /** {@inheritDoc} */ - @Override public Configuration getConf() { - return null; - } +public interface HadoopFileSystemFactoryDelegate extends LifecycleAware { + /** + * Gets file system for the given user name. + * + * @param usrName User name + * @return File system. + * @throws IOException In case of error. 
+ */ + public Object get(String usrName) throws IOException; } diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/Without.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopIgfsSecondaryFileSystemDelegate.java similarity index 70% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/Without.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopIgfsSecondaryFileSystemDelegate.java index ab8474026df39..e381272203cfb 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/Without.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopIgfsSecondaryFileSystemDelegate.java @@ -15,11 +15,14 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.deps; +package org.apache.ignite.internal.processors.hadoop.delegate; + +import org.apache.ignite.internal.processors.igfs.IgfsSecondaryFileSystemV2; +import org.apache.ignite.lifecycle.LifecycleAware; /** - * Class that does not anyhow depend on Hadoop. + * Interface to secondary file system implementation. */ -public class Without { +public interface HadoopIgfsSecondaryFileSystemDelegate extends IgfsSecondaryFileSystemV2, LifecycleAware { // No-op. } diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceCounterGroup.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceCounterGroup.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceCounterGroup.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceCounterGroup.java index 4e03e172ec63d..0ab64d96e1344 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceCounterGroup.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceCounterGroup.java @@ -15,15 +15,16 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; + +import org.apache.hadoop.mapreduce.Counter; +import org.apache.hadoop.mapreduce.CounterGroup; +import org.apache.hadoop.mapreduce.counters.CounterGroupBase; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Iterator; -import org.apache.hadoop.mapreduce.Counter; -import org.apache.hadoop.mapreduce.CounterGroup; -import org.apache.hadoop.mapreduce.counters.CounterGroupBase; /** * Hadoop +counter group adapter. 
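/*
 * Illustrative sketch only: the intended call pattern for the delegate indirection defined
 * above (HadoopDelegateUtils plus HadoopFileSystemFactoryDelegate). The calling class, the
 * factory instance and the user name are hypothetical placeholders; in the real code path the
 * delegate implementation classes are resolved through the Hadoop class loader.
 */
import java.io.IOException;

import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory;
import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils;
import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate;

public class DelegateUsageSketch {
    /**
     * @param user User name.
     * @return Hadoop file system object produced by the delegate.
     * @throws IOException If file system creation fails.
     */
    public static Object fileSystemFor(String user) throws IOException {
        // Look up the delegate registered for the concrete factory class; unknown factory
        // classes fall back to the default delegate (see FACTORY_CLS_MAP above).
        HadoopFileSystemFactoryDelegate delegate =
            HadoopDelegateUtils.fileSystemFactoryDelegate(new CachingHadoopFileSystemFactory());

        delegate.start(); // LifecycleAware: prepares configuration, performs logins, etc.

        try {
            return delegate.get(user); // Per-user Hadoop file system instance.
        }
        finally {
            delegate.stop(); // Releases cached file systems and related resources.
        }
    }
}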
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceCounters.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceCounters.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceCounters.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceCounters.java index 57a853f884483..df5c1ee53d263 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceCounters.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceCounters.java @@ -15,7 +15,18 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; + +import org.apache.hadoop.mapreduce.Counter; +import org.apache.hadoop.mapreduce.CounterGroup; +import org.apache.hadoop.mapreduce.Counters; +import org.apache.hadoop.mapreduce.FileSystemCounter; +import org.apache.hadoop.mapreduce.counters.AbstractCounters; +import org.apache.hadoop.mapreduce.counters.Limits; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounter; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopLongCounter; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Counter; +import org.apache.ignite.internal.util.typedef.T2; import java.io.DataInput; import java.io.DataOutput; @@ -27,16 +38,6 @@ import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; -import org.apache.hadoop.mapreduce.Counter; -import org.apache.hadoop.mapreduce.CounterGroup; -import org.apache.hadoop.mapreduce.Counters; -import org.apache.hadoop.mapreduce.FileSystemCounter; -import org.apache.hadoop.mapreduce.counters.AbstractCounters; -import org.apache.hadoop.mapreduce.counters.Limits; -import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounter; -import org.apache.ignite.internal.processors.hadoop.counter.HadoopLongCounter; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Counter; -import org.apache.ignite.internal.util.typedef.T2; /** * Hadoop counters adapter. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopUtils.java similarity index 73% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopUtils.java index 65d9810ee909a..347bfae09451c 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopUtils.java @@ -15,7 +15,24 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapreduce.JobID; +import org.apache.hadoop.mapreduce.JobPriority; +import org.apache.hadoop.mapreduce.JobStatus; +import org.apache.hadoop.mapreduce.MRJobConfig; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; +import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopJobStatus; +import org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapper; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; +import org.apache.ignite.internal.util.typedef.internal.U; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -25,42 +42,14 @@ import java.io.ObjectOutput; import java.io.ObjectOutputStream; import java.io.PrintStream; -import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.TreeSet; import java.util.UUID; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapreduce.JobID; -import org.apache.hadoop.mapreduce.JobPriority; -import org.apache.hadoop.mapreduce.JobStatus; -import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopSplitWrapper; -import org.apache.ignite.internal.util.typedef.internal.U; -import org.jetbrains.annotations.Nullable; /** * Hadoop utility methods. */ public class HadoopUtils { - /** Property to store timestamp of new job id request. */ - public static final String REQ_NEW_JOBID_TS_PROPERTY = "ignite.job.requestNewIdTs"; - - /** Property to store timestamp of response of new job id request. */ - public static final String RESPONSE_NEW_JOBID_TS_PROPERTY = "ignite.job.responseNewIdTs"; - - /** Property to store timestamp of job submission. */ - public static final String JOB_SUBMISSION_START_TS_PROPERTY = "ignite.job.submissionStartTs"; - - /** Property to set custom writer of job statistics. */ - public static final String JOB_COUNTER_WRITER_PROPERTY = "ignite.counters.writer"; - /** Staging constant. */ private static final String STAGING_CONSTANT = ".staging"; @@ -327,117 +316,13 @@ public static File taskLocalDir(UUID locNodeId, HadoopTaskInfo info) throws Igni * @return New instance of {@link Configuration}. */ public static Configuration safeCreateConfiguration() { - final ClassLoader oldLdr = setContextClassLoader(Configuration.class.getClassLoader()); + final ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(Configuration.class.getClassLoader()); try { return new Configuration(); } finally { - restoreContextClassLoader(oldLdr); - } - } - - /** - * Sort input splits by length. - * - * @param splits Splits. - * @return Sorted splits. - */ - public static List sortInputSplits(Collection splits) { - int id = 0; - - TreeSet sortedSplits = new TreeSet<>(); - - for (HadoopInputSplit split : splits) { - long len = split instanceof HadoopFileBlock ? 
((HadoopFileBlock)split).length() : 0; - - sortedSplits.add(new SplitSortWrapper(id++, split, len)); - } - - ArrayList res = new ArrayList<>(sortedSplits.size()); - - for (SplitSortWrapper sortedSplit : sortedSplits) - res.add(sortedSplit.split); - - return res; - } - - /** - * Set context class loader. - * - * @param newLdr New class loader. - * @return Old class loader. - */ - @Nullable public static ClassLoader setContextClassLoader(@Nullable ClassLoader newLdr) { - ClassLoader oldLdr = Thread.currentThread().getContextClassLoader(); - - if (newLdr != oldLdr) - Thread.currentThread().setContextClassLoader(newLdr); - - return oldLdr; - } - - /** - * Restore context class loader. - * - * @param oldLdr Original class loader. - */ - public static void restoreContextClassLoader(@Nullable ClassLoader oldLdr) { - ClassLoader newLdr = Thread.currentThread().getContextClassLoader(); - - if (newLdr != oldLdr) - Thread.currentThread().setContextClassLoader(oldLdr); - } - - /** - * Split wrapper for sorting. - */ - private static class SplitSortWrapper implements Comparable { - /** Unique ID. */ - private final int id; - - /** Split. */ - private final HadoopInputSplit split; - - /** Split length. */ - private final long len; - - /** - * Constructor. - * - * @param id Unique ID. - * @param split Split. - * @param len Split length. - */ - public SplitSortWrapper(int id, HadoopInputSplit split, long len) { - this.id = id; - this.split = split; - this.len = len; - } - - /** {@inheritDoc} */ - @SuppressWarnings("NullableProblems") - @Override public int compareTo(SplitSortWrapper other) { - assert other != null; - - long res = len - other.len; - - if (res > 0) - return -1; - else if (res < 0) - return 1; - else - return id - other.id; - } - - /** {@inheritDoc} */ - @Override public int hashCode() { - return id; - } - - /** {@inheritDoc} */ - @Override public boolean equals(Object obj) { - return obj instanceof SplitSortWrapper && id == ((SplitSortWrapper)obj).id; + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } } \ No newline at end of file diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopBasicFileSystemFactoryDelegate.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopBasicFileSystemFactoryDelegate.java new file mode 100644 index 0000000000000..a190b14699ba4 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopBasicFileSystemFactoryDelegate.java @@ -0,0 +1,164 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.hadoop.impl.delegate; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.ignite.IgniteException; +import org.apache.ignite.hadoop.fs.BasicHadoopFileSystemFactory; +import org.apache.ignite.hadoop.fs.HadoopFileSystemFactory; +import org.apache.ignite.hadoop.util.UserNameMapper; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils; +import org.apache.ignite.internal.processors.igfs.IgfsUtils; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.lifecycle.LifecycleAware; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Arrays; + +/** + * Basic Hadoop file system factory delegate. + */ +public class HadoopBasicFileSystemFactoryDelegate implements HadoopFileSystemFactoryDelegate { + /** Proxy. */ + protected final HadoopFileSystemFactory proxy; + + /** Configuration of the secondary filesystem, never null. */ + protected Configuration cfg; + + /** Resulting URI. */ + protected URI fullUri; + + /** User name mapper. */ + private UserNameMapper usrNameMapper; + + /** + * Constructor. + * + * @param proxy Proxy. + */ + public HadoopBasicFileSystemFactoryDelegate(BasicHadoopFileSystemFactory proxy) { + this.proxy = proxy; + } + + /** {@inheritDoc} */ + @Override public FileSystem get(String name) throws IOException { + String name0 = IgfsUtils.fixUserName(name); + + if (usrNameMapper != null) + name0 = IgfsUtils.fixUserName(usrNameMapper.map(name0)); + + return getWithMappedName(name0); + } + + /** + * Internal file system create routine. + * + * @param usrName User name. + * @return File system. + * @throws IOException If failed. + */ + protected FileSystem getWithMappedName(String usrName) throws IOException { + assert cfg != null; + + try { + // FileSystem.get() might delegate to ServiceLoader to get the list of file system implementation. + // And ServiceLoader is known to be sensitive to context classloader. Therefore, we change context + // classloader to classloader of current class to avoid strange class-cast-exceptions. + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader()); + + try { + return create(usrName); + } + finally { + HadoopCommonUtils.restoreContextClassLoader(oldLdr); + } + } + catch (InterruptedException e) { + Thread.currentThread().interrupt(); + + throw new IOException("Failed to create file system due to interrupt.", e); + } + } + + /** + * Internal file system creation routine, invoked in correct class loader context. + * + * @param usrName User name. + * @return File system. + * @throws IOException If failed. + * @throws InterruptedException if the current thread is interrupted. 
+ */ + protected FileSystem create(String usrName) throws IOException, InterruptedException { + return FileSystem.get(fullUri, cfg, usrName); + } + + /** {@inheritDoc} */ + @Override public void start() throws IgniteException { + BasicHadoopFileSystemFactory proxy0 = (BasicHadoopFileSystemFactory)proxy; + + cfg = HadoopUtils.safeCreateConfiguration(); + + if (proxy0.getConfigPaths() != null) { + for (String cfgPath : proxy0.getConfigPaths()) { + if (cfgPath == null) + throw new NullPointerException("Configuration path cannot be null: " + + Arrays.toString(proxy0.getConfigPaths())); + else { + URL url = U.resolveIgniteUrl(cfgPath); + + if (url == null) { + // If secConfPath is given, it should be resolvable: + throw new IgniteException("Failed to resolve secondary file system configuration path " + + "(ensure that it exists locally and you have read access to it): " + cfgPath); + } + + cfg.addResource(url); + } + } + } + + // If secondary fs URI is not given explicitly, try to get it from the configuration: + if (proxy0.getUri() == null) + fullUri = FileSystem.getDefaultUri(cfg); + else { + try { + fullUri = new URI(proxy0.getUri()); + } + catch (URISyntaxException use) { + throw new IgniteException("Failed to resolve secondary file system URI: " + proxy0.getUri()); + } + } + + usrNameMapper = proxy0.getUserNameMapper(); + + if (usrNameMapper != null && usrNameMapper instanceof LifecycleAware) + ((LifecycleAware)usrNameMapper).start(); + } + + /** {@inheritDoc} */ + @Override public void stop() throws IgniteException { + if (usrNameMapper != null && usrNameMapper instanceof LifecycleAware) + ((LifecycleAware)usrNameMapper).stop(); + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopCachingFileSystemFactoryDelegate.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopCachingFileSystemFactoryDelegate.java new file mode 100644 index 0000000000000..0cec8cac78eec --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopCachingFileSystemFactoryDelegate.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.impl.delegate; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteException; +import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopLazyConcurrentMap; + +import java.io.IOException; + +/** + * Caching Hadoop file system factory delegate. 
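/*
 * Illustrative sketch only: the thread-context-class-loader guard used above by
 * safeCreateConfiguration() and getWithMappedName(). FileSystem.get() and new Configuration()
 * resolve resources and ServiceLoader implementations through the context class loader, so it
 * is temporarily switched to a known loader and always restored. The wrapper class is
 * hypothetical; the HadoopCommonUtils helpers are the ones referenced by this patch.
 */
import org.apache.hadoop.conf.Configuration;
import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils;

public class ContextClassLoaderGuardSketch {
    /**
     * @return Configuration created under a predictable context class loader.
     */
    public static Configuration createConfiguration() {
        ClassLoader oldLdr =
            HadoopCommonUtils.setContextClassLoader(Configuration.class.getClassLoader());

        try {
            // Any work sensitive to the context class loader goes here
            // (e.g. new Configuration() or FileSystem.get(...)).
            return new Configuration();
        }
        finally {
            // Restore whatever loader the calling thread had before.
            HadoopCommonUtils.restoreContextClassLoader(oldLdr);
        }
    }
}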
+ */ +public class HadoopCachingFileSystemFactoryDelegate extends HadoopBasicFileSystemFactoryDelegate { + /** Per-user file system cache. */ + private final HadoopLazyConcurrentMap cache = new HadoopLazyConcurrentMap<>( + new HadoopLazyConcurrentMap.ValueFactory() { + @Override public FileSystem createValue(String key) throws IOException { + return HadoopCachingFileSystemFactoryDelegate.super.getWithMappedName(key); + } + } + ); + + /** + * Constructor. + * + * @param proxy Proxy. + */ + public HadoopCachingFileSystemFactoryDelegate(CachingHadoopFileSystemFactory proxy) { + super(proxy); + } + + /** {@inheritDoc} */ + @Override public FileSystem getWithMappedName(String name) throws IOException { + return cache.getOrCreate(name); + } + + /** {@inheritDoc} */ + @Override public void start() throws IgniteException { + super.start(); + + // Disable caching. + cfg.setBoolean(HadoopFileSystemsUtils.disableFsCachePropertyName(fullUri.getScheme()), true); + } + + /** {@inheritDoc} */ + @Override public void stop() throws IgniteException { + super.stop(); + + try { + cache.close(); + } + catch (IgniteCheckedException ice) { + throw new IgniteException(ice); + } + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopDefaultFileSystemFactoryDelegate.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopDefaultFileSystemFactoryDelegate.java new file mode 100644 index 0000000000000..20ac88e6cde44 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopDefaultFileSystemFactoryDelegate.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.impl.delegate; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.ignite.IgniteException; +import org.apache.ignite.hadoop.fs.HadoopFileSystemFactory; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; +import org.apache.ignite.lifecycle.LifecycleAware; + +import java.io.IOException; + +/** + * Hadoop file system factory delegate for non-standard factories. + */ +public class HadoopDefaultFileSystemFactoryDelegate implements HadoopFileSystemFactoryDelegate { + /** Factory. */ + private final HadoopFileSystemFactory factory; + + /** + * Constructor. + * + * @param factory Factory. 
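/*
 * Illustrative sketch only: the per-user caching behaviour HadoopCachingFileSystemFactoryDelegate
 * gets from HadoopLazyConcurrentMap above. Generic parameters are elided in the diff text;
 * <String, FileSystem> is assumed here, and the local file system stands in for the real
 * getWithMappedName() call.
 */
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopLazyConcurrentMap;

public class LazyFsCacheSketch {
    public static void main(String[] args) throws IOException, IgniteCheckedException {
        HadoopLazyConcurrentMap<String, FileSystem> cache = new HadoopLazyConcurrentMap<>(
            new HadoopLazyConcurrentMap.ValueFactory<String, FileSystem>() {
                @Override public FileSystem createValue(String user) throws IOException {
                    // Stand-in for the real per-user file system creation.
                    return FileSystem.getLocal(new Configuration());
                }
            }
        );

        // The factory is invoked at most once per key; both calls observe the same instance.
        FileSystem fs1 = cache.getOrCreate("alice");
        FileSystem fs2 = cache.getOrCreate("alice");

        assert fs1 == fs2;

        cache.close(); // Closes every cached file system.
    }
}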
+ */ + public HadoopDefaultFileSystemFactoryDelegate(HadoopFileSystemFactory factory) { + assert factory != null; + + this.factory = factory; + } + + /** {@inheritDoc} */ + @Override public FileSystem get(String usrName) throws IOException { + return (FileSystem)factory.get(usrName); + } + + /** {@inheritDoc} */ + @Override public void start() throws IgniteException { + if (factory instanceof LifecycleAware) + ((LifecycleAware)factory).start(); + } + + /** {@inheritDoc} */ + @Override public void stop() throws IgniteException { + if (factory instanceof LifecycleAware) + ((LifecycleAware)factory).stop(); + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopFileSystemCounterWriterDelegateImpl.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopFileSystemCounterWriterDelegateImpl.java new file mode 100644 index 0000000000000..d4c10dad4af53 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopFileSystemCounterWriterDelegateImpl.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.impl.delegate; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.mapreduce.MRJobConfig; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.hadoop.fs.IgniteHadoopFileSystemCounterWriter; +import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopPerformanceCounter; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemCounterWriterDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Job; +import org.apache.ignite.internal.processors.igfs.IgfsUtils; +import org.apache.ignite.internal.util.typedef.T2; + +import java.io.IOException; +import java.io.PrintStream; +import java.util.Map; + +/** + * Counter writer delegate implementation. 
+ */ +@SuppressWarnings("unused") +public class HadoopFileSystemCounterWriterDelegateImpl implements HadoopFileSystemCounterWriterDelegate { + /** */ + private static final String USER_MACRO = "${USER}"; + + /** */ + private static final String DEFAULT_COUNTER_WRITER_DIR = "/user/" + USER_MACRO; + + /** + * Constructor. + * + * @param proxy Proxy (not used). + */ + public HadoopFileSystemCounterWriterDelegateImpl(IgniteHadoopFileSystemCounterWriter proxy) { + // No-op. + } + + /** {@inheritDoc} */ + public void write(HadoopJob job, HadoopCounters cntrs) throws IgniteCheckedException { + Configuration hadoopCfg = HadoopUtils.safeCreateConfiguration(); + + final HadoopJobInfo jobInfo = job.info(); + + final HadoopJobId jobId = job.id(); + + for (Map.Entry e : ((HadoopDefaultJobInfo)jobInfo).properties().entrySet()) + hadoopCfg.set(e.getKey(), e.getValue()); + + String user = jobInfo.user(); + + user = IgfsUtils.fixUserName(user); + + String dir = jobInfo.property(IgniteHadoopFileSystemCounterWriter.COUNTER_WRITER_DIR_PROPERTY); + + if (dir == null) + dir = DEFAULT_COUNTER_WRITER_DIR; + + Path jobStatPath = new Path(new Path(dir.replace(USER_MACRO, user)), jobId.toString()); + + HadoopPerformanceCounter perfCntr = HadoopPerformanceCounter.getCounter(cntrs, null); + + try { + hadoopCfg.set(MRJobConfig.USER_NAME, user); + + FileSystem fs = ((HadoopV2Job)job).fileSystem(jobStatPath.toUri(), hadoopCfg); + + fs.mkdirs(jobStatPath); + + try (PrintStream out = new PrintStream(fs.create( + new Path(jobStatPath, IgniteHadoopFileSystemCounterWriter.PERFORMANCE_COUNTER_FILE_NAME)))) { + for (T2 evt : perfCntr.evts()) { + out.print(evt.get1()); + out.print(':'); + out.println(evt.get2().toString()); + } + + out.flush(); + } + } + catch (IOException e) { + throw new IgniteCheckedException(e); + } + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java new file mode 100644 index 0000000000000..fcad674deb116 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java @@ -0,0 +1,472 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.hadoop.impl.delegate; + +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.ParentNotDirectoryException; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.PathExistsException; +import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.ignite.IgniteException; +import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory; +import org.apache.ignite.hadoop.fs.HadoopFileSystemFactory; +import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; +import org.apache.ignite.igfs.IgfsDirectoryNotEmptyException; +import org.apache.ignite.igfs.IgfsException; +import org.apache.ignite.igfs.IgfsFile; +import org.apache.ignite.igfs.IgfsParentNotDirectoryException; +import org.apache.ignite.igfs.IgfsPath; +import org.apache.ignite.igfs.IgfsPathAlreadyExistsException; +import org.apache.ignite.igfs.IgfsPathNotFoundException; +import org.apache.ignite.igfs.IgfsUserContext; +import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystemPositionedReadable; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopIgfsSecondaryFileSystemDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsProperties; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsSecondaryFileSystemPositionedReadable; +import org.apache.ignite.internal.processors.igfs.IgfsEntryInfo; +import org.apache.ignite.internal.processors.igfs.IgfsFileImpl; +import org.apache.ignite.internal.processors.igfs.IgfsUtils; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.lang.IgniteUuid; +import org.jetbrains.annotations.Nullable; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStream; +import java.net.URI; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * Secondary file system implementation. + */ +@SuppressWarnings("unused") +public class HadoopIgfsSecondaryFileSystemDelegateImpl implements HadoopIgfsSecondaryFileSystemDelegate { + /** The default user name. It is used if no user context is set. */ + private final String dfltUsrName; + + /** Factory. */ + private final HadoopFileSystemFactoryDelegate factory; + + /** + * Constructor. + * + * @param proxy Proxy. 
+ */ + public HadoopIgfsSecondaryFileSystemDelegateImpl(IgniteHadoopIgfsSecondaryFileSystem proxy) { + assert proxy.getFileSystemFactory() != null; + + dfltUsrName = IgfsUtils.fixUserName(proxy.getDefaultUserName()); + + HadoopFileSystemFactory factory0 = proxy.getFileSystemFactory(); + + if (factory0 == null) + factory0 = new CachingHadoopFileSystemFactory(); + + factory = HadoopDelegateUtils.fileSystemFactoryDelegate(factory0); + } + + /** {@inheritDoc} */ + @Override public boolean exists(IgfsPath path) { + try { + return fileSystemForUser().exists(convert(path)); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to check file existence [path=" + path + "]"); + } + } + + /** {@inheritDoc} */ + @Nullable @Override public IgfsFile update(IgfsPath path, Map props) { + HadoopIgfsProperties props0 = new HadoopIgfsProperties(props); + + final FileSystem fileSys = fileSystemForUser(); + + try { + if (props0.userName() != null || props0.groupName() != null) + fileSys.setOwner(convert(path), props0.userName(), props0.groupName()); + + if (props0.permission() != null) + fileSys.setPermission(convert(path), props0.permission()); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to update file properties [path=" + path + "]"); + } + + //Result is not used in case of secondary FS. + return null; + } + + /** {@inheritDoc} */ + @Override public void rename(IgfsPath src, IgfsPath dest) { + // Delegate to the secondary file system. + try { + if (!fileSystemForUser().rename(convert(src), convert(dest))) + throw new IgfsException("Failed to rename (secondary file system returned false) " + + "[src=" + src + ", dest=" + dest + ']'); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to rename file [src=" + src + ", dest=" + dest + ']'); + } + } + + /** {@inheritDoc} */ + @Override public boolean delete(IgfsPath path, boolean recursive) { + try { + return fileSystemForUser().delete(convert(path), recursive); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to delete file [path=" + path + ", recursive=" + recursive + "]"); + } + } + + /** {@inheritDoc} */ + @Override public void mkdirs(IgfsPath path) { + try { + if (!fileSystemForUser().mkdirs(convert(path))) + throw new IgniteException("Failed to make directories [path=" + path + "]"); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to make directories [path=" + path + "]"); + } + } + + /** {@inheritDoc} */ + @Override public void mkdirs(IgfsPath path, @Nullable Map props) { + try { + if (!fileSystemForUser().mkdirs(convert(path), new HadoopIgfsProperties(props).permission())) + throw new IgniteException("Failed to make directories [path=" + path + ", props=" + props + "]"); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to make directories [path=" + path + ", props=" + props + "]"); + } + } + + /** {@inheritDoc} */ + @Override public Collection listPaths(IgfsPath path) { + try { + FileStatus[] statuses = fileSystemForUser().listStatus(convert(path)); + + if (statuses == null) + throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); + + Collection res = new ArrayList<>(statuses.length); + + for (FileStatus status : statuses) + res.add(new IgfsPath(path, status.getPath().getName())); + + return res; + } + catch (FileNotFoundException ignored) { + throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); + } + catch (IOException e) { + throw 
handleSecondaryFsError(e, "Failed to list statuses due to secondary file system exception: " + path); + } + } + + /** {@inheritDoc} */ + @Override public Collection listFiles(IgfsPath path) { + try { + FileStatus[] statuses = fileSystemForUser().listStatus(convert(path)); + + if (statuses == null) + throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); + + Collection res = new ArrayList<>(statuses.length); + + for (FileStatus s : statuses) { + IgfsEntryInfo fsInfo = s.isDirectory() ? + IgfsUtils.createDirectory( + IgniteUuid.randomUuid(), + null, + properties(s), + s.getAccessTime(), + s.getModificationTime() + ) : + IgfsUtils.createFile( + IgniteUuid.randomUuid(), + (int)s.getBlockSize(), + s.getLen(), + null, + null, + false, + properties(s), + s.getAccessTime(), + s.getModificationTime() + ); + + res.add(new IgfsFileImpl(new IgfsPath(path, s.getPath().getName()), fsInfo, 1)); + } + + return res; + } + catch (FileNotFoundException ignored) { + throw new IgfsPathNotFoundException("Failed to list files (path not found): " + path); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to list statuses due to secondary file system exception: " + path); + } + } + + /** {@inheritDoc} */ + @Override public IgfsSecondaryFileSystemPositionedReadable open(IgfsPath path, int bufSize) { + return new HadoopIgfsSecondaryFileSystemPositionedReadable(fileSystemForUser(), convert(path), bufSize); + } + + /** {@inheritDoc} */ + @Override public OutputStream create(IgfsPath path, boolean overwrite) { + try { + return fileSystemForUser().create(convert(path), overwrite); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to create file [path=" + path + ", overwrite=" + overwrite + "]"); + } + } + + /** {@inheritDoc} */ + @Override public OutputStream create(IgfsPath path, int bufSize, boolean overwrite, int replication, + long blockSize, @Nullable Map props) { + HadoopIgfsProperties props0 = + new HadoopIgfsProperties(props != null ? props : Collections.emptyMap()); + + try { + return fileSystemForUser().create(convert(path), props0.permission(), overwrite, bufSize, + (short) replication, blockSize, null); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to create file [path=" + path + ", props=" + props + + ", overwrite=" + overwrite + ", bufSize=" + bufSize + ", replication=" + replication + + ", blockSize=" + blockSize + "]"); + } + } + + /** {@inheritDoc} */ + @Override public OutputStream append(IgfsPath path, int bufSize, boolean create, + @Nullable Map props) { + try { + return fileSystemForUser().append(convert(path), bufSize); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to append file [path=" + path + ", bufSize=" + bufSize + "]"); + } + } + + /** {@inheritDoc} */ + @Override public IgfsFile info(final IgfsPath path) { + try { + final FileStatus status = fileSystemForUser().getFileStatus(convert(path)); + + if (status == null) + return null; + + final Map props = properties(status); + + return new IgfsFile() { + @Override public IgfsPath path() { + return path; + } + + @Override public boolean isFile() { + return status.isFile(); + } + + @Override public boolean isDirectory() { + return status.isDirectory(); + } + + @Override public int blockSize() { + // By convention directory has blockSize == 0, while file has blockSize > 0: + return isDirectory() ? 
0 : (int)status.getBlockSize(); + } + + @Override public long groupBlockSize() { + return status.getBlockSize(); + } + + @Override public long accessTime() { + return status.getAccessTime(); + } + + @Override public long modificationTime() { + return status.getModificationTime(); + } + + @Override public String property(String name) throws IllegalArgumentException { + String val = props.get(name); + + if (val == null) + throw new IllegalArgumentException("File property not found [path=" + path + ", name=" + name + ']'); + + return val; + } + + @Nullable @Override public String property(String name, @Nullable String dfltVal) { + String val = props.get(name); + + return val == null ? dfltVal : val; + } + + @Override public long length() { + return status.getLen(); + } + + /** {@inheritDoc} */ + @Override public Map properties() { + return props; + } + }; + } + catch (FileNotFoundException ignore) { + return null; + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to get file status [path=" + path + "]"); + } + } + + /** {@inheritDoc} */ + @Override public long usedSpaceSize() { + try { + // We don't use FileSystem#getUsed() since it counts only the files + // in the filesystem root, not all the files recursively. + return fileSystemForUser().getContentSummary(new Path("/")).getSpaceConsumed(); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed to get used space size of file system."); + } + } + + /** {@inheritDoc} */ + @Override public void setTimes(IgfsPath path, long accessTime, long modificationTime) throws IgniteException { + try { + // We don't use FileSystem#getUsed() since it counts only the files + // in the filesystem root, not all the files recursively. + fileSystemForUser().setTimes(convert(path), modificationTime, accessTime); + } + catch (IOException e) { + throw handleSecondaryFsError(e, "Failed set times for path: " + path); + } + } + + /** {@inheritDoc} */ + public void start() { + factory.start(); + } + + /** {@inheritDoc} */ + public void stop() { + factory.stop(); + } + + /** + * Convert IGFS path into Hadoop path. + * + * @param path IGFS path. + * @return Hadoop path. + */ + private Path convert(IgfsPath path) { + URI uri = fileSystemForUser().getUri(); + + return new Path(uri.getScheme(), uri.getAuthority(), path.toString()); + } + + /** + * Heuristically checks if exception was caused by invalid HDFS version and returns appropriate exception. + * + * @param e Exception to check. + * @param detailMsg Detailed error message. + * @return Appropriate exception. + */ + private IgfsException handleSecondaryFsError(IOException e, String detailMsg) { + return cast(detailMsg, e); + } + + /** + * Cast IO exception to IGFS exception. + * + * @param e IO exception. + * @return IGFS exception. + */ + public static IgfsException cast(String msg, IOException e) { + if (e instanceof FileNotFoundException) + return new IgfsPathNotFoundException(e); + else if (e instanceof ParentNotDirectoryException) + return new IgfsParentNotDirectoryException(msg, e); + else if (e instanceof PathIsNotEmptyDirectoryException) + return new IgfsDirectoryNotEmptyException(e); + else if (e instanceof PathExistsException) + return new IgfsPathAlreadyExistsException(msg, e); + else + return new IgfsException(msg, e); + } + + /** + * Convert Hadoop FileStatus properties to map. + * + * @param status File status. + * @return IGFS attributes. 
+ */ + private static Map properties(FileStatus status) { + FsPermission perm = status.getPermission(); + + if (perm == null) + perm = FsPermission.getDefault(); + + HashMap res = new HashMap<>(3); + + res.put(IgfsUtils.PROP_PERMISSION, String.format("%04o", perm.toShort())); + res.put(IgfsUtils.PROP_USER_NAME, status.getOwner()); + res.put(IgfsUtils.PROP_GROUP_NAME, status.getGroup()); + + return res; + } + + /** + * Gets the FileSystem for the current context user. + * @return the FileSystem instance, never null. + */ + private FileSystem fileSystemForUser() { + String user = IgfsUserContext.currentUser(); + + if (F.isEmpty(user)) + user = IgfsUtils.fixUserName(dfltUsrName); + + assert !F.isEmpty(user); + + try { + return (FileSystem)factory.get(user); + } + catch (IOException ioe) { + throw new IgniteException(ioe); + } + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(HadoopIgfsSecondaryFileSystemDelegateImpl.class, this); + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopKerberosFileSystemFactoryDelegate.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopKerberosFileSystemFactoryDelegate.java new file mode 100644 index 0000000000000..c71dedbc79eab --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopKerberosFileSystemFactoryDelegate.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.impl.delegate; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.ignite.IgniteException; +import org.apache.ignite.hadoop.fs.KerberosHadoopFileSystemFactory; +import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.internal.A; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; + +/** + * Kerberos Hadoop file system factory delegate. + */ +public class HadoopKerberosFileSystemFactoryDelegate extends HadoopBasicFileSystemFactoryDelegate { + /** The re-login interval. */ + private long reloginInterval; + + /** Time of last re-login attempt, in system milliseconds. */ + private volatile long lastReloginTime; + + /** + * Constructor. + * + * @param proxy Proxy. 
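/*
 * Illustrative sketch only: the permission conversion performed by properties(FileStatus)
 * above, where the Hadoop permission is rendered as a four-digit octal string and stored
 * under the IGFS property keys together with owner and group. The permission literal used
 * here is an arbitrary example.
 */
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class PermissionPropertySketch {
    public static void main(String[] args) {
        FsPermission perm = new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE);

        // Same formatting as in properties(): "%04o" applied to the short permission value.
        String igfsPerm = String.format("%04o", perm.toShort());

        System.out.println(igfsPerm); // Prints "0750".
    }
}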
+ */ + public HadoopKerberosFileSystemFactoryDelegate(KerberosHadoopFileSystemFactory proxy) { + super(proxy); + } + + /** {@inheritDoc} */ + @Override public FileSystem getWithMappedName(String name) throws IOException { + reloginIfNeeded(); + + return super.getWithMappedName(name); + } + + /** {@inheritDoc} */ + @Override protected FileSystem create(String usrName) throws IOException, InterruptedException { + UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(usrName, + UserGroupInformation.getLoginUser()); + + return proxyUgi.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { + return FileSystem.get(fullUri, cfg); + } + }); + } + + @Override public void start() throws IgniteException { + super.start(); + + KerberosHadoopFileSystemFactory proxy0 = (KerberosHadoopFileSystemFactory)proxy; + + A.ensure(!F.isEmpty(proxy0.getKeyTab()), "keyTab cannot not be empty."); + A.ensure(!F.isEmpty(proxy0.getKeyTabPrincipal()), "keyTabPrincipal cannot not be empty."); + A.ensure(proxy0.getReloginInterval() >= 0, "reloginInterval cannot not be negative."); + + reloginInterval = proxy0.getReloginInterval(); + + try { + UserGroupInformation.setConfiguration(cfg); + UserGroupInformation.loginUserFromKeytab(proxy0.getKeyTabPrincipal(), proxy0.getKeyTab()); + } + catch (IOException ioe) { + throw new IgniteException("Failed login from keytab [keyTab=" + proxy0.getKeyTab() + + ", keyTabPrincipal=" + proxy0.getKeyTabPrincipal() + ']', ioe); + } + } + + /** + * Re-logins the user if needed. + * First, the re-login interval defined in factory is checked. The re-login attempts will be not more + * frequent than one attempt per {@code reloginInterval}. + * Second, {@code UserGroupInformation.checkTGTAndReloginFromKeytab()} method invoked that gets existing + * TGT and checks its validity. If the TGT is expired or is close to expiry, it performs re-login. + * + *

This operation expected to be called upon each operation with the file system created with the factory. + * As long as {@link #get(String)} operation is invoked upon each file {@link IgniteHadoopFileSystem}, there + * is no need to invoke it otherwise specially. + * + * @throws IOException If login fails. + */ + private void reloginIfNeeded() throws IOException { + long now = System.currentTimeMillis(); + + if (now >= lastReloginTime + reloginInterval) { + UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab(); + + lastReloginTime = now; + } + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemCacheUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopFileSystemCacheUtils.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemCacheUtils.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopFileSystemCacheUtils.java index 1ecbee5e3d939..0b673e9ff2a79 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemCacheUtils.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopFileSystemCacheUtils.java @@ -15,10 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; -import java.io.IOException; -import java.net.URI; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.mapreduce.MRJobConfig; @@ -28,6 +26,9 @@ import org.apache.ignite.internal.util.typedef.F; import org.jetbrains.annotations.Nullable; +import java.io.IOException; +import java.net.URI; + /** * File system cache utility methods used by Map-Reduce tasks and jobs. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopFileSystemsUtils.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopFileSystemsUtils.java index 68c0dc4890b56..5115cb47c212f 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopFileSystemsUtils.java @@ -15,7 +15,7 @@ * limitations under the License. 
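/*
 * Illustrative sketch only: the Kerberos access pattern used by
 * HadoopKerberosFileSystemFactoryDelegate above. A proxy UGI is created for the requested
 * user on top of the keytab login, and TGT renewal is throttled so that
 * checkTGTAndReloginFromKeytab() runs at most once per relogin interval. The class name,
 * URI, configuration and interval value are hypothetical placeholders.
 */
import java.io.IOException;
import java.net.URI;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosAccessSketch {
    /** Minimum time between TGT checks, in milliseconds (placeholder value). */
    private static final long RELOGIN_INTERVAL = 10 * 60 * 1000L;

    /** Time of the last relogin attempt, in system milliseconds. */
    private volatile long lastReloginTime;

    /**
     * @param user User to act for, on behalf of the keytab principal.
     * @param uri File system URI.
     * @param cfg Hadoop configuration.
     * @return File system obtained under the proxy user.
     */
    public FileSystem fileSystemFor(String user, final URI uri, final Configuration cfg)
        throws IOException, InterruptedException {
        long now = System.currentTimeMillis();

        if (now >= lastReloginTime + RELOGIN_INTERVAL) {
            // Re-acquires the TGT only if it is expired or close to expiry.
            UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();

            lastReloginTime = now;
        }

        UserGroupInformation proxyUgi =
            UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());

        return proxyUgi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            @Override public FileSystem run() throws Exception {
                return FileSystem.get(uri, cfg);
            }
        });
    }
}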
*/ -package org.apache.ignite.internal.processors.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FsConstants; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLazyConcurrentMap.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLazyConcurrentMap.java index 681cddbf50717..ea1f6642b7d5e 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLazyConcurrentMap.java @@ -15,7 +15,12 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; + +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteException; +import org.apache.ignite.internal.util.future.GridFutureAdapter; +import org.jsr166.ConcurrentHashMap8; import java.io.Closeable; import java.io.IOException; @@ -23,11 +28,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.ignite.Ignite; -import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.IgniteException; -import org.apache.ignite.internal.util.future.GridFutureAdapter; -import org.jsr166.ConcurrentHashMap8; /** * Maps values by keys. @@ -54,8 +54,6 @@ public class HadoopLazyConcurrentMap { */ public HadoopLazyConcurrentMap(ValueFactory factory) { this.factory = factory; - - assert getClass().getClassLoader() == Ignite.class.getClassLoader(); } /** diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLocalFileSystemV1.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLocalFileSystemV1.java similarity index 95% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLocalFileSystemV1.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLocalFileSystemV1.java index cbb007f982b0c..df3ca2b5e8299 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLocalFileSystemV1.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLocalFileSystemV1.java @@ -15,12 +15,13 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; -import java.io.File; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; +import java.io.File; + /** * Local file system replacement for Hadoop jobs. 
*/ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLocalFileSystemV2.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLocalFileSystemV2.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLocalFileSystemV2.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLocalFileSystemV2.java index 24844920ee9fe..a98b2e387a873 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLocalFileSystemV2.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopLocalFileSystemV2.java @@ -15,17 +15,18 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ChecksumFs; import org.apache.hadoop.fs.DelegateToFileSystem; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.local.LocalConfigKeys; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; + import static org.apache.hadoop.fs.FsConstants.LOCAL_FS_URI; /** diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopParameters.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopParameters.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopParameters.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopParameters.java index 0aac4a308c6e2..b583a1df33aea 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopParameters.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopParameters.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; /** * This class lists parameters that can be specified in Hadoop configuration. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopRawLocalFileSystem.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopRawLocalFileSystem.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopRawLocalFileSystem.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopRawLocalFileSystem.java index b8fc8e772965d..89c5938332fd4 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopRawLocalFileSystem.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/fs/HadoopRawLocalFileSystem.java @@ -15,17 +15,8 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.RandomAccessFile; -import java.net.URI; -import java.nio.file.Files; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -40,6 +31,16 @@ import org.apache.hadoop.util.Progressable; import org.apache.ignite.internal.util.typedef.internal.U; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.net.URI; +import java.nio.file.Files; + /** * Local file system implementation for Hadoop. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfs.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfs.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs.java index fe435967b2354..8bb904fc96f73 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfs.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs.java @@ -15,11 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; -import java.util.Collection; -import java.util.Map; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.igfs.IgfsBlockLocation; import org.apache.ignite.igfs.IgfsFile; @@ -29,6 +26,10 @@ import org.apache.ignite.internal.processors.igfs.IgfsStatus; import org.jetbrains.annotations.Nullable; +import java.io.IOException; +import java.util.Collection; +import java.util.Map; + /** * Facade for communication with grid. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsCommunicationException.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsCommunicationException.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsCommunicationException.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsCommunicationException.java index d610091790cdb..ddfe35b69cfdc 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsCommunicationException.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsCommunicationException.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.ignite.IgniteCheckedException; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsEx.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsEx.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsEx.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsEx.java index 014e2a1deb09d..2294134b1717c 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsEx.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsEx.java @@ -15,12 +15,13 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; import org.apache.ignite.internal.IgniteInternalFuture; import org.jetbrains.annotations.Nullable; +import java.io.IOException; + /** * Extended IGFS server interface. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsFuture.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsFuture.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsFuture.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsFuture.java index 5ff1b2eb43590..cfdd7925a1f8d 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsFuture.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsFuture.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.jetbrains.annotations.Nullable; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsInProc.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsInProc.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsInProc.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsInProc.java index 322053829653e..8bdcc83876583 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsInProc.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsInProc.java @@ -15,14 +15,8 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.Closeable; -import java.io.IOException; -import java.util.Collection; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; @@ -42,6 +36,13 @@ import org.apache.ignite.lang.IgniteOutClosure; import org.jetbrains.annotations.Nullable; +import java.io.Closeable; +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; + /** * Communication with grid in the same process. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsInputStream.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsInputStream.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsInputStream.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsInputStream.java index 46b46d7690bc8..efc270b97e4cb 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsInputStream.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsInputStream.java @@ -15,11 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.EOFException; -import java.io.IOException; -import java.io.InputStream; import org.apache.commons.logging.Log; import org.apache.hadoop.fs.PositionedReadable; import org.apache.hadoop.fs.Seekable; @@ -30,6 +27,10 @@ import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.NotNull; +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; + /** * IGFS input stream wrapper for hadoop interfaces. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIo.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIo.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIo.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIo.java index 70f645f58e399..b8bcad9a59ee0 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIo.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIo.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIpcIo.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIpcIo.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIpcIo.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIpcIo.java index b0a41354af23d..a2ec977bea74d 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIpcIo.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIpcIo.java @@ -15,20 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.BufferedOutputStream; -import java.io.EOFException; -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.Iterator; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.commons.logging.Log; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.igfs.IgfsException; @@ -52,6 +40,19 @@ import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentHashMap8; +import java.io.BufferedOutputStream; +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.util.Collection; +import java.util.Iterator; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + /** * IO layer implementation based on blocking IPC streams. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIpcIoListener.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIpcIoListener.java similarity index 94% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIpcIoListener.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIpcIoListener.java index c26e896035b74..88e59e67a8051 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsIpcIoListener.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsIpcIoListener.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; /** * Listens to the events of {@link HadoopIgfsIpcIo}. 
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsJclLogger.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsJclLogger.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsJclLogger.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsJclLogger.java index 3a7f45b762f72..0ae8a9fe1a421 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsJclLogger.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsJclLogger.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsOutProc.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsOutProc.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsOutProc.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsOutProc.java index 99021426481f7..4dc3c7fa19d1b 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsOutProc.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsOutProc.java @@ -15,11 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; -import java.util.Collection; -import java.util.Map; import org.apache.commons.logging.Log; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.igfs.IgfsBlockLocation; @@ -44,6 +41,10 @@ import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentHashMap8; +import java.io.IOException; +import java.util.Collection; +import java.util.Map; + import static org.apache.ignite.internal.igfs.common.IgfsIpcCommand.AFFINITY; import static org.apache.ignite.internal.igfs.common.IgfsIpcCommand.CLOSE; import static org.apache.ignite.internal.igfs.common.IgfsIpcCommand.DELETE; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsOutputStream.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsOutputStream.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsOutputStream.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsOutputStream.java index 8f7458b442315..7f95a6b166d45 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsOutputStream.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsOutputStream.java @@ -15,15 +15,16 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; -import java.io.OutputStream; import org.apache.commons.logging.Log; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.igfs.common.IgfsLogger; import org.jetbrains.annotations.NotNull; +import java.io.IOException; +import java.io.OutputStream; + /** * IGFS Hadoop output stream implementation. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProperties.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProperties.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProperties.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProperties.java index 90f6bcac8b2f8..5427bf15b6033 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProperties.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProperties.java @@ -15,13 +15,14 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.util.Map; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.ignite.IgniteException; import org.apache.ignite.internal.processors.igfs.IgfsUtils; +import java.util.Map; + /** * Hadoop file system properties. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProxyInputStream.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProxyInputStream.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProxyInputStream.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProxyInputStream.java index 5cee947ac4d17..133e2077fae2e 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProxyInputStream.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProxyInputStream.java @@ -15,15 +15,16 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; -import java.io.InputStream; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.PositionedReadable; import org.apache.hadoop.fs.Seekable; import org.apache.ignite.internal.igfs.common.IgfsLogger; +import java.io.IOException; +import java.io.InputStream; + /** * Secondary Hadoop file system input stream wrapper. 
*/ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProxyOutputStream.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProxyOutputStream.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProxyOutputStream.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProxyOutputStream.java index eade0f013a7f7..8917a95ca01e6 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsProxyOutputStream.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProxyOutputStream.java @@ -15,13 +15,14 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; -import java.io.OutputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.ignite.internal.igfs.common.IgfsLogger; +import java.io.IOException; +import java.io.OutputStream; + /** * Secondary Hadoop file system output stream wrapper. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsSecondaryFileSystemPositionedReadable.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsSecondaryFileSystemPositionedReadable.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsSecondaryFileSystemPositionedReadable.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsSecondaryFileSystemPositionedReadable.java index a0577cea08305..1a4add54891be 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsSecondaryFileSystemPositionedReadable.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsSecondaryFileSystemPositionedReadable.java @@ -15,9 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -25,6 +24,8 @@ import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystemPositionedReadable; import org.apache.ignite.internal.util.typedef.internal.U; +import java.io.IOException; + /** * Secondary file system input stream wrapper which actually opens input stream only in case it is explicitly * requested. 
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsStreamDelegate.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsStreamDelegate.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsStreamDelegate.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsStreamDelegate.java index 37b58ab6cf072..e6f8061a65f22 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsStreamDelegate.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsStreamDelegate.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.ignite.internal.util.typedef.internal.S; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsStreamEventListener.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsStreamEventListener.java similarity index 95% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsStreamEventListener.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsStreamEventListener.java index d81f765114e0b..ee46ed414423e 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsStreamEventListener.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsStreamEventListener.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.ignite.IgniteCheckedException; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsUtils.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsUtils.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsUtils.java index fa5cbc584e6d8..ee7756e082eef 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsUtils.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsUtils.java @@ -15,10 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.FileNotFoundException; -import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; import org.apache.hadoop.fs.FileStatus; @@ -34,6 +32,9 @@ import org.apache.ignite.igfs.IgfsPathNotFoundException; import org.jetbrains.annotations.Nullable; +import java.io.FileNotFoundException; +import java.io.IOException; + /** * Utility constants and methods for IGFS Hadoop file system. 
*/ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsWrapper.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsWrapper.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsWrapper.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsWrapper.java index f4ee97fd8e997..1fda1c32ea8c3 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/igfs/HadoopIgfsWrapper.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsWrapper.java @@ -15,14 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; -import java.io.IOException; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.ignite.IgniteCheckedException; @@ -33,6 +27,7 @@ import org.apache.ignite.igfs.IgfsFile; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.igfs.IgfsPathSummary; +import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsEndpoint; import org.apache.ignite.internal.processors.igfs.IgfsEx; import org.apache.ignite.internal.processors.igfs.IgfsHandshakeResponse; import org.apache.ignite.internal.processors.igfs.IgfsStatus; @@ -42,12 +37,19 @@ import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + import static org.apache.ignite.IgniteState.STARTED; import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsEndpoint.LOCALHOST; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_EMBED; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_SHMEM; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_TCP; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.parameter; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_EMBED; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_SHMEM; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_TCP; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.parameter; /** * Wrapper for IGFS server. 
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/proto/HadoopClientProtocol.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/proto/HadoopClientProtocol.java similarity index 91% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/proto/HadoopClientProtocol.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/proto/HadoopClientProtocol.java index 5f96e082cd3f8..be2aa093978da 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/proto/HadoopClientProtocol.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/proto/HadoopClientProtocol.java @@ -15,9 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.proto; +package org.apache.ignite.internal.processors.hadoop.impl.proto; -import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -46,18 +45,24 @@ import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.client.GridClient; import org.apache.ignite.internal.client.GridClientException; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.processors.hadoop.HadoopJobProperty; import org.apache.ignite.internal.processors.hadoop.HadoopJobStatus; -import org.apache.ignite.internal.processors.hadoop.HadoopMapReduceCounters; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopMapReduceCounters; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils; +import org.apache.ignite.internal.processors.hadoop.proto.HadoopProtocolJobCountersTask; +import org.apache.ignite.internal.processors.hadoop.proto.HadoopProtocolJobStatusTask; +import org.apache.ignite.internal.processors.hadoop.proto.HadoopProtocolKillJobTask; +import org.apache.ignite.internal.processors.hadoop.proto.HadoopProtocolNextTaskIdTask; +import org.apache.ignite.internal.processors.hadoop.proto.HadoopProtocolSubmitJobTask; +import org.apache.ignite.internal.processors.hadoop.proto.HadoopProtocolTaskArguments; import org.apache.ignite.internal.util.typedef.internal.U; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.JOB_SUBMISSION_START_TS_PROPERTY; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.REQ_NEW_JOBID_TS_PROPERTY; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.RESPONSE_NEW_JOBID_TS_PROPERTY; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import java.io.IOException; + +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Hadoop client protocol. 
@@ -97,11 +102,11 @@ public HadoopClientProtocol(Configuration conf, GridClient cli) { /** {@inheritDoc} */ @Override public JobID getNewJobID() throws IOException, InterruptedException { try { - conf.setLong(REQ_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis()); + conf.setLong(HadoopCommonUtils.REQ_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis()); HadoopJobId jobID = cli.compute().execute(HadoopProtocolNextTaskIdTask.class.getName(), null); - conf.setLong(RESPONSE_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis()); + conf.setLong(HadoopCommonUtils.RESPONSE_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis()); return new JobID(jobID.globalId().toString(), jobID.localId()); } @@ -114,7 +119,7 @@ public HadoopClientProtocol(Configuration conf, GridClient cli) { @Override public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts) throws IOException, InterruptedException { try { - conf.setLong(JOB_SUBMISSION_START_TS_PROPERTY, U.currentTimeMillis()); + conf.setLong(HadoopCommonUtils.JOB_SUBMISSION_START_TS_PROPERTY, U.currentTimeMillis()); HadoopJobStatus status = cli.compute().execute(HadoopProtocolSubmitJobTask.class.getName(), new HadoopProtocolTaskArguments(jobId.getJtIdentifier(), jobId.getId(), createJobInfo(conf))); diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1CleanupTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1CleanupTask.java similarity index 93% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1CleanupTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1CleanupTask.java index 750b31451bd09..ddf6c297c1ec2 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1CleanupTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1CleanupTask.java @@ -15,16 +15,17 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; -import java.io.IOException; import org.apache.hadoop.mapred.JobContext; import org.apache.hadoop.mapred.JobStatus; import org.apache.hadoop.mapred.OutputCommitter; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext; + +import java.io.IOException; /** * Hadoop cleanup task implementation for v1 API. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Counter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Counter.java similarity index 95% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Counter.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Counter.java index c623eab3f7784..d91730fa5c499 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Counter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Counter.java @@ -15,15 +15,16 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapreduce.Counter; import org.apache.ignite.internal.processors.hadoop.counter.HadoopLongCounter; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Counter; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Counter; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; import static org.apache.hadoop.mapreduce.util.CountersStrings.toEscapedCompactString; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1MapTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1MapTask.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1MapTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1MapTask.java index fb2266a2435b9..65ff280df7659 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1MapTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1MapTask.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.FileSplit; @@ -33,7 +33,7 @@ import org.apache.ignite.internal.processors.hadoop.HadoopTaskCancelledException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext; /** * Hadoop map task implementation for v1 API. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1OutputCollector.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1OutputCollector.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1OutputCollector.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1OutputCollector.java index 37f81a66510b4..1a3c4bd169f41 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1OutputCollector.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1OutputCollector.java @@ -15,9 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; -import java.io.IOException; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.OutputCommitter; @@ -31,6 +30,8 @@ import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.jetbrains.annotations.Nullable; +import java.io.IOException; + /** * Hadoop output collector. 
*/ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Partitioner.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Partitioner.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Partitioner.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Partitioner.java index 0ab1bba8decfe..97634d989575f 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Partitioner.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Partitioner.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.Partitioner; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1ReduceTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1ReduceTask.java similarity index 95% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1ReduceTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1ReduceTask.java index e656695a5b031..92c024ec06924 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1ReduceTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1ReduceTask.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reducer; @@ -27,7 +27,7 @@ import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext; /** * Hadoop reduce task implementation for v1 API. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Reporter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Reporter.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Reporter.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Reporter.java index 5a63aab86cab0..f3229e21a6355 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Reporter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Reporter.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.InputSplit; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1SetupTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1SetupTask.java similarity index 92% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1SetupTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1SetupTask.java index d2f6823a56dc6..2fd7332ba00c4 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1SetupTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1SetupTask.java @@ -15,14 +15,15 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; -import java.io.IOException; import org.apache.hadoop.mapred.OutputCommitter; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext; + +import java.io.IOException; /** * Hadoop setup task implementation for v1 API. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Splitter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Splitter.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Splitter.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Splitter.java index 203def4779ef9..11a35988b81ea 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Splitter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Splitter.java @@ -15,11 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputFormat; @@ -28,10 +25,14 @@ import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; + /** * Hadoop API v1 splitter. 
*/ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Task.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Task.java similarity index 95% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Task.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Task.java index a89323cf9359f..4ed5eb3ca83c9 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v1/HadoopV1Task.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v1/HadoopV1Task.java @@ -15,18 +15,19 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v1; +package org.apache.ignite.internal.processors.hadoop.impl.v1; -import java.io.IOException; -import java.text.NumberFormat; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.TaskAttemptID; import org.apache.ignite.internal.processors.hadoop.HadoopTask; import org.apache.ignite.internal.processors.hadoop.HadoopTaskCancelledException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext; import org.jetbrains.annotations.Nullable; +import java.io.IOException; +import java.text.NumberFormat; + /** * Extended Hadoop v1 task. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopDaemon.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopDaemon.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopDaemon.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopDaemon.java index 9632525772021..ea7128c59bd45 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopDaemon.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopDaemon.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; import java.util.Collection; import java.util.LinkedList; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopSerializationWrapper.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopSerializationWrapper.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopSerializationWrapper.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopSerializationWrapper.java index 844e7f8847a02..e045dba0e17bb 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopSerializationWrapper.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopSerializationWrapper.java @@ -15,13 +15,8 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.Serializer; @@ -29,6 +24,12 @@ import org.apache.ignite.internal.processors.hadoop.HadoopSerialization; import org.jetbrains.annotations.Nullable; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + /** * The wrapper around external serializer. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopShutdownHookManager.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopShutdownHookManager.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopShutdownHookManager.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopShutdownHookManager.java index 8bd71e0913307..6d947e8ef4bef 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopShutdownHookManager.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopShutdownHookManager.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; import java.util.Collections; import java.util.HashSet; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2CleanupTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2CleanupTask.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2CleanupTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2CleanupTask.java index abb904ce2db4b..15e3d0f910da8 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2CleanupTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2CleanupTask.java @@ -15,9 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.IOException; import org.apache.hadoop.mapred.JobContextImpl; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.OutputCommitter; @@ -26,6 +25,8 @@ import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; +import java.io.IOException; + /** * Hadoop cleanup task (commits or aborts job). 
*/ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Context.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Context.java similarity index 99% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Context.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Context.java index 2ff294566ffa9..90a1bad10a97c 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Context.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Context.java @@ -15,10 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.IOException; -import java.util.Iterator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.InputSplit; @@ -39,6 +37,9 @@ import org.apache.ignite.internal.processors.hadoop.HadoopTaskOutput; import org.apache.ignite.internal.processors.hadoop.counter.HadoopLongCounter; +import java.io.IOException; +import java.util.Iterator; + /** * Hadoop context implementation for v2 API. It provides IO operations for hadoop tasks. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Counter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Counter.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Counter.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Counter.java index cad9e645b82fb..bd304456a6d89 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Counter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Counter.java @@ -15,13 +15,14 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; + +import org.apache.hadoop.mapreduce.Counter; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopLongCounter; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import org.apache.hadoop.mapreduce.Counter; -import org.apache.ignite.internal.processors.hadoop.counter.HadoopLongCounter; /** * Adapter from own counter implementation into Hadoop API Counter od version 2.0. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Job.java similarity index 88% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Job.java index a0f30eba54416..3731b2c1e1f3e 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Job.java @@ -15,26 +15,8 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInput; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.File; -import java.io.IOException; -import java.lang.reflect.Constructor; -import java.lang.reflect.Method; -import java.net.URI; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; -import java.util.Queue; -import java.util.UUID; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.ConcurrentMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -50,8 +32,11 @@ import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopExternalSplit; import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock; +import org.apache.ignite.internal.processors.hadoop.HadoopHelper; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; import org.apache.ignite.internal.processors.hadoop.HadoopJob; import org.apache.ignite.internal.processors.hadoop.HadoopJobId; @@ -59,10 +44,9 @@ import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; import org.apache.ignite.internal.processors.hadoop.HadoopTaskType; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemsUtils; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopLazyConcurrentMap; -import org.apache.ignite.internal.processors.hadoop.v1.HadoopV1Splitter; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopLazyConcurrentMap; +import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1Splitter; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.T2; @@ -70,12 +54,31 @@ import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentHashMap8; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.jobLocalDir; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.taskLocalDir; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.transformException; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.FsCacheKey; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.createHadoopLazyConcurrentMap; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.fileSystemForMrUserWithCaching; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInput; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; +import java.net.URI; 
+import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; +import java.util.Queue; +import java.util.UUID; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentMap; + +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.jobLocalDir; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.taskLocalDir; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.transformException; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.FsCacheKey; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.createHadoopLazyConcurrentMap; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.fileSystemForMrUserWithCaching; /** * Hadoop job implementation for v2 API. @@ -87,6 +90,9 @@ public class HadoopV2Job implements HadoopJob { /** */ private final JobContextImpl jobCtx; + /** */ + private final HadoopHelper helper; + /** Hadoop job ID. */ private final HadoopJobId jobId; @@ -130,15 +136,16 @@ public class HadoopV2Job implements HadoopJob { * @param libNames Optional additional native library names. */ public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, IgniteLogger log, - @Nullable String[] libNames) { + @Nullable String[] libNames, HadoopHelper helper) { assert jobId != null; assert jobInfo != null; this.jobId = jobId; this.jobInfo = jobInfo; this.libNames = libNames; + this.helper = helper; - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(getClass().getClassLoader()); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader()); try { hadoopJobID = new JobID(jobId.globalId().toString(), jobId.localId()); @@ -155,7 +162,7 @@ public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, Ignite rsrcMgr = new HadoopV2JobResourceManager(jobId, jobCtx, log, this); } finally { - HadoopUtils.setContextClassLoader(oldLdr); + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } @@ -171,7 +178,7 @@ public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, Ignite /** {@inheritDoc} */ @Override public Collection input() throws IgniteCheckedException { - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(jobConf.getClassLoader()); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf.getClassLoader()); try { String jobDirPath = jobConf.get(MRJobConfig.MAPREDUCE_JOB_DIR); @@ -228,7 +235,7 @@ public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, Ignite } } finally { - HadoopUtils.restoreContextClassLoader(oldLdr); + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } @@ -255,7 +262,7 @@ public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, Ignite // Note that the classloader identified by the task it was initially created for, // but later it may be reused for other tasks. 
HadoopClassLoader ldr = new HadoopClassLoader(rsrcMgr.classPath(), - HadoopClassLoader.nameForTask(info, false), libNames); + HadoopClassLoader.nameForTask(info, false), libNames, helper); cls = (Class)ldr.loadClass(HadoopV2TaskContext.class.getName()); @@ -301,13 +308,13 @@ public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, Ignite this.locNodeId = locNodeId; - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(getClass().getClassLoader()); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader()); try { rsrcMgr.prepareJobEnvironment(!external, jobLocalDir(locNodeId, jobId)); } finally { - HadoopUtils.restoreContextClassLoader(oldLdr); + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } @@ -382,7 +389,7 @@ public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, Ignite * @throws Exception On error. */ private void stopHadoopFsDaemons(ClassLoader ldr) throws Exception { - Class daemonCls = ldr.loadClass(HadoopClassLoader.HADOOP_DAEMON_CLASS_NAME); + Class daemonCls = ldr.loadClass(HadoopClassLoader.CLS_DAEMON); Method m = daemonCls.getMethod("dequeueAndStopAll"); diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2JobResourceManager.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2JobResourceManager.java index 33aef60c63039..3984f838aa46f 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2JobResourceManager.java @@ -15,18 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.FileSystemException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -36,13 +26,24 @@ import org.apache.hadoop.util.RunJar; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; import org.apache.ignite.internal.processors.hadoop.HadoopJobId; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemsUtils; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.FileSystemException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; + /** * Provides all resources are needed to the job execution. 
Downloads the main jar, the configuration and additional * files are needed to be placed on local files system. @@ -95,7 +96,7 @@ public HadoopV2JobResourceManager(HadoopJobId jobId, JobContextImpl ctx, IgniteL private void setLocalFSWorkingDirectory(File dir) throws IOException { JobConf cfg = ctx.getJobConf(); - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(cfg.getClassLoader()); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(cfg.getClassLoader()); try { cfg.set(HadoopFileSystemsUtils.LOC_FS_WORK_DIR_PROP, dir.getAbsolutePath()); @@ -104,7 +105,7 @@ private void setLocalFSWorkingDirectory(File dir) throws IOException { FileSystem.getLocal(cfg).setWorkingDirectory(new Path(dir.getAbsolutePath())); } finally { - HadoopUtils.restoreContextClassLoader(oldLdr); + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } @@ -176,7 +177,7 @@ else if (!jobLocDir.mkdirs()) * Process list of resources. * * @param jobLocDir Job working directory. - * @param files Array of {@link java.net.URI} or {@link org.apache.hadoop.fs.Path} to process resources. + * @param files Array of {@link URI} or {@link org.apache.hadoop.fs.Path} to process resources. * @param download {@code true}, if need to download. Process class path only else. * @param extract {@code true}, if need to extract archive. * @param clsPathUrls Collection to add resource as classpath resource. diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2MapTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2MapTask.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2MapTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2MapTask.java index fafa79b704920..418df4eb759f4 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2MapTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2MapTask.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; import org.apache.hadoop.mapred.JobContextImpl; import org.apache.hadoop.mapreduce.InputFormat; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Partitioner.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Partitioner.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Partitioner.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Partitioner.java index e199ede44e485..5a82dcfe8981c 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Partitioner.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Partitioner.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Partitioner; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2ReduceTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2ReduceTask.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2ReduceTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2ReduceTask.java index e5c2ed2164e74..930ec1d49746e 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2ReduceTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2ReduceTask.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; import org.apache.hadoop.mapred.JobContextImpl; import org.apache.hadoop.mapreduce.OutputFormat; diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2SetupTask.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2SetupTask.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2SetupTask.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2SetupTask.java index 49b5ee7e23034..b466019f63e96 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2SetupTask.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2SetupTask.java @@ -15,9 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.IOException; import org.apache.hadoop.mapred.JobContextImpl; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; @@ -25,6 +24,8 @@ import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; +import java.io.IOException; + /** * Hadoop setup task (prepares job). */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Splitter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Splitter.java similarity index 96% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Splitter.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Splitter.java index f4ed668eceeb1..667ef1e4a6e8f 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Splitter.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Splitter.java @@ -15,13 +15,8 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.DataInput; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; @@ -31,9 +26,15 @@ import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils; import org.jetbrains.annotations.Nullable; +import java.io.DataInput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + /** * Hadoop API v2 splitter. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Task.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Task.java similarity index 98% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Task.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Task.java index 1383a613f6fef..ee8bd98e305bf 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Task.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2Task.java @@ -15,9 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.IOException; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; @@ -29,6 +28,8 @@ import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; import org.jetbrains.annotations.Nullable; +import java.io.IOException; + /** * Extended Hadoop v2 task. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2TaskContext.java similarity index 89% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2TaskContext.java index 4b1121c45e8ed..96fa89218250c 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopV2TaskContext.java @@ -15,15 +15,8 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.DataInput; -import java.io.File; -import java.io.IOException; -import java.security.PrivilegedExceptionAction; -import java.util.Comparator; -import java.util.UUID; -import java.util.concurrent.Callable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -47,39 +40,49 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; +import org.apache.ignite.internal.processors.hadoop.HadoopExternalSplit; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; import org.apache.ignite.internal.processors.hadoop.HadoopJob; import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.processors.hadoop.HadoopPartitioner; import org.apache.ignite.internal.processors.hadoop.HadoopSerialization; +import org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapper; import org.apache.ignite.internal.processors.hadoop.HadoopTask; import org.apache.ignite.internal.processors.hadoop.HadoopTaskCancelledException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; import org.apache.ignite.internal.processors.hadoop.HadoopTaskType; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounter; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCountersImpl; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopLazyConcurrentMap; -import org.apache.ignite.internal.processors.hadoop.v1.HadoopV1CleanupTask; -import org.apache.ignite.internal.processors.hadoop.v1.HadoopV1MapTask; -import org.apache.ignite.internal.processors.hadoop.v1.HadoopV1Partitioner; -import org.apache.ignite.internal.processors.hadoop.v1.HadoopV1ReduceTask; -import org.apache.ignite.internal.processors.hadoop.v1.HadoopV1SetupTask; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopLazyConcurrentMap; +import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1CleanupTask; +import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1MapTask; +import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1Partitioner; +import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1ReduceTask; +import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1SetupTask; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.A; import org.jetbrains.annotations.Nullable; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.jobLocalDir; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.taskLocalDir; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.transformException; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.unwrapSplit; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.FsCacheKey; -import static 
org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.createHadoopLazyConcurrentMap; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.fileSystemForMrUserWithCaching; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; +import java.io.DataInput; +import java.io.File; +import java.io.IOException; +import java.security.PrivilegedExceptionAction; +import java.util.Comparator; +import java.util.UUID; +import java.util.concurrent.Callable; + +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.jobLocalDir; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.taskLocalDir; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.transformException; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.unwrapSplit; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.FsCacheKey; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.createHadoopLazyConcurrentMap; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.fileSystemForMrUserWithCaching; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; /** * Context for task execution. @@ -159,7 +162,7 @@ public HadoopV2TaskContext(HadoopTaskInfo taskInfo, HadoopJob job, HadoopJobId j this.locNodeId = locNodeId; // Before create JobConf instance we should set new context class loader. - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(getClass().getClassLoader()); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader()); try { JobConf jobConf = new JobConf(); @@ -181,7 +184,7 @@ public HadoopV2TaskContext(HadoopTaskInfo taskInfo, HadoopJob job, HadoopJobId j useNewCombiner = jobConf.getCombinerClass() == null; } finally { - HadoopUtils.restoreContextClassLoader(oldLdr); + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } @@ -230,7 +233,7 @@ private HadoopTask createTask() { /** {@inheritDoc} */ @Override public void run() throws IgniteCheckedException { - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(jobConf().getClassLoader()); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf().getClassLoader()); try { try { @@ -259,7 +262,7 @@ private HadoopTask createTask() { finally { task = null; - HadoopUtils.restoreContextClassLoader(oldLdr); + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } @@ -290,7 +293,7 @@ private HadoopTask createTask() { locDir = jobLocalDir(locNodeId, taskInfo().jobId()); } - ClassLoader oldLdr = HadoopUtils.setContextClassLoader(jobConf().getClassLoader()); + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf().getClassLoader()); try { FileSystem.get(jobConf()); @@ -306,7 +309,7 @@ private HadoopTask createTask() { throw transformException(e); } finally { - HadoopUtils.restoreContextClassLoader(oldLdr); + HadoopCommonUtils.restoreContextClassLoader(oldLdr); } } diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopWritableSerialization.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopWritableSerialization.java similarity index 97% rename from modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopWritableSerialization.java 
rename to modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopWritableSerialization.java index f46f06800689e..e612f1bccfb36 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopWritableSerialization.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/v2/HadoopWritableSerialization.java @@ -15,17 +15,18 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.v2; +package org.apache.ignite.internal.processors.hadoop.impl.v2; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; import org.apache.hadoop.io.Writable; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.hadoop.HadoopSerialization; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; + /** * Optimized serialization for Hadoop {@link Writable} types. */ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java index f3e17f350aae9..bffb82b1e9180 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java @@ -46,6 +46,7 @@ import org.apache.ignite.internal.processors.cache.GridCacheAdapter; import org.apache.ignite.internal.processors.cache.IgniteInternalCache; import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; import org.apache.ignite.internal.processors.hadoop.HadoopComponent; import org.apache.ignite.internal.processors.hadoop.HadoopContext; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; @@ -58,14 +59,12 @@ import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlanner; import org.apache.ignite.internal.processors.hadoop.HadoopTaskCancelledException; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounterWriter; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCountersImpl; import org.apache.ignite.internal.processors.hadoop.counter.HadoopPerformanceCounter; import org.apache.ignite.internal.processors.hadoop.taskexecutor.HadoopTaskStatus; import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.HadoopProcessDescriptor; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job; import org.apache.ignite.internal.util.GridMutex; import org.apache.ignite.internal.util.GridSpinReadWriteLock; import org.apache.ignite.internal.util.future.GridFinishedFuture; @@ -153,23 +152,16 @@ public class HadoopJobTracker extends HadoopComponent { evtProcSvc = Executors.newFixedThreadPool(1); - UUID nodeId = ctx.localNodeId(); - assert jobCls == null; - String[] libNames = null; - - if (ctx.configuration() != null) - libNames = ctx.configuration().getNativeLibraryNames(); - - HadoopClassLoader ldr = new HadoopClassLoader(null, HadoopClassLoader.nameForJob(nodeId), 
libNames); + HadoopClassLoader ldr = ctx.kernalContext().hadoopHelper().commonClassLoader(); try { - jobCls = (Class)ldr.loadClass(HadoopV2Job.class.getName()); + jobCls = (Class)ldr.loadClass(HadoopCommonUtils.JOB_CLS_NAME); } catch (Exception ioe) { - throw new IgniteCheckedException("Failed to load job class [class=" - + HadoopV2Job.class.getName() + ']', ioe); + throw new IgniteCheckedException("Failed to load job class [class=" + + HadoopCommonUtils.JOB_CLS_NAME + ']', ioe); } } @@ -903,7 +895,7 @@ private void processJobMetaUpdate(HadoopJobId jobId, HadoopJobMetadata meta, UUI ClassLoader ldr = job.getClass().getClassLoader(); try { - String statWriterClsName = job.info().property(HadoopUtils.JOB_COUNTER_WRITER_PROPERTY); + String statWriterClsName = job.info().property(HadoopCommonUtils.JOB_COUNTER_WRITER_PROPERTY); if (statWriterClsName != null) { Class cls = ldr.loadClass(statWriterClsName); @@ -1060,7 +1052,8 @@ private JobLocalState initState(HadoopJobId jobId) { jobInfo = meta.jobInfo(); } - job = jobInfo.createJob(jobCls, jobId, log, ctx.configuration().getNativeLibraryNames()); + job = jobInfo.createJob(jobCls, jobId, log, ctx.configuration().getNativeLibraryNames(), + ctx.kernalContext().hadoopHelper()); job.initialize(false, ctx.localNodeId()); diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java index 15c62c8dc842a..7aaf3faece580 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/planner/HadoopDefaultMapReducePlan.java @@ -17,13 +17,14 @@ package org.apache.ignite.internal.processors.hadoop.planner; -import java.util.Collection; -import java.util.Map; -import java.util.UUID; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan; import org.jetbrains.annotations.Nullable; +import java.util.Collection; +import java.util.Map; +import java.util.UUID; + /** * Map-reduce plan. 
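The job tracker hunk above stops importing HadoopV2Job and instead loads the job class by name through the helper-provided class loader. A minimal sketch of that lookup, not part of the patch; HadoopCommonUtils.JOB_CLS_NAME and the IgniteCheckedException wrapping come from the hunk, while the typed return value is illustrative (the original code uses a raw Class):

import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils;
import org.apache.ignite.internal.processors.hadoop.HadoopJob;

/** Illustrative sketch of resolving the v2 job class without a compile-time dependency on it. */
public class JobClassResolverSketch {
    /**
     * @param ldr Loader able to see the Hadoop implementation classes, e.g. the helper's common class loader.
     * @return Job implementation class.
     * @throws IgniteCheckedException If the class cannot be loaded.
     */
    @SuppressWarnings("unchecked")
    public static Class<? extends HadoopJob> jobClass(ClassLoader ldr) throws IgniteCheckedException {
        try {
            // JOB_CLS_NAME names the HadoopV2Job implementation, so only a string constant
            // crosses the boundary between the tracker and the impl package.
            return (Class<? extends HadoopJob>)ldr.loadClass(HadoopCommonUtils.JOB_CLS_NAME);
        }
        catch (ClassNotFoundException e) {
            throw new IgniteCheckedException("Failed to load job class [class=" +
                HadoopCommonUtils.JOB_CLS_NAME + ']', e);
        }
    }
}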
*/ diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java index 4a946e938a9ed..45d9a27c51dec 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java @@ -21,8 +21,11 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.internal.IgniteInternalFuture; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; +import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl; import org.apache.ignite.internal.processors.hadoop.HadoopJob; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; @@ -44,7 +47,6 @@ import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.HadoopTaskFinishedMessage; import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.communication.HadoopExternalCommunication; import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.communication.HadoopMessageListener; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.lang.IgniteInClosure2X; import org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory; @@ -126,6 +128,7 @@ public void start(HadoopExternalCommunication comm, HadoopProcessDescriptor node * * @param req Initialization request. */ + @SuppressWarnings("unchecked") private void prepareProcess(HadoopPrepareForJobRequest req) { if (initGuard.compareAndSet(false, true)) { try { @@ -134,7 +137,16 @@ private void prepareProcess(HadoopPrepareForJobRequest req) { assert job == null; - job = req.jobInfo().createJob(HadoopV2Job.class, req.jobId(), log, null); + Class jobCls; + + try { + jobCls = Class.forName(HadoopCommonUtils.JOB_CLS_NAME); + } + catch (ClassNotFoundException e) { + throw new IgniteException("Failed to load job class: " + HadoopCommonUtils.JOB_CLS_NAME, e); + } + + job = req.jobInfo().createJob(jobCls, req.jobId(), log, null, new HadoopHelperImpl()); job.initialize(true, nodeDesc.processId()); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoaderTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoaderTest.java deleted file mode 100644 index 02d98d0a89f34..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoaderTest.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop; - -import javax.security.auth.AuthPermission; -import junit.framework.TestCase; -import org.apache.hadoop.conf.Configuration; -import org.apache.ignite.internal.processors.hadoop.deps.CircularWIthHadoop; -import org.apache.ignite.internal.processors.hadoop.deps.CircularWithoutHadoop; -import org.apache.ignite.internal.processors.hadoop.deps.WithIndirectField; -import org.apache.ignite.internal.processors.hadoop.deps.WithCast; -import org.apache.ignite.internal.processors.hadoop.deps.WithClassAnnotation; -import org.apache.ignite.internal.processors.hadoop.deps.WithConstructorInvocation; -import org.apache.ignite.internal.processors.hadoop.deps.WithMethodCheckedException; -import org.apache.ignite.internal.processors.hadoop.deps.WithMethodRuntimeException; -import org.apache.ignite.internal.processors.hadoop.deps.WithExtends; -import org.apache.ignite.internal.processors.hadoop.deps.WithField; -import org.apache.ignite.internal.processors.hadoop.deps.WithImplements; -import org.apache.ignite.internal.processors.hadoop.deps.WithInitializer; -import org.apache.ignite.internal.processors.hadoop.deps.WithInnerClass; -import org.apache.ignite.internal.processors.hadoop.deps.WithLocalVariable; -import org.apache.ignite.internal.processors.hadoop.deps.WithMethodAnnotation; -import org.apache.ignite.internal.processors.hadoop.deps.WithMethodInvocation; -import org.apache.ignite.internal.processors.hadoop.deps.WithMethodArgument; -import org.apache.ignite.internal.processors.hadoop.deps.WithMethodReturnType; -import org.apache.ignite.internal.processors.hadoop.deps.WithOuterClass; -import org.apache.ignite.internal.processors.hadoop.deps.WithParameterAnnotation; -import org.apache.ignite.internal.processors.hadoop.deps.WithStaticField; -import org.apache.ignite.internal.processors.hadoop.deps.WithStaticInitializer; -import org.apache.ignite.internal.processors.hadoop.deps.Without; - -/** - * Tests for Hadoop classloader. - */ -public class HadoopClassLoaderTest extends TestCase { - /** */ - final HadoopClassLoader ldr = new HadoopClassLoader(null, "test", null); - - /** - * @throws Exception If failed. - */ - public void testClassLoading() throws Exception { - assertNotSame(CircularWIthHadoop.class, ldr.loadClass(CircularWIthHadoop.class.getName())); - assertNotSame(CircularWithoutHadoop.class, ldr.loadClass(CircularWithoutHadoop.class.getName())); - - assertSame(Without.class, ldr.loadClass(Without.class.getName())); - } - - /** - * Test dependency search. 
- */ - public void testDependencySearch() { - // Positive cases: - final Class[] positiveClasses = { - Configuration.class, - HadoopUtils.class, - WithStaticField.class, - WithCast.class, - WithClassAnnotation.class, - WithConstructorInvocation.class, - WithMethodCheckedException.class, - WithMethodRuntimeException.class, - WithExtends.class, - WithField.class, - WithImplements.class, - WithInitializer.class, - WithInnerClass.class, - WithOuterClass.InnerNoHadoop.class, - WithLocalVariable.class, - WithMethodAnnotation.class, - WithMethodInvocation.class, - WithMethodArgument.class, - WithMethodReturnType.class, - WithParameterAnnotation.class, - WithStaticField.class, - WithStaticInitializer.class, - WithIndirectField.class, - CircularWIthHadoop.class, - CircularWithoutHadoop.class, - }; - - for (Class c: positiveClasses) - assertTrue(c.getName(), ldr.hasExternalDependencies(c.getName())); - - // Negative cases: - final Class[] negativeClasses = { - Object.class, - AuthPermission.class, - Without.class, - }; - - for (Class c: negativeClasses) - assertFalse(c.getName(), ldr.hasExternalDependencies(c.getName())); - } -} \ No newline at end of file diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSharedMap.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSharedMap.java index 7552028a93d79..fd72821c3f4b7 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSharedMap.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSharedMap.java @@ -43,6 +43,7 @@ private HadoopSharedMap() { * @param key Key. * @param val Value. */ + @SuppressWarnings("unchecked") public T put(String key, T val) { Object old = map.putIfAbsent(key, val); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestClassLoader.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestClassLoader.java new file mode 100644 index 0000000000000..c5302f811c1d1 --- /dev/null +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestClassLoader.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import org.apache.ignite.IgniteException; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.X; + +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * Hadoop test class loader aimed to provide better isolation. + */ +public class HadoopTestClassLoader extends URLClassLoader { + /** Parent class loader. 
*/ + private static final URLClassLoader APP_CLS_LDR = (URLClassLoader)HadoopTestClassLoader.class.getClassLoader(); + + /** */ + private static final Collection APP_JARS = F.asList(APP_CLS_LDR.getURLs()); + + /** All participating URLs. */ + private static final URL[] URLS; + + static { + try { + List res = new ArrayList<>(); + + for (URL url : APP_JARS) { + String urlStr = url.toString(); + + if (urlStr.contains("modules/hadoop/")) + res.add(url); + } + + res.addAll(HadoopClasspathUtils.classpathForClassLoader()); + + X.println(">>> " + HadoopTestClassLoader.class.getSimpleName() + " static paths:"); + + for (URL url : res) + X.println(">>> \t" + url.toString()); + + URLS = res.toArray(new URL[res.size()]); + } + catch (Exception e) { + throw new IgniteException("Failed to initialize class loader JARs.", e); + } + } + + /** + * Constructor. + */ + public HadoopTestClassLoader() { + super(URLS, APP_CLS_LDR); + } + + /** {@inheritDoc} */ + @Override protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + if (HadoopClassLoader.loadByCurrentClassloader(name)) { + try { + synchronized (getClassLoadingLock(name)) { + // First, check if the class has already been loaded + Class c = findLoadedClass(name); + + if (c == null) { + long t1 = System.nanoTime(); + + c = findClass(name); + + // this is the defining class loader; record the stats + sun.misc.PerfCounter.getFindClassTime().addElapsedTimeFrom(t1); + sun.misc.PerfCounter.getFindClasses().increment(); + } + + if (resolve) + resolveClass(c); + + return c; + } + } + catch (NoClassDefFoundError | ClassNotFoundException e) { + throw new IgniteException("Failed to load class by test class loader: " + name, e); + } + } + + return super.loadClass(name, resolve); + } +} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWithoutHadoop.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWithoutHadoop.java deleted file mode 100644 index 93d659ccd9f09..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/CircularWithoutHadoop.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
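The HadoopTestClassLoader added above defines Hadoop-related classes itself and delegates everything else to the application class loader. A short usage sketch, not part of the patch; the class name passed to loadClass is a placeholder, since which classes are actually isolated is decided by HadoopClassLoader.loadByCurrentClassloader:

import org.apache.ignite.internal.processors.hadoop.HadoopTestClassLoader;

/** Illustrative sketch of loading a test class through the isolating loader. */
public class HadoopTestClassLoaderUsage {
    public static void main(String[] args) throws Exception {
        ClassLoader ldr = new HadoopTestClassLoader();

        // Placeholder class name: names for which HadoopClassLoader.loadByCurrentClassloader(name)
        // returns true are defined by this loader, the rest are delegated to the parent loader.
        Class<?> cls = ldr.loadClass("org.apache.ignite.internal.processors.hadoop.HadoopSharedMap");

        System.out.println(cls.getName() + " loaded by " + cls.getClassLoader());
    }
}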
- */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -/** - * Does not have direct Hadoop dependency, but has a circular - */ -@SuppressWarnings("unused") -public class CircularWithoutHadoop { - /** */ - private CircularWIthHadoop x; -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithCast.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithCast.java deleted file mode 100644 index 5b1e8e0e4abb7..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithCast.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.fs.FileSystem; - -/** - * Class contains casting to a Hadoop type. - */ -@SuppressWarnings("unused") -public abstract class WithCast { - /** */ - public abstract T create(); - - /** */ - public void consume(T t) { - // noop - } - - /** */ - void test(WithCast c) { - FileSystem fs = c.create(); - - c.consume(fs); - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithConstructorInvocation.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithConstructorInvocation.java deleted file mode 100644 index 98c8991d3cc47..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithConstructorInvocation.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.conf.Configuration; - -/** - * Invokes a Hadoop type constructor. 
- */ -@SuppressWarnings("unused") -public class WithConstructorInvocation { - /** */ - private void foo() { - Object x = new Configuration(); - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithExtends.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithExtends.java deleted file mode 100644 index 80c99e1e2e769..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithExtends.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.fs.LocalFileSystem; - -/** - * Class extends a Hadoop class. - */ -public class WithExtends extends LocalFileSystem { - // noop -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithField.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithField.java deleted file mode 100644 index dd979db104f63..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithField.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.conf.Configuration; - -/** - * Has a Hadoop field. 
- */ -@SuppressWarnings("unused") -public class WithField { - /** */ - private Configuration conf; -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithIndirectField.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithIndirectField.java deleted file mode 100644 index ce078f19fc1a8..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithIndirectField.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -/** - * Has a unidirected dependency on Hadoop-dependent class. - */ -@SuppressWarnings("unused") -public class WithIndirectField { - /** */ - WithField x; -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithInnerClass.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithInnerClass.java deleted file mode 100644 index 4a5a49c70fcaf..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithInnerClass.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.conf.Configurable; - -/** - * Has a *static* inner class depending on Hadoop. - */ -@SuppressWarnings("unused") -public class WithInnerClass { - /** */ - private static abstract class Foo implements Configurable { - // No-op. 
- } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithLocalVariable.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithLocalVariable.java deleted file mode 100644 index ea4a5de6b9e7a..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithLocalVariable.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.conf.Configuration; - -/** - * Has a local variable of Hadoop type. - */ -@SuppressWarnings({"unused", "ConstantConditions"}) -public class WithLocalVariable { - /** */ - void foo() { - Configuration c = null; - - moo(c); - } - - /** */ - void moo(Object x) { - - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodAnnotation.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodAnnotation.java deleted file mode 100644 index ff9fbe0df8043..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodAnnotation.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.classification.InterfaceStability; - -/** - * Method has a Hadoop annotation. - */ -@SuppressWarnings("unused") -public class WithMethodAnnotation { - /** */ - @InterfaceStability.Unstable - void foo() { - // No-op. 
- } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodArgument.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodArgument.java deleted file mode 100644 index 7f639e41df1a9..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodArgument.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.conf.Configuration; - -/** - * Contains a formal parameter of Hadoop type. - */ -@SuppressWarnings("unused") -public class WithMethodArgument { - /** */ - protected void paramaterMethod(Configuration c) { - // No-op. - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodCheckedException.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodCheckedException.java deleted file mode 100644 index 8fd12ae5cc86e..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodCheckedException.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.fs.ChecksumException; - -/** - * Method declares a checked Hadoop Exception. - */ -@SuppressWarnings("unused") -public class WithMethodCheckedException { - /** */ - void foo() throws ChecksumException { - // No-op. 
- } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodInvocation.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodInvocation.java deleted file mode 100644 index de8b306e4ac27..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodInvocation.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.fs.FileSystem; - -/** - * Method contains a Hadoop type method invocation. - */ -@SuppressWarnings("unused") -public class WithMethodInvocation { - /** */ - void foo(FileSystem fs) { - fs.getChildFileSystems(); - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodReturnType.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodReturnType.java deleted file mode 100644 index 0e0ea7282463b..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodReturnType.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.fs.FileSystem; - -/** - * Contains a method return value of Hadoop type. 
- */ -@SuppressWarnings("unused") -public class WithMethodReturnType { - /** */ - FileSystem fsMethod() { - return null; - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodRuntimeException.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodRuntimeException.java deleted file mode 100644 index dcd471c95d67f..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithMethodRuntimeException.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.HadoopIllegalArgumentException; - -/** - * Method declares a runtime Hadoop Exception. - */ -@SuppressWarnings("unused") -public class WithMethodRuntimeException { - /** */ - void foo() throws HadoopIllegalArgumentException { - // No-op. - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithParameterAnnotation.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithParameterAnnotation.java deleted file mode 100644 index 9d3414e010857..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithParameterAnnotation.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.classification.InterfaceStability; - -/** - * Has a paramater annotated with a Hadoop annotation. - */ -@SuppressWarnings("unused") -public class WithParameterAnnotation { - /** */ - void foo(@InterfaceStability.Stable Object annotatedParam) { - // No-op. 
- } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticField.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticField.java deleted file mode 100644 index 301b9129b4d8a..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticField.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import org.apache.hadoop.fs.FileSystem; - -/** - * Has a static field of Hadoop type. - */ -@SuppressWarnings("unused") -public class WithStaticField { - /** */ - static FileSystem fs; -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticInitializer.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticInitializer.java deleted file mode 100644 index e0fc2f3578fb5..0000000000000 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithStaticInitializer.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.internal.processors.hadoop.deps; - -import java.util.List; -import org.apache.hadoop.fs.FileSystem; - -/** - * Uses Hadoop type in a static initializer. 
- */ -@SuppressWarnings("unused") -public class WithStaticInitializer { - /** */ - static final List x; - - static { - x = FileSystem.getAllStatistics(); - } -} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractMapReduceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractMapReduceTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractMapReduceTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractMapReduceTest.java index 37312133ea5ac..89005f60b44e4 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractMapReduceTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractMapReduceTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.BufferedReader; import java.io.IOException; @@ -49,10 +49,12 @@ import org.apache.ignite.igfs.IgfsUserContext; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem; import org.apache.ignite.internal.IgniteInternalFuture; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; import org.apache.ignite.internal.processors.hadoop.counter.HadoopPerformanceCounter; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount1; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount2; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount1; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2; import org.apache.ignite.internal.processors.igfs.IgfsEx; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.util.lang.GridAbsPredicate; @@ -68,8 +70,7 @@ import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheMode.REPLICATED; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.JOB_COUNTER_WRITER_PROPERTY; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Abstract test of whole cycle of map-reduce processing via Job tracker. 
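Note on the deletions above: each removed class under org.apache.ignite.internal.processors.hadoop.deps isolates exactly one way a class can pick up a dependency on a Hadoop type (method argument, checked exception, method invocation, return type, runtime exception, parameter annotation, static field, static initializer), and the class-loader dependency tests load them reflectively to verify detection. A minimal sketch of the same pattern for a plain instance field follows; the class name WithInstanceField is illustrative only and is not part of this patch.

package org.apache.ignite.internal.processors.hadoop.deps;

import org.apache.hadoop.fs.FileSystem;

/**
 * Has an instance field of Hadoop type (illustrative sketch mirroring the deleted deps classes).
 */
@SuppressWarnings("unused")
public class WithInstanceField {
    /** A field of a Hadoop type is enough to create the dependency. */
    private FileSystem fs;
}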
@@ -175,7 +176,7 @@ protected final void doTest(IgfsPath inFile, boolean useNewMapper, boolean useNe JobConf jobConf = new JobConf(); - jobConf.set(JOB_COUNTER_WRITER_PROPERTY, IgniteHadoopFileSystemCounterWriter.class.getName()); + jobConf.set(HadoopCommonUtils.JOB_COUNTER_WRITER_PROPERTY, IgniteHadoopFileSystemCounterWriter.class.getName()); jobConf.setUser(USER); jobConf.set(IgniteHadoopFileSystemCounterWriter.COUNTER_WRITER_DIR_PROPERTY, "/xxx/${USER}/zzz"); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractSelfTest.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractSelfTest.java index fb1698885ea28..68009dd7d3949 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.File; import org.apache.hadoop.conf.Configuration; @@ -28,7 +28,7 @@ import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; import org.apache.ignite.igfs.IgfsIpcEndpointType; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemsUtils; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils; import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractWordCountTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractWordCountTest.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractWordCountTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractWordCountTest.java index e45c1275905a5..3cb8f914db5f5 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractWordCountTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopAbstractWordCountTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import com.google.common.base.Joiner; import java.io.BufferedReader; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopCommandLineTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopCommandLineTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java index 7ee318a283f9e..0be8bf9ac663b 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopCommandLineTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopCommandLineTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import com.google.common.base.Joiner; import java.io.BufferedReader; @@ -37,6 +37,8 @@ import org.apache.ignite.igfs.IgfsInputStream; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgnitionEx; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobTracker; import org.apache.ignite.internal.processors.igfs.IgfsEx; import org.apache.ignite.internal.processors.resource.GridSpringResourceContext; @@ -183,7 +185,7 @@ private void generateHiveTestFiles() throws FileNotFoundException { if (line.startsWith("")) out.println( " \n" + - " " + HadoopUtils.JOB_COUNTER_WRITER_PROPERTY + "\n" + + " " + HadoopCommonUtils.JOB_COUNTER_WRITER_PROPERTY + "\n" + " " + IgniteHadoopFileSystemCounterWriter.class.getName() + "\n" + " \n"); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultMapReducePlannerSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopDefaultMapReducePlannerSelfTest.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultMapReducePlannerSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopDefaultMapReducePlannerSelfTest.java index a69b72a161567..ee1c88f05ef97 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultMapReducePlannerSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopDefaultMapReducePlannerSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteFileSystem; @@ -23,6 +23,10 @@ import org.apache.ignite.hadoop.mapreduce.IgniteHadoopMapReducePlanner; import org.apache.ignite.igfs.IgfsBlockLocation; import org.apache.ignite.igfs.IgfsPath; +import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock; +import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; +import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan; +import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlanner; import org.apache.ignite.internal.processors.hadoop.planner.HadoopAbstractMapReducePlanner; import org.apache.ignite.internal.processors.igfs.IgfsBlockLocationImpl; import org.apache.ignite.internal.processors.igfs.IgfsIgniteMock; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopErrorSimulator.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopErrorSimulator.java similarity index 99% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopErrorSimulator.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopErrorSimulator.java index 843b42ba540d8..b89dcc124eca3 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopErrorSimulator.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopErrorSimulator.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopFileSystemsTest.java similarity index 97% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopFileSystemsTest.java index 946ba77d63313..252d6cb4af907 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopFileSystemsTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.IOException; import java.net.URI; @@ -25,7 +25,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemsUtils; +import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils; import org.apache.ignite.testframework.GridTestUtils; /** diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopGroupingTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopGroupingTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopGroupingTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopGroupingTest.java index db87e331e4dfc..19c71e84e9778 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopGroupingTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopGroupingTest.java @@ -15,17 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Random; -import java.util.Set; -import java.util.UUID; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableComparable; @@ -41,24 +32,28 @@ import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.ignite.configuration.HadoopConfiguration; -import org.apache.ignite.internal.util.GridConcurrentHashSet; +import org.apache.ignite.internal.processors.hadoop.state.HadoopGroupingTestState; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.util.GridRandom; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.S; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Random; +import java.util.Set; +import java.util.UUID; + +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Grouping test. */ public class HadoopGroupingTest extends HadoopAbstractSelfTest { - /** */ - private static final String PATH_OUTPUT = "/test-out"; - - /** */ - private static final GridConcurrentHashSet vals = HadoopSharedMap.map(HadoopGroupingTest.class) - .put("vals", new GridConcurrentHashSet()); - /** {@inheritDoc} */ @Override protected int gridCount() { return 3; @@ -108,7 +103,7 @@ public void testGroupingCombiner() throws Exception { * @throws Exception If failed. 
*/ public void doTestGrouping(boolean combiner) throws Exception { - vals.clear(); + HadoopGroupingTestState.values().clear(); Job job = Job.getInstance(); @@ -134,7 +129,7 @@ public void doTestGrouping(boolean combiner) throws Exception { grid(0).hadoop().submit(new HadoopJobId(UUID.randomUUID(), 2), createJobInfo(job.getConfiguration())).get(30000); - assertTrue(vals.isEmpty()); + assertTrue(HadoopGroupingTestState.values().isEmpty()); } public static class MyReducer extends Reducer { @@ -160,7 +155,7 @@ public static class MyReducer extends Reducer { @Override public Text getCurrentValue() { UUID id = UUID.randomUUID(); - assertTrue(vals.add(id)); + assertTrue(HadoopGroupingTestState.values().add(id)); val.set(id.toString()); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopJobTrackerSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java similarity index 92% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopJobTrackerSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java index 9e268b7771e21..a3bf49c59c22f 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopJobTrackerSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopJobTrackerSelfTest.java @@ -15,18 +15,15 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.UUID; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; @@ -41,9 +38,16 @@ import org.apache.ignite.configuration.HadoopConfiguration; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteKernal; +import org.apache.ignite.internal.processors.hadoop.Hadoop; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopJobStatus; import org.apache.ignite.internal.util.typedef.internal.U; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopJobTrackerSelfTestState.combineExecCnt; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopJobTrackerSelfTestState.latch; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopJobTrackerSelfTestState.mapExecCnt; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopJobTrackerSelfTestState.reduceExecCnt; /** * Job tracker self test. @@ -55,21 +59,6 @@ public class HadoopJobTrackerSelfTest extends HadoopAbstractSelfTest { /** Test block count parameter name. */ private static final int BLOCK_CNT = 10; - /** */ - private static HadoopSharedMap m = HadoopSharedMap.map(HadoopJobTrackerSelfTest.class); - - /** Map task execution count. 
*/ - private static final AtomicInteger mapExecCnt = m.put("mapExecCnt", new AtomicInteger()); - - /** Reduce task execution count. */ - private static final AtomicInteger reduceExecCnt = m.put("reduceExecCnt", new AtomicInteger()); - - /** Reduce task execution count. */ - private static final AtomicInteger combineExecCnt = m.put("combineExecCnt", new AtomicInteger()); - - /** */ - private static final Map latch = m.put("latch", new HashMap()); - /** {@inheritDoc} */ @Override protected boolean igfsEnabled() { return true; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceEmbeddedSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceEmbeddedSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceEmbeddedSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceEmbeddedSelfTest.java index 25ef38298e1ef..b04deeb8e4017 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceEmbeddedSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceEmbeddedSelfTest.java @@ -15,10 +15,8 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; -import java.util.HashMap; -import java.util.Map; import java.util.UUID; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; @@ -35,19 +33,17 @@ import org.apache.ignite.configuration.HadoopConfiguration; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteInternalFuture; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount1; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount2; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount1; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopMapReduceEmbeddedSelfTestState.flags; /** * Tests map-reduce execution with embedded mode. 
*/ public class HadoopMapReduceEmbeddedSelfTest extends HadoopMapReduceTest { - /** */ - private static Map flags = HadoopSharedMap.map(HadoopMapReduceEmbeddedSelfTest.class) - .put("flags", new HashMap()); - /** {@inheritDoc} */ @Override public HadoopConfiguration hadoopConfiguration(String gridName) { HadoopConfiguration cfg = super.hadoopConfiguration(gridName); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceErrorResilienceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceErrorResilienceTest.java similarity index 97% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceErrorResilienceTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceErrorResilienceTest.java index dd12935d63bd4..afd6f26d92e69 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceErrorResilienceTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceErrorResilienceTest.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.igfs.IgfsPath; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount2; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2; /** * Test of error resiliency after an error in a map-reduce job execution. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceTest.java index b7038965f041c..feccb59193001 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopMapReduceTest.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.igfs.IgfsPath; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount2; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2; /** * Test of whole cycle of map-reduce processing via Job tracker. 
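The hunks above replace per-test static state (previously registered through HadoopSharedMap inside each test class) with references to dedicated holder classes under org.apache.ignite.internal.processors.hadoop.state, so the values stay reachable from job code running in separate Hadoop class loaders. The holder classes themselves are not shown in this part of the patch; the sketch below is an assumed shape for HadoopGroupingTestState, inferred from the fields removed from HadoopGroupingTest.

package org.apache.ignite.internal.processors.hadoop.state;

import java.util.UUID;
import org.apache.ignite.internal.processors.hadoop.HadoopSharedMap;
import org.apache.ignite.internal.util.GridConcurrentHashSet;

/**
 * Assumed sketch of the shared-state holder referenced by HadoopGroupingTest.
 */
public class HadoopGroupingTestState {
    /** Values shared across class loaders via HadoopSharedMap. */
    private static final GridConcurrentHashSet<UUID> vals =
        HadoopSharedMap.map(HadoopGroupingTestState.class).put("vals", new GridConcurrentHashSet<UUID>());

    /**
     * @return Shared value set.
     */
    public static GridConcurrentHashSet<UUID> values() {
        return vals;
    }
}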
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopNoHadoopMapReduceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopNoHadoopMapReduceTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopNoHadoopMapReduceTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopNoHadoopMapReduceTest.java index 0c172c37cf3bb..3bb873521e036 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopNoHadoopMapReduceTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopNoHadoopMapReduceTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.configuration.IgniteConfiguration; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopPlannerMockJob.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java similarity index 87% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopPlannerMockJob.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java index 88d0f80f4e440..220614c88244f 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopPlannerMockJob.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPlannerMockJob.java @@ -15,10 +15,17 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; +import org.apache.ignite.internal.processors.hadoop.HadoopHelper; +import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; import org.jetbrains.annotations.Nullable; import java.util.Collection; @@ -145,7 +152,7 @@ public JobInfo(int reducers) { /** {@inheritDoc} */ @Override public HadoopJob createJob(Class jobCls, HadoopJobId jobId, IgniteLogger log, - @Nullable String[] libNames) throws IgniteCheckedException { + @Nullable String[] libNames, HadoopHelper helper) throws IgniteCheckedException { throwUnsupported(); return null; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopPopularWordsTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPopularWordsTest.java similarity index 99% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopPopularWordsTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPopularWordsTest.java index 3f825b0c83a50..5a55430be1b4e 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopPopularWordsTest.java +++ 
b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopPopularWordsTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import com.google.common.collect.MinMaxPriorityQueue; import java.io.IOException; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSerializationWrapperSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSerializationWrapperSelfTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSerializationWrapperSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSerializationWrapperSelfTest.java index 789a6b3556375..5ccc8cea50115 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSerializationWrapperSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSerializationWrapperSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -27,7 +27,8 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.serializer.JavaSerialization; import org.apache.hadoop.io.serializer.WritableSerialization; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopSerializationWrapper; +import org.apache.ignite.internal.processors.hadoop.HadoopSerialization; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopSerializationWrapper; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; /** diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyFullMapReduceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSnappyFullMapReduceTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyFullMapReduceTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSnappyFullMapReduceTest.java index 27a5fcdebe7ae..e27c212d1952d 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyFullMapReduceTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSnappyFullMapReduceTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; /** * Same test as HadoopMapReduceTest, but with enabled Snappy output compression. 
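A related API change threads through several of these test diffs: createJob(...) now takes an additional HadoopHelper argument (see the HadoopPlannerMockJob hunk above and the HadoopTasksV1Test/HadoopTasksV2Test hunks below), and the test call sites pass a default HadoopHelperImpl. A minimal sketch of the updated call is shown here, assuming jobInfo and log are fields of the surrounding test class; HadoopJob, HadoopJobId, HadoopDefaultJobInfo and HadoopHelperImpl come from org.apache.ignite.internal.processors.hadoop, HadoopV2Job from the new impl.v2 package.

/**
 * Illustrative sketch only: builds a job through the updated factory method.
 */
private HadoopJob createTestJob(HadoopDefaultJobInfo jobInfo) throws Exception {
    HadoopJobId jobId = new HadoopJobId(UUID.randomUUID(), 0);

    // New last argument: the HadoopHelper used when instantiating the job.
    return jobInfo.createJob(HadoopV2Job.class, jobId, log, null, new HadoopHelperImpl());
}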
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSnappyTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSnappyTest.java index b4e3dc2bc0f26..80ff7547e2988 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSnappyTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -28,6 +28,8 @@ import org.apache.hadoop.io.compress.SnappyCodec; import org.apache.hadoop.io.compress.snappy.SnappyCompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader; +import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; @@ -49,7 +51,7 @@ public void testSnappy() throws Throwable { // Run the same in several more class loaders simulating jobs and tasks: for (int i = 0; i < 2; i++) { - ClassLoader hadoopClsLdr = new HadoopClassLoader(null, "cl-" + i, null); + ClassLoader hadoopClsLdr = new HadoopClassLoader(null, "cl-" + i, null, new HadoopHelperImpl()); Class cls = (Class)Class.forName(HadoopSnappyTest.class.getName(), true, hadoopClsLdr); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSortingExternalTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSortingExternalTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSortingExternalTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSortingExternalTest.java index dff5e7008ec2c..eb4a7d425112d 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSortingExternalTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSortingExternalTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.configuration.HadoopConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSortingTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSortingTest.java similarity index 97% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSortingTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSortingTest.java index 20f5eef3f3d26..a4e736804743c 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSortingTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSortingTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.DataInput; import java.io.DataOutput; @@ -48,9 +48,10 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.ignite.configuration.HadoopConfiguration; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.util.typedef.X; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Tests correct sorting. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSplitWrapperSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSplitWrapperSelfTest.java similarity index 91% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSplitWrapperSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSplitWrapperSelfTest.java index 11c3907e03992..be2bfc2413727 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSplitWrapperSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopSplitWrapperSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -27,11 +27,11 @@ import java.util.concurrent.Callable; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.lib.input.FileSplit; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopSplitWrapper; +import org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapper; import org.apache.ignite.testframework.GridTestUtils; /** - * Self test of {@link org.apache.ignite.internal.processors.hadoop.v2.HadoopSplitWrapper}. + * Self test of {@link HadoopSplitWrapper}. 
*/ public class HadoopSplitWrapperSelfTest extends HadoopAbstractSelfTest { /** diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopStartup.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopStartup.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopStartup.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopStartup.java index 820a1f3e745f1..66e341b60f2e4 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopStartup.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopStartup.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.hadoop.conf.Configuration; import org.apache.ignite.hadoop.fs.v2.IgniteHadoopFileSystem; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTaskExecutionSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTaskExecutionSelfTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTaskExecutionSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTaskExecutionSelfTest.java index 431433e3e3444..027f921f0d83b 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTaskExecutionSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTaskExecutionSelfTest.java @@ -15,18 +15,15 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.util.List; -import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; @@ -48,43 +45,29 @@ import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteInternalFuture; +import org.apache.ignite.internal.processors.hadoop.Hadoop; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskCancelledException; import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.testframework.GridTestUtils; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopTaskExecutionSelfTestValues.cancelledTasks; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopTaskExecutionSelfTestValues.executedTasks; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopTaskExecutionSelfTestValues.failMapperId; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopTaskExecutionSelfTestValues.splitsCount; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopTaskExecutionSelfTestValues.taskWorkDirs; +import static org.apache.ignite.internal.processors.hadoop.state.HadoopTaskExecutionSelfTestValues.totalLineCnt; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Tests map-reduce task execution basics. */ public class HadoopTaskExecutionSelfTest extends HadoopAbstractSelfTest { - /** */ - private static HadoopSharedMap m = HadoopSharedMap.map(HadoopTaskExecutionSelfTest.class); - - /** Line count. */ - private static final AtomicInteger totalLineCnt = m.put("totalLineCnt", new AtomicInteger()); - - /** Executed tasks. */ - private static final AtomicInteger executedTasks = m.put("executedTasks", new AtomicInteger()); - - /** Cancelled tasks. */ - private static final AtomicInteger cancelledTasks = m.put("cancelledTasks", new AtomicInteger()); - - /** Working directory of each task. */ - private static final Map taskWorkDirs = m.put("taskWorkDirs", - new ConcurrentHashMap()); - - /** Mapper id to fail. */ - private static final AtomicInteger failMapperId = m.put("failMapperId", new AtomicInteger()); - - /** Number of splits of the current input. */ - private static final AtomicInteger splitsCount = m.put("splitsCount", new AtomicInteger()); - /** Test param. 
*/ private static final String MAP_WRITE = "test.map.write"; - /** {@inheritDoc} */ @Override public FileSystemConfiguration igfsConfiguration() throws Exception { FileSystemConfiguration cfg = super.igfsConfiguration(); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksAllVersionsTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksAllVersionsTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java index 7c6d244819224..8b1b6935301e0 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksAllVersionsTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksAllVersionsTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import com.google.common.base.Joiner; import java.io.IOException; @@ -27,7 +27,11 @@ import org.apache.hadoop.io.Text; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.igfs.IgfsPath; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount2; +import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskType; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2; /** * Tests of Map, Combine and Reduce task executions of any version of hadoop API. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java similarity index 75% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java index 27d7fc229fa03..d7cd7388f0d9d 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV1Test.java @@ -15,15 +15,19 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.IOException; import java.util.UUID; import org.apache.hadoop.mapred.JobConf; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount1; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job; +import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount1; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Job; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Tests of Map, Combine and Reduce task executions via running of job of hadoop API v1. @@ -48,7 +52,7 @@ public class HadoopTasksV1Test extends HadoopTasksAllVersionsTest { HadoopJobId jobId = new HadoopJobId(uuid, 0); - return jobInfo.createJob(HadoopV2Job.class, jobId, log, null); + return jobInfo.createJob(HadoopV2Job.class, jobId, log, null, new HadoopHelperImpl()); } /** {@inheritDoc} */ diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java similarity index 80% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java index 30cf50c85301c..c635c41bff85c 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTasksV2Test.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.util.UUID; import org.apache.hadoop.conf.Configuration; @@ -25,10 +25,14 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.apache.ignite.internal.processors.hadoop.examples.HadoopWordCount2; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job; +import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl; +import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Job; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Tests of Map, Combine and Reduce task executions via running of job of hadoop API v2. 
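The HadoopSnappyTest hunk earlier in this group shows the matching class-loader change: HadoopClassLoader is now constructed with a HadoopHelper as well. The sketch below illustrates running test code through such an isolated loader; the helper class and method names are illustrative, and only the constructor call mirrors the patch.

package org.apache.ignite.internal.processors.hadoop.impl;

import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader;
import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl;

/**
 * Illustrative sketch only: loads a class through an isolated Hadoop class loader,
 * mirroring the updated constructor call in HadoopSnappyTest.
 */
final class IsolatedLoaderExample {
    /**
     * @param clsName Fully qualified class name to load in isolation.
     * @return Class loaded by a fresh HadoopClassLoader.
     */
    static Class<?> loadIsolated(String clsName) throws ClassNotFoundException {
        ClassLoader ldr = new HadoopClassLoader(null, "test-cl", null, new HadoopHelperImpl());

        return Class.forName(clsName, true, ldr);
    }
}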
@@ -67,7 +71,7 @@ public class HadoopTasksV2Test extends HadoopTasksAllVersionsTest { HadoopJobId jobId = new HadoopJobId(uuid, 0); - return jobInfo.createJob(HadoopV2Job.class, jobId, log, null); + return jobInfo.createJob(HadoopV2Job.class, jobId, log, null, new HadoopHelperImpl()); } /** {@inheritDoc} */ diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestRoundRobinMrPlanner.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestRoundRobinMrPlanner.java similarity index 87% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestRoundRobinMrPlanner.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestRoundRobinMrPlanner.java index edafecd4c60d8..81f6f3c866e5e 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestRoundRobinMrPlanner.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestRoundRobinMrPlanner.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.util.ArrayList; import java.util.Collection; @@ -26,6 +26,10 @@ import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode; +import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan; +import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlanner; import org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan; import org.jetbrains.annotations.Nullable; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestTaskContext.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java similarity index 92% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestTaskContext.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java index f542cf2c7c1ba..cfd41cf7b420b 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestTaskContext.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestTaskContext.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -32,7 +32,12 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext; +import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskOutput; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext; /** * Context for test purpose. 
@@ -168,7 +173,7 @@ public Map mockInput() { public void makeTreeOfWritables(Iterable> flatData) { Text key = new Text(); - for (HadoopTestTaskContext.Pair pair : flatData) { + for (Pair pair : flatData) { key.set(pair.key); ArrayList valList; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestUtils.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestUtils.java index da0d922a988f0..e8ec8a9a2d24f 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTestUtils.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/cache/HadoopTxConfigCacheTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTxConfigCacheTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/hadoop/cache/HadoopTxConfigCacheTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTxConfigCacheTest.java index 6f910f1cf10ff..e85baed8404b6 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/cache/HadoopTxConfigCacheTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopTxConfigCacheTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.hadoop.cache; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.Ignite; import org.apache.ignite.internal.processors.cache.IgniteInternalCache; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopUserLibsSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopUserLibsSelfTest.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopUserLibsSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopUserLibsSelfTest.java index 9e3c8f402917a..0e4a0ef1d1a82 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopUserLibsSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopUserLibsSelfTest.java @@ -15,8 +15,9 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; +import org.apache.ignite.internal.processors.hadoop.HadoopClasspathUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import java.io.File; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java similarity index 79% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java index ae2c00d412499..540a7aaea3305 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopV2JobSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import java.io.ByteArrayInputStream; import java.io.DataInput; @@ -30,13 +30,21 @@ import org.apache.hadoop.io.serializer.WritableSerialization; import org.apache.hadoop.mapred.JobConf; import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopSerializationWrapper; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job; - -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopJob; +import org.apache.ignite.internal.processors.hadoop.HadoopJobId; +import org.apache.ignite.internal.processors.hadoop.HadoopSerialization; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo; +import org.apache.ignite.internal.processors.hadoop.HadoopTaskType; +import org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopSerializationWrapper; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Job; + +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** - * Self test of {@link org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job}. + * Self test of {@link org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2Job}. 
*/ public class HadoopV2JobSelfTest extends HadoopAbstractSelfTest { /** */ @@ -78,7 +86,7 @@ public void testCustomSerializationApplying() throws IgniteCheckedException { HadoopJobId id = new HadoopJobId(uuid, 1); - HadoopJob job = info.createJob(HadoopV2Job.class, id, log, null); + HadoopJob job = info.createJob(HadoopV2Job.class, id, log, null, new HadoopHelperImpl()); HadoopTaskContext taskCtx = job.getTaskContext(new HadoopTaskInfo(HadoopTaskType.MAP, null, 0, 0, null)); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopValidationSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopValidationSelfTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopValidationSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopValidationSelfTest.java index 14961509f1790..2d61016290400 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopValidationSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopValidationSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.configuration.IgniteConfiguration; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopWeightedMapReducePlannerTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopWeightedMapReducePlannerTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java index 4e7cc50c86b1e..430c6757e2456 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopWeightedMapReducePlannerTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedMapReducePlannerTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.cluster.ClusterMetrics; import org.apache.ignite.cluster.ClusterNode; @@ -23,6 +23,9 @@ import org.apache.ignite.igfs.IgfsBlockLocation; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteNodeAttributes; +import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock; +import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; +import org.apache.ignite.internal.processors.hadoop.HadoopMapReducePlan; import org.apache.ignite.internal.processors.hadoop.planner.HadoopAbstractMapReducePlanner; import org.apache.ignite.internal.processors.igfs.IgfsIgniteMock; import org.apache.ignite.internal.processors.igfs.IgfsMock; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopWeightedPlannerMapReduceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedPlannerMapReduceTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopWeightedPlannerMapReduceTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedPlannerMapReduceTest.java index e0403c28d3d41..13f00bdbd42b8 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopWeightedPlannerMapReduceTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/HadoopWeightedPlannerMapReduceTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl; import org.apache.ignite.configuration.HadoopConfiguration; import org.apache.ignite.hadoop.mapreduce.IgniteHadoopWeightedMapReducePlanner; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/alice-in-wonderland.txt b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/alice-in-wonderland.txt similarity index 100% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/alice-in-wonderland.txt rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/alice-in-wonderland.txt diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/art-of-war.txt b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/art-of-war.txt similarity index 100% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/art-of-war.txt rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/art-of-war.txt diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/huckleberry-finn.txt b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/huckleberry-finn.txt similarity index 100% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/huckleberry-finn.txt rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/huckleberry-finn.txt diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/sherlock-holmes.txt 
b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/sherlock-holmes.txt similarity index 100% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/sherlock-holmes.txt rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/sherlock-holmes.txt diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/tom-sawyer.txt b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/tom-sawyer.txt similarity index 100% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/books/tom-sawyer.txt rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/books/tom-sawyer.txt diff --git a/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolEmbeddedSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/client/HadoopClientProtocolEmbeddedSelfTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolEmbeddedSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/client/HadoopClientProtocolEmbeddedSelfTest.java index 5a20a754fb7ce..a65d691994f0a 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolEmbeddedSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/client/HadoopClientProtocolEmbeddedSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.client.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl.client; import org.apache.ignite.configuration.HadoopConfiguration; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/client/HadoopClientProtocolSelfTest.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/client/HadoopClientProtocolSelfTest.java index 1344e261afe96..1ef7dd0a5cca9 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/client/HadoopClientProtocolSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.client.hadoop; +package org.apache.ignite.internal.processors.hadoop.impl.client; import java.io.BufferedReader; import java.io.BufferedWriter; @@ -50,8 +50,9 @@ import org.apache.ignite.hadoop.mapreduce.IgniteHadoopClientProtocolProvider; import org.apache.ignite.igfs.IgfsFile; import org.apache.ignite.igfs.IgfsPath; -import org.apache.ignite.internal.processors.hadoop.HadoopAbstractSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopUtils; +import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopAbstractSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils; import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; @@ -114,7 +115,6 @@ public class HadoopClientProtocolSelfTest extends HadoopAbstractSelfTest { stopAllGrids(); super.afterTestsStopped(); - // IgniteHadoopClientProtocolProvider.cliMap.clear(); } diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1.java similarity index 97% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1.java index a2faf954c16f6..0df9c6a437732 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.examples; +package org.apache.ignite.internal.processors.hadoop.impl.examples; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1Map.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1Map.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1Map.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1Map.java index d4cd190bcf059..6a98a244bf743 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1Map.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1Map.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.examples; +package org.apache.ignite.internal.processors.hadoop.impl.examples; import java.io.IOException; import java.util.StringTokenizer; @@ -27,7 +27,7 @@ import org.apache.hadoop.mapred.Mapper; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; -import org.apache.ignite.internal.processors.hadoop.HadoopErrorSimulator; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopErrorSimulator; /** * Mapper phase of WordCount job. 
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1Reduce.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1Reduce.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1Reduce.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1Reduce.java index b400d9b222344..ab91e0c438b5e 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount1Reduce.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount1Reduce.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.examples; +package org.apache.ignite.internal.processors.hadoop.impl.examples; import java.io.IOException; import java.util.Iterator; @@ -26,7 +26,7 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reducer; import org.apache.hadoop.mapred.Reporter; -import org.apache.ignite.internal.processors.hadoop.HadoopErrorSimulator; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopErrorSimulator; /** * Combiner and Reducer phase of WordCount job. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2.java similarity index 98% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2.java index b2cfee334f9c4..3ddc923cdefcf 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.examples; +package org.apache.ignite.internal.processors.hadoop.impl.examples; import java.io.IOException; import org.apache.hadoop.fs.Path; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Combiner.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Combiner.java similarity index 91% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Combiner.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Combiner.java index 0d25e3c20d349..a643a924918ab 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Combiner.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Combiner.java @@ -14,10 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.examples; +package org.apache.ignite.internal.processors.hadoop.impl.examples; import java.io.IOException; -import org.apache.ignite.internal.processors.hadoop.HadoopErrorSimulator; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopErrorSimulator; /** * Combiner function with pluggable error simulator. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Mapper.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Mapper.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Mapper.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Mapper.java index 76857e6e2ffaf..336db8410b8b2 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Mapper.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Mapper.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.examples; +package org.apache.ignite.internal.processors.hadoop.impl.examples; import java.io.IOException; import java.util.StringTokenizer; @@ -24,7 +24,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; -import org.apache.ignite.internal.processors.hadoop.HadoopErrorSimulator; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopErrorSimulator; /** * Mapper phase of WordCount job. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Reducer.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Reducer.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Reducer.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Reducer.java index e780170e9caee..f24288ebe1bb0 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/examples/HadoopWordCount2Reducer.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/examples/HadoopWordCount2Reducer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.examples; +package org.apache.ignite.internal.processors.hadoop.impl.examples; import java.io.IOException; import org.apache.hadoop.conf.Configurable; @@ -23,7 +23,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Reducer; -import org.apache.ignite.internal.processors.hadoop.HadoopErrorSimulator; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopErrorSimulator; /** * Combiner and Reducer phase of WordCount job. 
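Illustrative sketch (not part of the patch): the WordCount example classes above only change package, moving under org.apache.ignite.internal.processors.hadoop.impl.examples. Wiring the relocated mapper, combiner and reducer into a job through the standard Hadoop mapreduce Job API would look roughly as follows; the class name WordCountWiringSketch and the job name are made up for illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2Combiner;
    import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2Mapper;
    import org.apache.ignite.internal.processors.hadoop.impl.examples.HadoopWordCount2Reducer;

    public class WordCountWiringSketch {
        /** Builds a plain Hadoop job using the relocated example classes. */
        public static Job wordCountJob() throws Exception {
            Job job = Job.getInstance(new Configuration(), "word-count-sketch");

            job.setMapperClass(HadoopWordCount2Mapper.class);
            job.setCombinerClass(HadoopWordCount2Combiner.class); // combiner class from the examples package above
            job.setReducerClass(HadoopWordCount2Reducer.class);

            return job;
        }
    }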
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactorySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/fs/KerberosHadoopFileSystemFactorySelfTest.java similarity index 90% rename from modules/hadoop/src/test/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactorySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/fs/KerberosHadoopFileSystemFactorySelfTest.java index ea7fa996bc80a..8c95a0ea57d51 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactorySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/fs/KerberosHadoopFileSystemFactorySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.hadoop.fs; +package org.apache.ignite.internal.processors.hadoop.impl.fs; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -25,6 +25,9 @@ import java.io.ObjectOutputStream; import java.util.concurrent.Callable; +import org.apache.ignite.hadoop.fs.KerberosHadoopFileSystemFactory; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.junit.Assert; @@ -67,7 +70,9 @@ private void checkParameters(String keyTab, String keyTabPrincipal, long relogin GridTestUtils.assertThrows(null, new Callable() { @Override public Object call() throws Exception { - fac.start(); + HadoopFileSystemFactoryDelegate delegate = HadoopDelegateUtils.fileSystemFactoryDelegate(fac); + + delegate.start(); return null; } diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1DualAbstractTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1DualAbstractTest.java similarity index 88% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1DualAbstractTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1DualAbstractTest.java index 2c25a065276ba..a585e544d72d9 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1DualAbstractTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1DualAbstractTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.io.IOException; import org.apache.hadoop.conf.Configuration; @@ -25,16 +25,15 @@ import org.apache.ignite.hadoop.util.ChainedUserNameMapper; import org.apache.ignite.hadoop.util.KerberosUserNameMapper; import org.apache.ignite.hadoop.util.UserNameMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem; import org.apache.ignite.internal.processors.igfs.IgfsDualAbstractSelfTest; import org.apache.ignite.lifecycle.LifecycleAware; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.IgniteFileSystem.IGFS_SCHEME; -import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.SECONDARY_CFG_PATH; -import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.configuration; -import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.mkUri; -import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.writeConfiguration; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; /** @@ -118,13 +117,13 @@ protected void startUnderlying() throws Exception { * @throws IOException On failure. */ protected void prepareConfiguration() throws IOException { - Configuration secondaryConf = configuration(IGFS_SCHEME, SECONDARY_AUTHORITY, true, true); + Configuration secondaryConf = HadoopSecondaryFileSystemConfigurationTest.configuration(IGFS_SCHEME, SECONDARY_AUTHORITY, true, true); secondaryConf.setInt("fs.igfs.block.size", 1024); - secondaryConfFullPath = writeConfiguration(secondaryConf, SECONDARY_CFG_PATH); + secondaryConfFullPath = HadoopSecondaryFileSystemConfigurationTest.writeConfiguration(secondaryConf, HadoopSecondaryFileSystemConfigurationTest.SECONDARY_CFG_PATH); - secondaryUri = mkUri(IGFS_SCHEME, SECONDARY_AUTHORITY); + secondaryUri = HadoopSecondaryFileSystemConfigurationTest.mkUri(IGFS_SCHEME, SECONDARY_AUTHORITY); } /** diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualAsyncTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1OverIgfsDualAsyncTest.java similarity index 90% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualAsyncTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1OverIgfsDualAsyncTest.java index bbf12232aaa2a..97cc7e95cb033 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualAsyncTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1OverIgfsDualAsyncTest.java @@ -15,7 +15,9 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; + +import org.apache.ignite.igfs.IgfsMode; /** * DUAL_ASYNC mode test. 
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualSyncTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1OverIgfsDualSyncTest.java similarity index 90% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualSyncTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1OverIgfsDualSyncTest.java index c57415cd9aced..12036bc6f234f 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualSyncTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/Hadoop1OverIgfsDualSyncTest.java @@ -15,7 +15,9 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; + +import org.apache.ignite.igfs.IgfsMode; /** * DUAL_SYNC mode. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopFIleSystemFactorySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopFIleSystemFactorySelfTest.java similarity index 83% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopFIleSystemFactorySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopFIleSystemFactorySelfTest.java index 5be3a6413c8ee..7cf7f2dcaa1f2 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopFIleSystemFactorySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopFIleSystemFactorySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -26,20 +26,29 @@ import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory; +import org.apache.ignite.hadoop.fs.HadoopFileSystemFactory; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; +import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.processors.igfs.IgfsEx; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.lifecycle.LifecycleAware; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; import org.jetbrains.annotations.Nullable; -import java.io.Externalizable; import java.io.File; import java.io.FileOutputStream; +import java.io.IOException; import java.net.URI; import java.util.concurrent.atomic.AtomicInteger; @@ -176,19 +185,22 @@ private static IgfsEx startPrimary() throws 
Exception { writeConfigurationToFile(conf); - // Configure factory. - TestFactory factory = new TestFactory(); + // Get file system instance to be used. + CachingHadoopFileSystemFactory delegate = new CachingHadoopFileSystemFactory(); + + delegate.setUri("igfs://secondary:secondary@127.0.0.1:11500/"); + delegate.setConfigPaths(SECONDARY_CFG_PATH); - factory.setUri("igfs://secondary:secondary@127.0.0.1:11500/"); - factory.setConfigPaths(SECONDARY_CFG_PATH); + // Configure factory. + TestFactory factory = new TestFactory(delegate); // Configure file system. - IgniteHadoopIgfsSecondaryFileSystem fs = new IgniteHadoopIgfsSecondaryFileSystem(); + IgniteHadoopIgfsSecondaryFileSystem secondaryFs = new IgniteHadoopIgfsSecondaryFileSystem(); - fs.setFileSystemFactory(factory); + secondaryFs.setFileSystemFactory(factory); // Start. - return start("primary", 10500, IgfsMode.DUAL_ASYNC, fs); + return start("primary", 10500, IgfsMode.DUAL_ASYNC, secondaryFs); } /** @@ -292,26 +304,42 @@ private static void writeConfigurationToFile(Configuration conf) throws Exceptio /** * Test factory. */ - private static class TestFactory extends CachingHadoopFileSystemFactory { + private static class TestFactory implements HadoopFileSystemFactory, LifecycleAware { + /** */ + private static final long serialVersionUID = 0L; + + /** File system factory. */ + private CachingHadoopFileSystemFactory factory; + + /** File system. */ + private transient HadoopFileSystemFactoryDelegate delegate; + /** - * {@link Externalizable} support. + * Constructor. + * + * @param factory File system factory. */ - public TestFactory() { - // No-op. + public TestFactory(CachingHadoopFileSystemFactory factory) { + this.factory = factory; + } + + /** {@inheritDoc} */ + @Override public Object get(String usrName) throws IOException { + return delegate.get(usrName); } /** {@inheritDoc} */ @Override public void start() throws IgniteException { - START_CNT.incrementAndGet(); + delegate = HadoopDelegateUtils.fileSystemFactoryDelegate(factory); - super.start(); + delegate.start(); + + START_CNT.incrementAndGet(); } /** {@inheritDoc} */ @Override public void stop() throws IgniteException { STOP_CNT.incrementAndGet(); - - super.stop(); } } } diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java similarity index 99% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java index 93a924c72a1ea..d8f6f546fac1c 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.io.BufferedOutputStream; import java.io.Closeable; @@ -59,7 +59,14 @@ import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils; +import org.apache.ignite.igfs.IgfsBlockLocation; +import org.apache.ignite.igfs.IgfsFile; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsMetrics; +import org.apache.ignite.igfs.IgfsMode; +import org.apache.ignite.igfs.IgfsPath; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.util.GridConcurrentHashSet; import org.apache.ignite.internal.util.typedef.F; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemLoopbackPrimarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemLoopbackPrimarySelfTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemLoopbackPrimarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemLoopbackPrimarySelfTest.java index ff5cd5b7f3ea4..ee167d1699eba 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemLoopbackPrimarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemLoopbackPrimarySelfTest.java @@ -15,7 +15,10 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; + +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; import static org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint.DFLT_IPC_PORT; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemShmemPrimarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemShmemPrimarySelfTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemShmemPrimarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemShmemPrimarySelfTest.java index 2bc9eb814cb77..5f45488facd72 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemShmemPrimarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfs20FileSystemShmemPrimarySelfTest.java @@ -15,7 +15,10 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; + +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; import static org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint.DFLT_IPC_PORT; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualAbstractSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualAbstractSelfTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualAbstractSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualAbstractSelfTest.java index bb155b40528c5..881618217ce28 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualAbstractSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualAbstractSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; @@ -28,6 +28,13 @@ import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsInputStream; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; +import org.apache.ignite.igfs.IgfsOutputStream; +import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem; import org.apache.ignite.internal.processors.igfs.IgfsBlockKey; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; @@ -51,7 +58,7 @@ import static org.apache.ignite.igfs.IgfsMode.DUAL_ASYNC; import static org.apache.ignite.igfs.IgfsMode.DUAL_SYNC; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; import static org.apache.ignite.internal.processors.igfs.IgfsAbstractSelfTest.awaitFileClose; import static org.apache.ignite.internal.processors.igfs.IgfsAbstractSelfTest.clear; import static org.apache.ignite.internal.processors.igfs.IgfsAbstractSelfTest.create; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualAsyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualAsyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualAsyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualAsyncSelfTest.java index 6c6e7099ab134..eb85700e5fe58 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualAsyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualAsyncSelfTest.java @@ -15,7 +15,7 @@ * 
limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_ASYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualSyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualSyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualSyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualSyncSelfTest.java index 96a63d510e49c..98a2dc5357014 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsDualSyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsDualSyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_SYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsSecondaryFileSystemTestAdapter.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsSecondaryFileSystemTestAdapter.java similarity index 89% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsSecondaryFileSystemTestAdapter.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsSecondaryFileSystemTestAdapter.java index f7af6f07483f9..adb1330f2a319 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfsSecondaryFileSystemTestAdapter.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsSecondaryFileSystemTestAdapter.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.io.IOException; import java.io.InputStream; @@ -28,7 +28,9 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.hadoop.fs.HadoopFileSystemFactory; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils; import org.apache.ignite.internal.processors.igfs.IgfsEx; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.processors.igfs.IgfsSecondaryFileSystemTestAdapter; @@ -39,7 +41,7 @@ */ public class HadoopIgfsSecondaryFileSystemTestAdapter implements IgfsSecondaryFileSystemTestAdapter { /** File system factory. */ - private final HadoopFileSystemFactory factory; + private final HadoopFileSystemFactoryDelegate factory; /** * Constructor. 
@@ -48,7 +50,9 @@ public class HadoopIgfsSecondaryFileSystemTestAdapter implements IgfsSecondaryFi public HadoopIgfsSecondaryFileSystemTestAdapter(HadoopFileSystemFactory factory) { assert factory != null; - this.factory = factory; + this.factory = HadoopDelegateUtils.fileSystemFactoryDelegate(factory); + + this.factory.start(); } /** {@inheritDoc} */ @@ -144,6 +148,6 @@ private String permission(FileStatus status) { * @throws IOException If failed. */ protected FileSystem get() throws IOException { - return factory.get(FileSystemConfiguration.DFLT_USER_NAME); + return (FileSystem)factory.get(FileSystemConfiguration.DFLT_USER_NAME); } } \ No newline at end of file diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopSecondaryFileSystemConfigurationTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopSecondaryFileSystemConfigurationTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopSecondaryFileSystemConfigurationTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopSecondaryFileSystemConfigurationTest.java index d9b5d66797170..fd7523315c86c 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopSecondaryFileSystemConfigurationTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopSecondaryFileSystemConfigurationTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -31,7 +31,13 @@ import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopDelegateUtils; +import org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemFactoryDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.internal.util.typedef.internal.U; @@ -161,7 +167,7 @@ public HadoopSecondaryFileSystemConfigurationTest() { /** * Executes before each test. - * @throws Exception + * @throws Exception If failed. */ private void before() throws Exception { initSecondary(); @@ -179,16 +185,18 @@ private void before() throws Exception { fac.setConfigPaths(primaryConfFullPath); fac.setUri(primaryFsUriStr); - fac.start(); + HadoopFileSystemFactoryDelegate facDelegate = HadoopDelegateUtils.fileSystemFactoryDelegate(fac); + + facDelegate.start(); - primaryFs = fac.get(null); //provider.createFileSystem(null); + primaryFs = (FileSystem)facDelegate.get(null); //provider.createFileSystem(null); primaryFsUri = primaryFs.getUri(); } /** * Executes after each test. - * @throws Exception + * @throws Exception If failed. 
*/ private void after() throws Exception { if (primaryFs != null) { @@ -225,7 +233,7 @@ private static void delete(String file) { /** * Initialize underlying secondary filesystem. * - * @throws Exception + * @throws Exception If failed. */ private void initSecondary() throws Exception { if (passSecondaryConfiguration) { diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgfsEventsTestSuite.java similarity index 97% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgfsEventsTestSuite.java index a9d7bad49ef31..6d7dc995a8690 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgfsEventsTestSuite.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import junit.framework.TestSuite; import org.apache.ignite.Ignite; @@ -24,6 +24,10 @@ import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; +import org.apache.ignite.igfs.IgfsEventsAbstractSelfTest; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; import org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint; import org.apache.ignite.internal.util.typedef.G; import org.jetbrains.annotations.Nullable; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsNearOnlyMultiNodeSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgfsNearOnlyMultiNodeSelfTest.java similarity index 97% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsNearOnlyMultiNodeSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgfsNearOnlyMultiNodeSelfTest.java index 8e7935630cf8d..51048c6b3fc62 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsNearOnlyMultiNodeSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgfsNearOnlyMultiNodeSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.io.OutputStream; import java.net.URI; @@ -31,6 +31,9 @@ import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; import org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.G; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemAbstractSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemAbstractSelfTest.java similarity index 99% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemAbstractSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemAbstractSelfTest.java index f793ec31f0b5a..bd17edc8ab2fd 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemAbstractSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemAbstractSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; @@ -39,10 +39,13 @@ import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsEx; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsIpcIo; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsOutProc; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils; +import org.apache.ignite.igfs.IgfsBlockLocation; +import org.apache.ignite.igfs.IgfsFile; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; +import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.util.GridConcurrentHashSet; import org.apache.ignite.internal.util.lang.GridAbsPredicate; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemClientSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemClientSelfTest.java similarity index 92% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemClientSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemClientSelfTest.java index 8ddb3596908fe..555da1e0422c7 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemClientSelfTest.java +++ 
b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemClientSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.io.IOException; import java.lang.reflect.Field; @@ -27,12 +27,16 @@ import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.igfs.common.IgfsLogger; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfs; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsOutProc; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsOutputStream; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsStreamDelegate; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfs; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsOutProc; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsOutputStream; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsStreamDelegate; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.processors.igfs.IgfsContext; import org.apache.ignite.internal.processors.igfs.IgfsProcessorAdapter; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemHandshakeSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemHandshakeSelfTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemHandshakeSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemHandshakeSelfTest.java index fdb0d7724cfa7..9891c602b90a5 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemHandshakeSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemHandshakeSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.io.IOException; import java.net.URI; @@ -29,6 +29,10 @@ import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.hadoop.fs.v2.IgniteHadoopFileSystem; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi; @@ -42,9 +46,9 @@ import static org.apache.ignite.cache.CacheMode.REPLICATED; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_EMBED; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_SHMEM; -import static org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_TCP; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_EMBED; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_SHMEM; +import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_LOCAL_TCP; import static org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint.DFLT_IPC_PORT; /** diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemIpcCacheSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemIpcCacheSelfTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemIpcCacheSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemIpcCacheSelfTest.java index 4d7a39e3c1142..35f1ceb50ee17 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemIpcCacheSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemIpcCacheSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.lang.reflect.Field; import java.net.URI; @@ -27,8 +27,9 @@ import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsIpcIo; -import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsUtils; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint; import org.apache.ignite.internal.util.typedef.G; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoggerSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoggerSelfTest.java similarity index 99% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoggerSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoggerSelfTest.java index 3013311618bb6..b61492a6e5be7 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoggerSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoggerSelfTest.java @@ -15,8 +15,9 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; +import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.igfs.common.IgfsLogger; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.util.typedef.internal.SB; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoggerStateSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoggerStateSelfTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoggerStateSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoggerStateSelfTest.java index 1bd5b41195eaf..e30779f8d08f0 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoggerStateSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoggerStateSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.lang.reflect.Field; import java.net.URI; @@ -28,6 +28,9 @@ import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; import org.apache.ignite.internal.igfs.common.IgfsLogger; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.processors.igfs.IgfsEx; @@ -40,8 +43,8 @@ import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheMode.REPLICATED; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; -import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; +import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; /** * Ensures that sampling is really turned on/off. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackAbstractSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackAbstractSelfTest.java similarity index 88% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackAbstractSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackAbstractSelfTest.java index 6ed2249187213..4f48078179927 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackAbstractSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackAbstractSelfTest.java @@ -15,7 +15,11 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; + +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; import static org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint.DFLT_IPC_PORT; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.java index f1edb28525433..93b64dc5171ce 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_ASYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.java index 97a6991a11ce5..0c41ed357fc56 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_SYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.java index f9ecc4b7fe22b..ed7eb3271f0ac 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.java index 719df6da5d4d1..b41e5daff6031 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PROXY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.java index 764624da38a5c..ce8046a3d33ac 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_ASYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.java index 21a248aa3c914..1327d6b8df0d8 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_SYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.java index 092c7a5a31429..2a3c2eecd2744 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.java index 9f7d21b1b6ac2..8d42336b43ea4 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PROXY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest.java index 1b488702e8900..4c96244165ac1 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -25,6 +25,9 @@ import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem; +import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.internal.util.typedef.internal.U; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemAbstractSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemAbstractSelfTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemAbstractSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemAbstractSelfTest.java index d8cf74c2b11aa..67b5007a3201a 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemAbstractSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemAbstractSelfTest.java @@ -15,12 +15,15 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.Callable; import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration; +import org.apache.ignite.igfs.IgfsIpcEndpointType; +import org.apache.ignite.igfs.IgfsMode; import org.apache.ignite.internal.util.ipc.IpcEndpoint; import org.apache.ignite.internal.util.ipc.IpcEndpointFactory; import org.apache.ignite.internal.util.typedef.X; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest.java index d0d570f0a06e5..cca960bdc89e5 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_ASYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest.java index 2e5b015195e45..73db4f8c414f0 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_SYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest.java index 214c2a8bc0c53..48a4694702c21 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest.java index d7f34a1902637..ab9c35722cb1c 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PROXY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest.java index 0435eaaa65e06..5154642b2a62b 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_ASYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalDualSyncSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalDualSyncSelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalDualSyncSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalDualSyncSelfTest.java index 3af72746e99b0..d88a38b87df00 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalDualSyncSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalDualSyncSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.DUAL_SYNC; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalPrimarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalPrimarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalPrimarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalPrimarySelfTest.java index ce9dbd9f2dcbf..7b41b224669b4 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalPrimarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalPrimarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PRIMARY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalSecondarySelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalSecondarySelfTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalSecondarySelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalSecondarySelfTest.java index bc8c182f0f64e..e54b0200f4717 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgniteHadoopFileSystemShmemExternalSecondarySelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/IgniteHadoopFileSystemShmemExternalSecondarySelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.ignite.igfs; +package org.apache.ignite.internal.processors.hadoop.impl.igfs; import static org.apache.ignite.igfs.IgfsMode.PROXY; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java similarity index 94% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java index 5266875504179..9d1fd4f710184 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopAbstractMapTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.shuffle.collections; +package org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections; import java.util.Comparator; import java.util.concurrent.Callable; @@ -23,6 +23,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; +import org.apache.ignite.internal.processors.hadoop.HadoopHelper; import org.apache.ignite.internal.processors.hadoop.HadoopJob; import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; @@ -31,7 +32,7 @@ import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounter; import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; -import org.apache.ignite.internal.processors.hadoop.v2.HadoopWritableSerialization; +import org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopWritableSerialization; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.jetbrains.annotations.Nullable; @@ -144,7 +145,7 @@ protected static class JobInfo implements HadoopJobInfo { /** {@inheritDoc} */ @Override public HadoopJob createJob(Class jobCls, HadoopJobId jobId, IgniteLogger log, - @Nullable String[] libNames) throws IgniteCheckedException { + @Nullable String[] libNames, HadoopHelper helper) throws IgniteCheckedException { assert false; return null; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopConcurrentHashMultimapSelftest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopConcurrentHashMultimapSelftest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopConcurrentHashMultimapSelftest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopConcurrentHashMultimapSelftest.java index a37d74bb636ae..019b17279e4a4 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopConcurrentHashMultimapSelftest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopConcurrentHashMultimapSelftest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.shuffle.collections; +package org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; @@ -35,6 +35,8 @@ import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput; +import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimap; +import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap; import org.apache.ignite.internal.util.GridRandom; import org.apache.ignite.internal.util.GridUnsafe; import org.apache.ignite.internal.util.io.GridDataInput; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopHashMapSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopHashMapSelfTest.java similarity index 93% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopHashMapSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopHashMapSelfTest.java index 04585eccff2f3..195bcbbfcb752 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopHashMapSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopHashMapSelfTest.java @@ -15,13 +15,15 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.shuffle.collections; +package org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import org.apache.hadoop.io.IntWritable; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput; +import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopHashMultimap; +import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap; import org.apache.ignite.internal.util.GridLongList; import org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory; import org.apache.ignite.internal.util.typedef.X; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopSkipListSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopSkipListSelfTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopSkipListSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopSkipListSelfTest.java index f70ef2f9829b2..d04becaa607be 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopSkipListSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/collections/HadoopSkipListSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.shuffle.collections; +package org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; @@ -36,6 +36,8 @@ import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo; import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInput; +import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopMultimap; +import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopSkipList; import org.apache.ignite.internal.util.GridRandom; import org.apache.ignite.internal.util.GridUnsafe; import org.apache.ignite.internal.util.io.GridDataInput; @@ -103,7 +105,7 @@ public void testMapSimple() throws Exception { HadoopMultimap m = new HadoopSkipList(job, mem); - HadoopConcurrentHashMultimap.Adder a = m.startAdding(taskCtx); + HadoopMultimap.Adder a = m.startAdding(taskCtx); Multimap mm = ArrayListMultimap.create(); Multimap vis = ArrayListMultimap.create(); @@ -180,7 +182,7 @@ private void check(HadoopMultimap m, Multimap mm, final Multim final GridDataInput dataInput = new GridUnsafeDataInput(); - m.visit(false, new HadoopConcurrentHashMultimap.Visitor() { + m.visit(false, new HadoopMultimap.Visitor() { /** */ IntWritable key = new IntWritable(); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/streams/HadoopDataStreamSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/streams/HadoopDataStreamSelfTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/streams/HadoopDataStreamSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/streams/HadoopDataStreamSelfTest.java index dd571afb69c99..612e892a96afa 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/streams/HadoopDataStreamSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/shuffle/streams/HadoopDataStreamSelfTest.java @@ -15,10 +15,13 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.shuffle.streams; +package org.apache.ignite.internal.processors.hadoop.impl.shuffle.streams; import java.io.IOException; import java.util.Arrays; + +import org.apache.ignite.internal.processors.hadoop.shuffle.streams.HadoopDataInStream; +import org.apache.ignite.internal.processors.hadoop.shuffle.streams.HadoopDataOutStream; import org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopExecutorServiceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/HadoopExecutorServiceTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopExecutorServiceTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/HadoopExecutorServiceTest.java index 7dd045a14ab24..b4e63d1e15f78 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/HadoopExecutorServiceTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/HadoopExecutorServiceTest.java @@ -15,11 +15,12 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.taskexecutor; +package org.apache.ignite.internal.processors.hadoop.impl.taskexecutor; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.ignite.internal.IgniteInternalFuture; +import org.apache.ignite.internal.processors.hadoop.taskexecutor.HadoopExecutorService; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.jsr166.LongAdder8; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutionSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/external/HadoopExternalTaskExecutionSelfTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutionSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/external/HadoopExternalTaskExecutionSelfTest.java index ec33836d58e4e..7c43500209730 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/HadoopExternalTaskExecutionSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/external/HadoopExternalTaskExecutionSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.taskexecutor.external; +package org.apache.ignite.internal.processors.hadoop.impl.taskexecutor.external; import java.io.IOException; import java.io.OutputStreamWriter; @@ -37,12 +37,12 @@ import org.apache.ignite.igfs.IgfsOutputStream; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteInternalFuture; -import org.apache.ignite.internal.processors.hadoop.HadoopAbstractSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopAbstractSelfTest; import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.marshaller.jdk.JdkMarshaller; -import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.createJobInfo; +import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.createJobInfo; /** * Job tracker self test. diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/communication/HadoopExternalCommunicationSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/external/communication/HadoopExternalCommunicationSelfTest.java similarity index 95% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/communication/HadoopExternalCommunicationSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/external/communication/HadoopExternalCommunicationSelfTest.java index 851c3af10410d..a40c5313d8442 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/communication/HadoopExternalCommunicationSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/taskexecutor/external/communication/HadoopExternalCommunicationSelfTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.ignite.internal.processors.hadoop.taskexecutor.external.communication; +package org.apache.ignite.internal.processors.hadoop.impl.taskexecutor.external.communication; import java.io.Externalizable; import java.io.IOException; @@ -30,6 +30,8 @@ import org.apache.ignite.IgniteLogger; import org.apache.ignite.internal.processors.hadoop.message.HadoopMessage; import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.HadoopProcessDescriptor; +import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.communication.HadoopExternalCommunication; +import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.communication.HadoopMessageListener; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.marshaller.Marshaller; import org.apache.ignite.marshaller.jdk.JdkMarshaller; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/BasicUserNameMapperSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/BasicUserNameMapperSelfTest.java similarity index 97% rename from modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/BasicUserNameMapperSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/BasicUserNameMapperSelfTest.java index fd8fdef0369eb..43924ed6b29cf 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/BasicUserNameMapperSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/BasicUserNameMapperSelfTest.java @@ -15,8 +15,9 @@ * limitations under the License. */ -package org.apache.ignite.hadoop.util; +package org.apache.ignite.internal.processors.hadoop.impl.util; +import org.apache.ignite.hadoop.util.BasicUserNameMapper; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.jetbrains.annotations.Nullable; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/ChainedUserNameMapperSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/ChainedUserNameMapperSelfTest.java similarity index 92% rename from modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/ChainedUserNameMapperSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/ChainedUserNameMapperSelfTest.java index bfac49c6ed2c0..a9d295f861d6e 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/ChainedUserNameMapperSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/ChainedUserNameMapperSelfTest.java @@ -15,9 +15,13 @@ * limitations under the License. 
*/ -package org.apache.ignite.hadoop.util; +package org.apache.ignite.internal.processors.hadoop.impl.util; import org.apache.ignite.IgniteException; +import org.apache.ignite.hadoop.util.BasicUserNameMapper; +import org.apache.ignite.hadoop.util.ChainedUserNameMapper; +import org.apache.ignite.hadoop.util.KerberosUserNameMapper; +import org.apache.ignite.hadoop.util.UserNameMapper; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/KerberosUserNameMapperSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/KerberosUserNameMapperSelfTest.java similarity index 96% rename from modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/KerberosUserNameMapperSelfTest.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/KerberosUserNameMapperSelfTest.java index cc685bbf22016..bd76b51a1685b 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/hadoop/util/KerberosUserNameMapperSelfTest.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/util/KerberosUserNameMapperSelfTest.java @@ -15,8 +15,9 @@ * limitations under the License. */ -package org.apache.ignite.hadoop.util; +package org.apache.ignite.internal.processors.hadoop.impl.util; +import org.apache.ignite.hadoop.util.KerberosUserNameMapper; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.jetbrains.annotations.Nullable; diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithOuterClass.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopGroupingTestState.java similarity index 57% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithOuterClass.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopGroupingTestState.java index cae1da76f4455..af82d49b4db17 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithOuterClass.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopGroupingTestState.java @@ -15,24 +15,26 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.deps; +package org.apache.ignite.internal.processors.hadoop.state; -import org.apache.hadoop.conf.Configuration; +import org.apache.ignite.internal.processors.hadoop.HadoopSharedMap; +import org.apache.ignite.internal.util.GridConcurrentHashSet; + +import java.util.Collection; +import java.util.UUID; /** - * Outer class depends on Hadoop, but Inner *static* one does not. + * Shared state for HadoopGroupingTest. */ -@SuppressWarnings("unused") -public class WithOuterClass { - /** */ - Configuration c; - - /** */ - public static class InnerNoHadoop { - /** */ - int x; +public class HadoopGroupingTestState { + /** Values. */ + private static final GridConcurrentHashSet vals = HadoopSharedMap.map(HadoopGroupingTestState.class) + .put("vals", new GridConcurrentHashSet()); - /** */ - void foo() {} + /** + * @return Values. 
+ */ + public static Collection values() { + return vals; } } diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopJobTrackerSelfTestState.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopJobTrackerSelfTestState.java new file mode 100644 index 0000000000000..4183f4e220b36 --- /dev/null +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopJobTrackerSelfTestState.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.state; + +import org.apache.ignite.internal.processors.hadoop.HadoopSharedMap; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Shared state for HadoopJobTrackerSelfTest. + */ +public class HadoopJobTrackerSelfTestState { + /** */ + private static HadoopSharedMap m = HadoopSharedMap.map(HadoopJobTrackerSelfTestState.class); + + /** Map task execution count. */ + public static final AtomicInteger mapExecCnt = m.put("mapExecCnt", new AtomicInteger()); + + /** Reduce task execution count. */ + public static final AtomicInteger reduceExecCnt = m.put("reduceExecCnt", new AtomicInteger()); + + /** Combine task execution count. */ + public static final AtomicInteger combineExecCnt = m.put("combineExecCnt", new AtomicInteger()); + + /** */ + public static final Map latch = m.put("latch", new HashMap()); +} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithInitializer.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopMapReduceEmbeddedSelfTestState.java similarity index 64% rename from modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithInitializer.java rename to modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopMapReduceEmbeddedSelfTestState.java index 360986c4f2ede..ae6ce82129e00 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/deps/WithInitializer.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopMapReduceEmbeddedSelfTestState.java @@ -15,19 +15,18 @@ * limitations under the License. */ -package org.apache.ignite.internal.processors.hadoop.deps; +package org.apache.ignite.internal.processors.hadoop.state; -/** - * Has a field initialized with an expression invoking Hadoop method.
- */ +import org.apache.ignite.internal.processors.hadoop.HadoopSharedMap; -@SuppressWarnings({"ConstantConditions", "unused"}) -public class WithInitializer { - /** */ - private final Object x = org.apache.hadoop.fs.FileSystem.getDefaultUri(null); +import java.util.HashMap; +import java.util.Map; +/** + * Shared state for HadoopMapReduceEmbeddedSelfTest. + */ +public class HadoopMapReduceEmbeddedSelfTestState { /** */ - WithInitializer() throws Exception { - // noop - } + public static Map flags = HadoopSharedMap.map(HadoopMapReduceEmbeddedSelfTestState.class) + .put("flags", new HashMap()); } diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopTaskExecutionSelfTestValues.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopTaskExecutionSelfTestValues.java new file mode 100644 index 0000000000000..beefa68f37c43 --- /dev/null +++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/state/HadoopTaskExecutionSelfTestValues.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.state; + +import org.apache.ignite.internal.processors.hadoop.HadoopSharedMap; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Shared state for HadoopTaskExecutionSelfTest. + */ +public class HadoopTaskExecutionSelfTestValues { + /** */ + private static HadoopSharedMap m = HadoopSharedMap.map(HadoopTaskExecutionSelfTestValues.class); + + /** Line count. */ + public static final AtomicInteger totalLineCnt = m.put("totalLineCnt", new AtomicInteger()); + + /** Executed tasks. */ + public static final AtomicInteger executedTasks = m.put("executedTasks", new AtomicInteger()); + + /** Cancelled tasks. */ + public static final AtomicInteger cancelledTasks = m.put("cancelledTasks", new AtomicInteger()); + + /** Working directory of each task. */ + public static final Map taskWorkDirs = m.put("taskWorkDirs", + new ConcurrentHashMap()); + + /** Mapper id to fail. */ + public static final AtomicInteger failMapperId = m.put("failMapperId", new AtomicInteger()); + + /** Number of splits of the current input.
*/ + public static final AtomicInteger splitsCount = m.put("splitsCount", new AtomicInteger()); +} diff --git a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java index 603fd5b3c2ad5..bbd92d131d21f 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java @@ -22,61 +22,61 @@ import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.ignite.IgniteSystemProperties; -import org.apache.ignite.client.hadoop.HadoopClientProtocolEmbeddedSelfTest; -import org.apache.ignite.client.hadoop.HadoopClientProtocolSelfTest; -import org.apache.ignite.hadoop.cache.HadoopTxConfigCacheTest; -import org.apache.ignite.hadoop.fs.KerberosHadoopFileSystemFactorySelfTest; -import org.apache.ignite.hadoop.util.BasicUserNameMapperSelfTest; -import org.apache.ignite.hadoop.util.ChainedUserNameMapperSelfTest; -import org.apache.ignite.hadoop.util.KerberosUserNameMapperSelfTest; -import org.apache.ignite.igfs.Hadoop1OverIgfsDualAsyncTest; -import org.apache.ignite.igfs.Hadoop1OverIgfsDualSyncTest; -import org.apache.ignite.igfs.HadoopFIleSystemFactorySelfTest; -import org.apache.ignite.igfs.HadoopIgfs20FileSystemLoopbackPrimarySelfTest; -import org.apache.ignite.igfs.HadoopIgfsDualAsyncSelfTest; -import org.apache.ignite.igfs.HadoopIgfsDualSyncSelfTest; -import org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest; -import org.apache.ignite.igfs.IgfsEventsTestSuite; -import org.apache.ignite.igfs.IgniteHadoopFileSystemClientSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemHandshakeSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoggerSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoggerStateSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopClassLoaderTest; -import org.apache.ignite.internal.processors.hadoop.HadoopCommandLineTest; -import org.apache.ignite.internal.processors.hadoop.HadoopDefaultMapReducePlannerSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopFileSystemsTest; -import org.apache.ignite.internal.processors.hadoop.HadoopGroupingTest; -import org.apache.ignite.internal.processors.hadoop.HadoopJobTrackerSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopMapReduceEmbeddedSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopMapReduceErrorResilienceTest; -import org.apache.ignite.internal.processors.hadoop.HadoopMapReduceTest; -import 
org.apache.ignite.internal.processors.hadoop.HadoopNoHadoopMapReduceTest; -import org.apache.ignite.internal.processors.hadoop.HadoopSerializationWrapperSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopSnappyFullMapReduceTest; -import org.apache.ignite.internal.processors.hadoop.HadoopSnappyTest; -import org.apache.ignite.internal.processors.hadoop.HadoopSortingTest; -import org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapperSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopTaskExecutionSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopTasksV1Test; -import org.apache.ignite.internal.processors.hadoop.HadoopTasksV2Test; -import org.apache.ignite.internal.processors.hadoop.HadoopUserLibsSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopV2JobSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopValidationSelfTest; -import org.apache.ignite.internal.processors.hadoop.HadoopWeightedMapReducePlannerTest; -import org.apache.ignite.internal.processors.hadoop.HadoopWeightedPlannerMapReduceTest; -import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopConcurrentHashMultimapSelftest; -import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopHashMapSelfTest; -import org.apache.ignite.internal.processors.hadoop.shuffle.collections.HadoopSkipListSelfTest; -import org.apache.ignite.internal.processors.hadoop.shuffle.streams.HadoopDataStreamSelfTest; +import org.apache.ignite.internal.processors.hadoop.HadoopTestClassLoader; +import org.apache.ignite.internal.processors.hadoop.impl.client.HadoopClientProtocolEmbeddedSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.client.HadoopClientProtocolSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopTxConfigCacheTest; +import org.apache.ignite.internal.processors.hadoop.impl.fs.KerberosHadoopFileSystemFactorySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.util.BasicUserNameMapperSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.util.ChainedUserNameMapperSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.util.KerberosUserNameMapperSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.Hadoop1OverIgfsDualAsyncTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.Hadoop1OverIgfsDualSyncTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopFIleSystemFactorySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfs20FileSystemLoopbackPrimarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsDualAsyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsDualSyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopSecondaryFileSystemConfigurationTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgfsEventsTestSuite; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemClientSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemHandshakeSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoggerSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoggerStateSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest; +import 
org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemSecondaryFileSystemInitializationSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopCommandLineTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopDefaultMapReducePlannerSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopFileSystemsTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopGroupingTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopJobTrackerSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopMapReduceEmbeddedSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopMapReduceErrorResilienceTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopMapReduceTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopNoHadoopMapReduceTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopSerializationWrapperSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopSnappyFullMapReduceTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopSnappyTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopSortingTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopSplitWrapperSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopTaskExecutionSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopTasksV1Test; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopTasksV2Test; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopUserLibsSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopV2JobSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopValidationSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopWeightedMapReducePlannerTest; +import org.apache.ignite.internal.processors.hadoop.impl.HadoopWeightedPlannerMapReduceTest; +import org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections.HadoopConcurrentHashMultimapSelftest; +import org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections.HadoopHashMapSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.shuffle.collections.HadoopSkipListSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.shuffle.streams.HadoopDataStreamSelfTest; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.U; @@ -107,7 +107,7 @@ public static TestSuite suite() throws Exception { downloadHadoop(); downloadHive(); - final ClassLoader ldr = 
TestSuite.class.getClassLoader(); + final ClassLoader ldr = new HadoopTestClassLoader(); TestSuite suite = new TestSuite("Ignite Hadoop MR Test Suite"); @@ -125,8 +125,6 @@ public static TestSuite suite() throws Exception { suite.addTest(new TestSuite(ldr.loadClass(HadoopSnappyTest.class.getName()))); suite.addTest(new TestSuite(ldr.loadClass(HadoopSnappyFullMapReduceTest.class.getName()))); - suite.addTest(new TestSuite(ldr.loadClass(HadoopClassLoaderTest.class.getName()))); - suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfs20FileSystemLoopbackPrimarySelfTest.class.getName()))); suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfsDualSyncSelfTest.class.getName()))); diff --git a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java index 4ed1d6594d056..21d78aba3707e 100644 --- a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java +++ b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java @@ -18,17 +18,18 @@ package org.apache.ignite.testsuites; import junit.framework.TestSuite; -import org.apache.ignite.igfs.HadoopIgfs20FileSystemShmemPrimarySelfTest; -import org.apache.ignite.igfs.IgfsEventsTestSuite; -import org.apache.ignite.igfs.IgniteHadoopFileSystemIpcCacheSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemExternalDualSyncSelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemExternalPrimarySelfTest; -import org.apache.ignite.igfs.IgniteHadoopFileSystemShmemExternalSecondarySelfTest; +import org.apache.ignite.internal.processors.hadoop.HadoopTestClassLoader; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfs20FileSystemShmemPrimarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgfsEventsTestSuite; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemIpcCacheSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemEmbeddedDualAsyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemEmbeddedDualSyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemEmbeddedPrimarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemEmbeddedSecondarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemExternalDualAsyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemExternalDualSyncSelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemExternalPrimarySelfTest; +import org.apache.ignite.internal.processors.hadoop.impl.igfs.IgniteHadoopFileSystemShmemExternalSecondarySelfTest; import org.apache.ignite.internal.processors.igfs.IgfsServerManagerIpcEndpointRegistrationOnLinuxAndMacSelfTest; import static 
org.apache.ignite.testsuites.IgniteHadoopTestSuite.downloadHadoop; @@ -45,7 +46,7 @@ public class IgniteIgfsLinuxAndMacOSTestSuite extends TestSuite { public static TestSuite suite() throws Exception { downloadHadoop(); - ClassLoader ldr = TestSuite.class.getClassLoader(); + final ClassLoader ldr = new HadoopTestClassLoader(); TestSuite suite = new TestSuite("Ignite IGFS Test Suite For Linux And Mac OS"); From 33a6878eccd60f86648067901613b713ef3bb71f Mon Sep 17 00:00:00 2001 From: Pavel Tupitsyn Date: Mon, 26 Sep 2016 14:56:51 +0300 Subject: [PATCH 24/69] IGNITE-3491 .NET: Allow type name without assembly for type properties in app.config --- .../IgniteConfigurationSerializerTest.cs | 4 ++-- .../IgniteConfigurationXmlSerializer.cs | 21 +++++++++++-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/IgniteConfigurationSerializerTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/IgniteConfigurationSerializerTest.cs index b6ee5cb86f839..8afef33a0f256 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/IgniteConfigurationSerializerTest.cs +++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/IgniteConfigurationSerializerTest.cs @@ -59,7 +59,7 @@ public void TestPredefinedXml() var xml = @" 127.1.1.1 - + Apache.Ignite.Core.Tests.IgniteConfigurationSerializerTest+FooClass, Apache.Ignite.Core.Tests @@ -70,7 +70,7 @@ public void TestPredefinedXml() -Xms1g-Xmx4g - + diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Common/IgniteConfigurationXmlSerializer.cs b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Common/IgniteConfigurationXmlSerializer.cs index 52fbc306670b5..e1df50bcbdbbb 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Common/IgniteConfigurationXmlSerializer.cs +++ b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Common/IgniteConfigurationXmlSerializer.cs @@ -26,6 +26,7 @@ namespace Apache.Ignite.Core.Impl.Common using System.Linq; using System.Reflection; using System.Xml; + using Apache.Ignite.Core.Impl.Binary; using Apache.Ignite.Core.Impl.Events; /// @@ -57,7 +58,7 @@ public static IgniteConfiguration Deserialize(XmlReader reader) var cfg = new IgniteConfiguration(); if (reader.NodeType == XmlNodeType.Element || reader.Read()) - ReadElement(reader, cfg); + ReadElement(reader, cfg, new TypeResolver()); return cfg; } @@ -164,7 +165,7 @@ private static void WriteComplexProperty(object obj, XmlWriter writer, Type valu /// /// Reads the element. 
/// - private static void ReadElement(XmlReader reader, object target) + private static void ReadElement(XmlReader reader, object target, TypeResolver resolver) { var targetType = target.GetType(); @@ -197,7 +198,7 @@ private static void ReadElement(XmlReader reader, object target) else if (propType.IsGenericType && propType.GetGenericTypeDefinition() == typeof (ICollection<>)) { // Collection - ReadCollectionProperty(reader, prop, target); + ReadCollectionProperty(reader, prop, target, resolver); } else if (propType.IsGenericType && propType.GetGenericTypeDefinition() == typeof (IDictionary<,>)) { @@ -207,7 +208,7 @@ private static void ReadElement(XmlReader reader, object target) else { // Nested object (complex property) - prop.SetValue(target, ReadComplexProperty(reader, propType, prop.Name, targetType), null); + prop.SetValue(target, ReadComplexProperty(reader, propType, prop.Name, targetType, resolver), null); } } } @@ -215,7 +216,8 @@ private static void ReadElement(XmlReader reader, object target) /// /// Reads the complex property (nested object). /// - private static object ReadComplexProperty(XmlReader reader, Type propType, string propName, Type targetType) + private static object ReadComplexProperty(XmlReader reader, Type propType, string propName, Type targetType, + TypeResolver resolver) { if (propType.IsAbstract) { @@ -225,7 +227,7 @@ private static object ReadComplexProperty(XmlReader reader, Type propType, strin propType = typeName == null ? null - : Type.GetType(typeName, false) ?? derivedTypes.FirstOrDefault(x => x.Name == typeName); + : resolver.ResolveType(typeName) ?? derivedTypes.FirstOrDefault(x => x.Name == typeName); if (propType == null) { @@ -249,7 +251,7 @@ private static object ReadComplexProperty(XmlReader reader, Type propType, strin { subReader.Read(); // read first element - ReadElement(subReader, nestedVal); + ReadElement(subReader, nestedVal, resolver); } return nestedVal; @@ -258,7 +260,8 @@ private static object ReadComplexProperty(XmlReader reader, Type propType, strin /// /// Reads the collection. /// - private static void ReadCollectionProperty(XmlReader reader, PropertyInfo prop, object target) + private static void ReadCollectionProperty(XmlReader reader, PropertyInfo prop, object target, + TypeResolver resolver) { var elementType = prop.PropertyType.GetGenericArguments().Single(); @@ -283,7 +286,7 @@ private static void ReadCollectionProperty(XmlReader reader, PropertyInfo prop, list.Add(converter != null ? converter.ConvertFromInvariantString(subReader.ReadString()) - : ReadComplexProperty(subReader, elementType, prop.Name, target.GetType())); + : ReadComplexProperty(subReader, elementType, prop.Name, target.GetType(), resolver)); } } From a4a933eb708dadb6b67f6c062db32ffeef819184 Mon Sep 17 00:00:00 2001 From: Igor Sapego Date: Mon, 26 Sep 2016 17:41:57 +0300 Subject: [PATCH 25/69] IGNITE-3883: ODBC: Implemented data-at-execution dialog. This closes #1073. 
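For context, "data-at-execution" is the standard ODBC mechanism in which a parameter is bound with an SQL_DATA_AT_EXEC / SQL_LEN_DATA_AT_EXEC length indicator, SQLExecute returns SQL_NEED_DATA, and the application then supplies the actual value through an SQLParamData/SQLPutData exchange. The sketch below illustrates that client-side flow; it is a minimal illustration only, and the hypothetical RunDataAtExecution helper, the handle names, the sample query and the column size of 100 are assumptions rather than part of the patch. The patch's own TestDataAtExecution case in queries_test.cpp exercises the same exchange (there via the older SQLBindParam call).

#include <sql.h>
#include <sqlext.h>
#include <string>

// Minimal sketch: stmt is a statement handle on an open connection;
// strField is the parameter value to be supplied at execution time.
void RunDataAtExecution(SQLHSTMT stmt, const std::string& strField)
{
    SQLCHAR query[] = "SELECT strField FROM TestType WHERE strField = ?";
    SQLPrepare(stmt, query, SQL_NTS);

    // Bind an application token instead of a value buffer and mark the
    // parameter as data-at-execution; the driver asks for the data later.
    SQLLEN token = 1;
    SQLLEN lenOrInd = SQL_LEN_DATA_AT_EXEC(static_cast<SQLLEN>(strField.size()));

    SQLBindParameter(stmt, 1, SQL_PARAM_INPUT, SQL_C_CHAR, SQL_VARCHAR,
        100, 0, &token, 0, &lenOrInd);

    SQLRETURN ret = SQLExecute(stmt);

    // The driver answers SQL_NEED_DATA and hands the bound tokens back one by one.
    while (ret == SQL_NEED_DATA)
    {
        SQLPOINTER which = 0;

        ret = SQLParamData(stmt, &which);

        if (ret == SQL_NEED_DATA && which == &token)
        {
            // Character/binary data may be streamed in several SQLPutData calls;
            // a single call is enough for this short string.
            SQLPutData(stmt, (SQLPOINTER)strField.c_str(),
                static_cast<SQLLEN>(strField.size()));
        }
    }

    // Once SQLParamData stops returning SQL_NEED_DATA the statement has executed
    // and rows can be fetched as usual with SQLFetch.
}
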
--- .../src/application_data_buffer_test.cpp | 129 ++++--- .../cpp/odbc-test/src/column_test.cpp | 10 +- .../cpp/odbc-test/src/queries_test.cpp | 144 +++++++- .../platforms/cpp/odbc-test/src/row_test.cpp | 4 +- modules/platforms/cpp/odbc/Makefile.am | 3 +- .../platforms/cpp/odbc/include/ignite/odbc.h | 5 +- .../ignite/odbc/app/application_data_buffer.h | 58 ++- .../odbc/include/ignite/odbc/app/parameter.h | 27 +- .../odbc/include/ignite/odbc/common_types.h | 18 +- .../ignite/odbc/config/configuration.h | 17 +- .../cpp/odbc/include/ignite/odbc/statement.h | 58 ++- .../platforms/cpp/odbc/project/vs/module.def | 1 - .../odbc/src/app/application_data_buffer.cpp | 343 +++++++++++++----- .../platforms/cpp/odbc/src/app/parameter.cpp | 126 ++++++- .../platforms/cpp/odbc/src/common_types.cpp | 3 + .../cpp/odbc/src/config/configuration.cpp | 27 ++ .../cpp/odbc/src/config/connection_info.cpp | 35 +- modules/platforms/cpp/odbc/src/connection.cpp | 2 +- .../odbc/src/diagnostic/diagnostic_record.cpp | 18 + .../platforms/cpp/odbc/src/entry_points.cpp | 30 +- modules/platforms/cpp/odbc/src/odbc.cpp | 43 ++- modules/platforms/cpp/odbc/src/statement.cpp | 150 +++++++- 22 files changed, 992 insertions(+), 259 deletions(-) diff --git a/modules/platforms/cpp/odbc-test/src/application_data_buffer_test.cpp b/modules/platforms/cpp/odbc-test/src/application_data_buffer_test.cpp index 1900e7542dfbb..f62bcd6fc6caa 100644 --- a/modules/platforms/cpp/odbc-test/src/application_data_buffer_test.cpp +++ b/modules/platforms/cpp/odbc-test/src/application_data_buffer_test.cpp @@ -43,8 +43,8 @@ BOOST_AUTO_TEST_SUITE(ApplicationDataBufferTestSuite) BOOST_AUTO_TEST_CASE(TestPutIntToString) { char buffer[1024]; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, buffer, sizeof(buffer), &reslen, &offset); @@ -76,8 +76,8 @@ BOOST_AUTO_TEST_CASE(TestPutIntToString) BOOST_AUTO_TEST_CASE(TestPutFloatToString) { char buffer[1024]; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, buffer, sizeof(buffer), &reslen, &offset); @@ -101,8 +101,8 @@ BOOST_AUTO_TEST_CASE(TestPutFloatToString) BOOST_AUTO_TEST_CASE(TestPutGuidToString) { char buffer[1024]; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, buffer, sizeof(buffer), &reslen, &offset); @@ -117,7 +117,7 @@ BOOST_AUTO_TEST_CASE(TestPutGuidToString) BOOST_AUTO_TEST_CASE(TestGetGuidFromString) { char buffer[] = "1da1ef8f-39ff-4d62-8b72-e8e9f3371801"; - SqlLen reslen; + SqlLen reslen = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, buffer, sizeof(buffer) - 1, &reslen, 0); @@ -129,8 +129,8 @@ BOOST_AUTO_TEST_CASE(TestGetGuidFromString) BOOST_AUTO_TEST_CASE(TestPutBinaryToString) { char buffer[1024]; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, buffer, sizeof(buffer), &reslen, &offset); @@ -145,8 +145,8 @@ BOOST_AUTO_TEST_CASE(TestPutBinaryToString) BOOST_AUTO_TEST_CASE(TestPutStringToString) { char buffer[1024]; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, buffer, sizeof(buffer), &reslen, &offset); @@ -161,8 +161,8 @@ BOOST_AUTO_TEST_CASE(TestPutStringToString) BOOST_AUTO_TEST_CASE(TestPutStringToWstring) { wchar_t buffer[1024]; - SqlLen reslen; - size_t* offset = 
0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_WCHAR, buffer, sizeof(buffer), &reslen, &offset); @@ -175,8 +175,8 @@ BOOST_AUTO_TEST_CASE(TestPutStringToWstring) BOOST_AUTO_TEST_CASE(TestPutStringToLong) { long numBuf; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_SIGNED_LONG, &numBuf, sizeof(numBuf), &reslen, &offset); @@ -190,8 +190,8 @@ BOOST_AUTO_TEST_CASE(TestPutStringToLong) BOOST_AUTO_TEST_CASE(TestPutStringToTiny) { int8_t numBuf; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_SIGNED_TINYINT, &numBuf, sizeof(numBuf), &reslen, &offset); @@ -205,8 +205,8 @@ BOOST_AUTO_TEST_CASE(TestPutStringToTiny) BOOST_AUTO_TEST_CASE(TestPutStringToFloat) { float numBuf; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_FLOAT, &numBuf, sizeof(numBuf), &reslen, &offset); @@ -220,8 +220,8 @@ BOOST_AUTO_TEST_CASE(TestPutStringToFloat) BOOST_AUTO_TEST_CASE(TestPutIntToFloat) { float numBuf; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_FLOAT, &numBuf, sizeof(numBuf), &reslen, &offset); @@ -247,8 +247,8 @@ BOOST_AUTO_TEST_CASE(TestPutIntToFloat) BOOST_AUTO_TEST_CASE(TestPutFloatToShort) { short numBuf; - SqlLen reslen; - size_t* offset = 0; + SqlLen reslen = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_SIGNED_SHORT, &numBuf, sizeof(numBuf), &reslen, &offset); @@ -268,7 +268,7 @@ BOOST_AUTO_TEST_CASE(TestPutFloatToShort) BOOST_AUTO_TEST_CASE(TestPutDecimalToDouble) { double numBuf; - SqlLen reslen; + SqlLen reslen = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_DOUBLE, &numBuf, sizeof(numBuf), &reslen, 0); @@ -297,7 +297,7 @@ BOOST_AUTO_TEST_CASE(TestPutDecimalToDouble) BOOST_AUTO_TEST_CASE(TestPutDecimalToLong) { long numBuf; - SqlLen reslen; + SqlLen reslen = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_SIGNED_LONG, &numBuf, sizeof(numBuf), &reslen, 0); @@ -324,7 +324,7 @@ BOOST_AUTO_TEST_CASE(TestPutDecimalToLong) BOOST_AUTO_TEST_CASE(TestPutDecimalToString) { char strBuf[64]; - SqlLen reslen; + SqlLen reslen = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, &strBuf, sizeof(strBuf), &reslen, 0); @@ -351,7 +351,7 @@ BOOST_AUTO_TEST_CASE(TestPutDecimalToString) BOOST_AUTO_TEST_CASE(TestPutDecimalToNumeric) { SQL_NUMERIC_STRUCT buf; - SqlLen reslen; + SqlLen reslen = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_NUMERIC, &buf, sizeof(buf), &reslen, 0); @@ -432,8 +432,8 @@ BOOST_AUTO_TEST_CASE(TestPutDateToDate) SQL_DATE_STRUCT buf = { 0 }; SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TDATE, &buf, sizeof(buf), &reslen, &offsetPtr); @@ -451,8 +451,8 @@ BOOST_AUTO_TEST_CASE(TestPutTimestampToDate) SQL_DATE_STRUCT buf = { 0 }; SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TDATE, &buf, sizeof(buf), &reslen, &offsetPtr); @@ -470,8 +470,8 @@ BOOST_AUTO_TEST_CASE(TestPutTimestampToTimestamp) SQL_TIMESTAMP_STRUCT buf = { 0 }; SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; 
ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TTIMESTAMP, &buf, sizeof(buf), &reslen, &offsetPtr); @@ -494,8 +494,8 @@ BOOST_AUTO_TEST_CASE(TestPutDateToTimestamp) SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TTIMESTAMP, &buf, sizeof(buf), &reslen, &offsetPtr); @@ -516,7 +516,7 @@ BOOST_AUTO_TEST_CASE(TestGetStringFromLong) { long numBuf = 42; SqlLen reslen = sizeof(numBuf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_SIGNED_LONG, &numBuf, reslen, &reslen, &offset); @@ -535,7 +535,7 @@ BOOST_AUTO_TEST_CASE(TestGetStringFromDouble) { double numBuf = 43.36; SqlLen reslen = sizeof(numBuf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_DOUBLE, &numBuf, reslen, &reslen, &offset); @@ -554,7 +554,7 @@ BOOST_AUTO_TEST_CASE(TestGetStringFromString) { char buf[] = "Some data 32d2d5hs"; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, &buf, reslen, &reslen, &offset); @@ -567,7 +567,7 @@ BOOST_AUTO_TEST_CASE(TestGetFloatFromUshort) { unsigned short numBuf = 7162; SqlLen reslen = sizeof(numBuf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_UNSIGNED_SHORT, &numBuf, reslen, &reslen, &offset); @@ -584,7 +584,7 @@ BOOST_AUTO_TEST_CASE(TestGetFloatFromString) { char buf[] = "28.562"; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, &buf, reslen, &reslen, &offset); @@ -601,7 +601,7 @@ BOOST_AUTO_TEST_CASE(TestGetFloatFromFloat) { float buf = 207.49f; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_FLOAT, &buf, reslen, &reslen, &offset); @@ -618,7 +618,7 @@ BOOST_AUTO_TEST_CASE(TestGetFloatFromDouble) { double buf = 893.162; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_DOUBLE, &buf, reslen, &reslen, &offset); @@ -635,7 +635,7 @@ BOOST_AUTO_TEST_CASE(TestGetIntFromString) { char buf[] = "39"; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, &buf, reslen, &reslen, &offset); @@ -660,7 +660,7 @@ BOOST_AUTO_TEST_CASE(TestGetIntFromFloat) { float buf = -107.49f; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_FLOAT, &buf, reslen, &reslen, &offset); @@ -685,7 +685,7 @@ BOOST_AUTO_TEST_CASE(TestGetIntFromDouble) { double buf = 42.97f; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_DOUBLE, &buf, reslen, &reslen, &offset); @@ -710,7 +710,7 @@ BOOST_AUTO_TEST_CASE(TestGetIntFromBigint) { uint64_t buf = 19; SqlLen reslen = sizeof(buf); - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_UNSIGNED_BIGINT, &buf, reslen, &reslen, &offset); @@ -744,8 +744,8 @@ BOOST_AUTO_TEST_CASE(TestGetIntWithOffset) { 42, sizeof(uint64_t) } }; - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_UNSIGNED_BIGINT, &buf[0].val, sizeof(buf[0].val), &buf[0].reslen, &offsetPtr); @@ -779,8 +779,8 @@ BOOST_AUTO_TEST_CASE(TestSetStringWithOffset) { "", 0 } }; - 
size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, &buf[0].val, sizeof(buf[0].val), &buf[0].reslen, &offsetPtr); @@ -814,8 +814,8 @@ BOOST_AUTO_TEST_CASE(TestGetDateFromString) char buf[] = "1999-02-22"; SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, &buf[0], sizeof(buf), &reslen, &offsetPtr); @@ -837,16 +837,11 @@ BOOST_AUTO_TEST_CASE(TestGetDateFromString) BOOST_AUTO_TEST_CASE(TestGetTimestampFromString) { - LOG_MSG("Test\n"); - LOG_MSG("Test\n"); - LOG_MSG("Test\n"); - LOG_MSG("Test\n"); - char buf[] = "2018-11-01 17:45:59"; SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_CHAR, &buf[0], sizeof(buf), &reslen, &offsetPtr); @@ -876,8 +871,8 @@ BOOST_AUTO_TEST_CASE(TestGetDateFromDate) SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TDATE, &buf, sizeof(buf), &reslen, &offsetPtr); @@ -907,8 +902,8 @@ BOOST_AUTO_TEST_CASE(TestGetTimestampFromDate) SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TDATE, &buf, sizeof(buf), &reslen, &offsetPtr); @@ -942,8 +937,8 @@ BOOST_AUTO_TEST_CASE(TestGetTimestampFromTimestamp) SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TTIMESTAMP, &buf, sizeof(buf), &reslen, &offsetPtr); @@ -978,8 +973,8 @@ BOOST_AUTO_TEST_CASE(TestGetDateFromTimestamp) SqlLen reslen = sizeof(buf); - size_t offset = 0; - size_t* offsetPtr = &offset; + int offset = 0; + int* offsetPtr = &offset; ApplicationDataBuffer appBuf(IGNITE_ODBC_C_TYPE_TTIMESTAMP, &buf, sizeof(buf), &reslen, &offsetPtr); diff --git a/modules/platforms/cpp/odbc-test/src/column_test.cpp b/modules/platforms/cpp/odbc-test/src/column_test.cpp index 4864a6a1a078b..6cbea8b648f4b 100644 --- a/modules/platforms/cpp/odbc-test/src/column_test.cpp +++ b/modules/platforms/cpp/odbc-test/src/column_test.cpp @@ -66,7 +66,7 @@ BOOST_AUTO_TEST_CASE(TestColumnShort) short shortBuf = 0; SqlLen reslen = 0; - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(type_traits::IGNITE_ODBC_C_TYPE_SIGNED_SHORT, &shortBuf, sizeof(shortBuf), &reslen, &offset); @@ -114,7 +114,7 @@ BOOST_AUTO_TEST_CASE(TestColumnString) char strBuf[1024] = {}; SqlLen reslen = 0; - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(type_traits::IGNITE_ODBC_C_TYPE_CHAR, &strBuf, sizeof(strBuf), &reslen, &offset); @@ -164,7 +164,7 @@ BOOST_AUTO_TEST_CASE(TestColumnStringSeveral) std::string strBuf(data.size() / 3 + 2, 0); SqlLen reslen = 0; - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(type_traits::IGNITE_ODBC_C_TYPE_CHAR, &strBuf[0], strBuf.size(), &reslen, &offset); @@ -246,7 +246,7 @@ BOOST_AUTO_TEST_CASE(TestColumnMultiString) char strBuf[1024] = {}; SqlLen reslen = 0; - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appBuf(type_traits::IGNITE_ODBC_C_TYPE_CHAR, &strBuf, sizeof(strBuf), &reslen, &offset); @@ -287,4 +287,4 @@ 
BOOST_AUTO_TEST_CASE(TestColumnMultiString) BOOST_REQUIRE(column2.GetUnreadDataLength() == 0); } -BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file +BOOST_AUTO_TEST_SUITE_END() diff --git a/modules/platforms/cpp/odbc-test/src/queries_test.cpp b/modules/platforms/cpp/odbc-test/src/queries_test.cpp index 7c10527b4818f..f0168bdf3118e 100644 --- a/modules/platforms/cpp/odbc-test/src/queries_test.cpp +++ b/modules/platforms/cpp/odbc-test/src/queries_test.cpp @@ -174,8 +174,11 @@ struct QueriesTestSuiteFixture SQLRETURN ret; - TestType in1(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); - TestType in2(8, 7, 6, 5, "4", 3.0f, 2.0, false, Guid(1, 0), BinaryUtils::MakeDateGmt(1976, 1, 12), BinaryUtils::MakeTimestampGmt(1978, 8, 21, 23, 13, 45, 456)); + TestType in1(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), + BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); + + TestType in2(8, 7, 6, 5, "4", 3.0f, 2.0, false, Guid(1, 0), BinaryUtils::MakeDateGmt(1976, 1, 12), + BinaryUtils::MakeTimestampGmt(1978, 8, 21, 23, 13, 45, 456)); testCache.Put(1, in1); testCache.Put(2, in2); @@ -335,8 +338,11 @@ BOOST_AUTO_TEST_CASE(TestTwoRowsString) SQLRETURN ret; - TestType in1(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); - TestType in2(8, 7, 6, 5, "4", 3.0f, 2.0, false, Guid(1, 0), BinaryUtils::MakeDateGmt(1976, 1, 12), BinaryUtils::MakeTimestampGmt(1978, 8, 21, 23, 13, 45, 999999999)); + TestType in1(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), + BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); + + TestType in2(8, 7, 6, 5, "4", 3.0f, 2.0, false, Guid(1, 0), BinaryUtils::MakeDateGmt(1976, 1, 12), + BinaryUtils::MakeTimestampGmt(1978, 8, 21, 23, 13, 45, 999999999)); testCache.Put(1, in1); testCache.Put(2, in2); @@ -430,7 +436,8 @@ BOOST_AUTO_TEST_CASE(TestOneRowString) SQLRETURN ret; - TestType in(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); + TestType in(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), + BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); testCache.Put(1, in); @@ -493,7 +500,8 @@ BOOST_AUTO_TEST_CASE(TestOneRowStringLen) SQLRETURN ret; - TestType in(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); + TestType in(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), + BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); testCache.Put(1, in); @@ -535,4 +543,128 @@ BOOST_AUTO_TEST_CASE(TestOneRowStringLen) BOOST_CHECK(ret == SQL_NO_DATA); } +BOOST_AUTO_TEST_CASE(TestDataAtExecution) +{ + Connect("DRIVER={Apache Ignite};ADDRESS=127.0.0.1:11110;CACHE=cache"); + + SQLRETURN ret; + + TestType in1(1, 2, 3, 4, "5", 6.0f, 7.0, true, Guid(8, 9), BinaryUtils::MakeDateGmt(1987, 6, 5), + BinaryUtils::MakeTimestampGmt(1998, 12, 27, 1, 2, 3, 456)); + + TestType in2(8, 7, 6, 5, "4", 3.0f, 2.0, false, Guid(1, 0), BinaryUtils::MakeDateGmt(1976, 1, 12), + BinaryUtils::MakeTimestampGmt(1978, 8, 21, 23, 13, 45, 999999999)); + + testCache.Put(1, in1); + testCache.Put(2, in2); + + const size_t columnsCnt = 11; + + SQLLEN columnLens[columnsCnt] = { 0 }; + SQLCHAR 
columns[columnsCnt][ODBC_BUFFER_SIZE] = { 0 }; + + // Binding columns. + for (SQLSMALLINT i = 0; i < columnsCnt; ++i) + { + ret = SQLBindCol(stmt, i + 1, SQL_C_CHAR, &columns[i], ODBC_BUFFER_SIZE, &columnLens[i]); + + if (!SQL_SUCCEEDED(ret)) + BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt)); + } + + SQLCHAR request[] = "SELECT i8Field, i16Field, i32Field, i64Field, strField, " + "floatField, doubleField, boolField, guidField, dateField, timestampField FROM TestType " + "WHERE i32Field = ? AND strField = ?"; + + ret = SQLPrepare(stmt, request, SQL_NTS); + + SQLLEN ind1 = 1; + SQLLEN ind2 = 2; + + SQLLEN len1 = SQL_DATA_AT_EXEC; + SQLLEN len2 = SQL_LEN_DATA_AT_EXEC(static_cast(in1.strField.size())); + + ret = SQLBindParam(stmt, 1, SQL_C_SLONG, SQL_INTEGER, 100, 100, &ind1, &len1); + + if (!SQL_SUCCEEDED(ret)) + BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt)); + + ret = SQLBindParam(stmt, 2, SQL_C_CHAR, SQL_VARCHAR, 100, 100, &ind2, &len2); + + if (!SQL_SUCCEEDED(ret)) + BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt)); + + ret = SQLExecute(stmt); + + BOOST_REQUIRE_EQUAL(ret, SQL_NEED_DATA); + + void* oind; + + ret = SQLParamData(stmt, &oind); + + BOOST_REQUIRE_EQUAL(ret, SQL_NEED_DATA); + + if (oind == &ind1) + ret = SQLPutData(stmt, &in1.i32Field, 0); + else if (oind == &ind2) + ret = SQLPutData(stmt, (SQLPOINTER)in1.strField.c_str(), (SQLLEN)in1.strField.size()); + else + BOOST_FAIL("Unknown indicator value"); + + if (!SQL_SUCCEEDED(ret)) + BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt)); + + ret = SQLParamData(stmt, &oind); + + BOOST_REQUIRE_EQUAL(ret, SQL_NEED_DATA); + + if (oind == &ind1) + ret = SQLPutData(stmt, &in1.i32Field, 0); + else if (oind == &ind2) + ret = SQLPutData(stmt, (SQLPOINTER)in1.strField.c_str(), (SQLLEN)in1.strField.size()); + else + BOOST_FAIL("Unknown indicator value"); + + if (!SQL_SUCCEEDED(ret)) + BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt)); + + ret = SQLParamData(stmt, &oind); + + if (!SQL_SUCCEEDED(ret)) + BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt)); + + ret = SQLFetch(stmt); + if (!SQL_SUCCEEDED(ret)) + BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt)); + + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[0])), "1"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[1])), "2"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[2])), "3"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[3])), "4"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[4])), "5"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[5])), "6"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[6])), "7"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[7])), "1"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[8])), "00000000-0000-0008-0000-000000000009"); + // Such format is used because Date returned as Timestamp. 
+ BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[9])), "1987-06-05 00:00:00"); + BOOST_CHECK_EQUAL(std::string(reinterpret_cast(columns[10])), "1998-12-27 01:02:03"); + + BOOST_CHECK_EQUAL(columnLens[0], 1); + BOOST_CHECK_EQUAL(columnLens[1], 1); + BOOST_CHECK_EQUAL(columnLens[2], 1); + BOOST_CHECK_EQUAL(columnLens[3], 1); + BOOST_CHECK_EQUAL(columnLens[4], 1); + BOOST_CHECK_EQUAL(columnLens[5], 1); + BOOST_CHECK_EQUAL(columnLens[6], 1); + BOOST_CHECK_EQUAL(columnLens[7], 1); + BOOST_CHECK_EQUAL(columnLens[8], 36); + BOOST_CHECK_EQUAL(columnLens[9], 19); + BOOST_CHECK_EQUAL(columnLens[10], 19); + + ret = SQLFetch(stmt); + BOOST_CHECK(ret == SQL_NO_DATA); +} + + BOOST_AUTO_TEST_SUITE_END() diff --git a/modules/platforms/cpp/odbc-test/src/row_test.cpp b/modules/platforms/cpp/odbc-test/src/row_test.cpp index 1fcd43e83f56a..f38e9c5b58f1d 100644 --- a/modules/platforms/cpp/odbc-test/src/row_test.cpp +++ b/modules/platforms/cpp/odbc-test/src/row_test.cpp @@ -82,7 +82,7 @@ void CheckRowData(Row& row, size_t rowIdx) char strBuf[1024]; SQLGUID guidBuf; char bitBuf; - size_t* offset = 0; + int* offset = 0; ApplicationDataBuffer appLongBuf(type_traits::IGNITE_ODBC_C_TYPE_SIGNED_LONG, &longBuf, sizeof(longBuf), &reslen, &offset); ApplicationDataBuffer appStrBuf(type_traits::IGNITE_ODBC_C_TYPE_CHAR, &strBuf, sizeof(strBuf), &reslen, &offset); @@ -206,4 +206,4 @@ BOOST_AUTO_TEST_CASE(TestTwoRows) CheckRowData(row, 1); } -BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file +BOOST_AUTO_TEST_SUITE_END() diff --git a/modules/platforms/cpp/odbc/Makefile.am b/modules/platforms/cpp/odbc/Makefile.am index 1781bc09ac4aa..3c8b37a5e97f2 100644 --- a/modules/platforms/cpp/odbc/Makefile.am +++ b/modules/platforms/cpp/odbc/Makefile.am @@ -34,7 +34,8 @@ AM_CXXFLAGS = \ -std=c++0x libignite_odbc_la_LIBADD = \ - @top_srcdir@/binary/libignite-binary.la + @top_srcdir@/binary/libignite-binary.la \ + -lodbcinst libignite_odbc_la_LDFLAGS = \ -no-undefined \ diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc.h b/modules/platforms/cpp/odbc/include/ignite/odbc.h index 40158ddf51f08..ec0861c565e19 100644 --- a/modules/platforms/cpp/odbc/include/ignite/odbc.h +++ b/modules/platforms/cpp/odbc/include/ignite/odbc.h @@ -252,6 +252,9 @@ namespace ignite SQLSMALLINT scope, SQLSMALLINT nullable); + SQLRETURN SQLParamData(SQLHSTMT stmt, SQLPOINTER* value); + + SQLRETURN SQLPutData(SQLHSTMT stmt, SQLPOINTER data, SQLLEN strLengthOrIndicator); } // namespace ignite -#endif //_IGNITE_ODBC \ No newline at end of file +#endif //_IGNITE_ODBC diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc/app/application_data_buffer.h b/modules/platforms/cpp/odbc/include/ignite/odbc/app/application_data_buffer.h index ed243593d896b..0ce781810be49 100644 --- a/modules/platforms/cpp/odbc/include/ignite/odbc/app/application_data_buffer.h +++ b/modules/platforms/cpp/odbc/include/ignite/odbc/app/application_data_buffer.h @@ -56,7 +56,8 @@ namespace ignite * @param reslen Resulting data length. * @param offset Pointer to buffer and reslen offset pointer. */ - ApplicationDataBuffer(type_traits::IgniteSqlType type, void* buffer, SqlLen buflen, SqlLen* reslen, size_t** offset = 0); + ApplicationDataBuffer(type_traits::IgniteSqlType type, void* buffer, + SqlLen buflen, SqlLen* reslen, int** offset = 0); /** * Copy constructor. @@ -83,7 +84,7 @@ namespace ignite * * @param offset Pointer to offset pointer. 
*/ - void SetPtrToOffsetPtr(size_t** offset) + void SetPtrToOffsetPtr(int** offset) { this->offset = offset; } @@ -271,6 +272,20 @@ namespace ignite */ const SqlLen* GetResLen() const; + /** + * Get raw data. + * + * @return Buffer data. + */ + void* GetData(); + + /** + * Get result data length. + * + * @return Data length pointer. + */ + SqlLen* GetResLen(); + /** * Get buffer size in bytes. * @@ -281,21 +296,42 @@ namespace ignite return buflen; } - private: /** - * Get raw data. + * Check if the data is going to be provided at execution. * - * @return Buffer data. + * @return True if the data is going to be provided + * at execution. */ - void* GetData(); + bool IsDataAtExec() const; /** - * Get result data length. + * Get size of the data that is going to be provided at + * execution. * - * @return Data length pointer. + * @return Size of the data that is going to be provided + * at execution. */ - SqlLen* GetResLen(); + size_t GetDataAtExecSize() const; + + /** + * Get size of the input buffer. + * + * @return Input buffer size, or zero if the data is going + * to be provided at execution. + */ + size_t GetInputSize() const; + + /** + * Get buffer type. + * + * @return Buffer type. + */ + type_traits::IgniteSqlType GetType() const + { + return type; + } + private: /** * Put value of numeric type in the buffer. * @@ -374,7 +410,7 @@ namespace ignite SqlLen* reslen; /** Pointer to implementation pointer to application offset */ - size_t** offset; + int** offset; }; /** Column binging map type alias. */ @@ -383,4 +419,4 @@ namespace ignite } } -#endif //_IGNITE_ODBC_APP_APPLICATION_DATA_BUFFER \ No newline at end of file +#endif //_IGNITE_ODBC_APP_APPLICATION_DATA_BUFFER diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc/app/parameter.h b/modules/platforms/cpp/odbc/include/ignite/odbc/app/parameter.h index d1ea6970b0bd7..0bd93952c2717 100644 --- a/modules/platforms/cpp/odbc/include/ignite/odbc/app/parameter.h +++ b/modules/platforms/cpp/odbc/include/ignite/odbc/app/parameter.h @@ -90,6 +90,25 @@ namespace ignite */ ApplicationDataBuffer& GetBuffer(); + /** + * Reset stored at-execution data. + */ + void ResetStoredData(); + + /** + * Check if all the at-execution data has been stored. + * @return + */ + bool IsDataReady() const; + + /** + * Put at-execution data. + * + * @param data Data buffer pointer. + * @param len Data length. + */ + void PutData(void* data, SqlLen len); + private: /** Underlying data buffer. */ ApplicationDataBuffer buffer; @@ -102,6 +121,12 @@ namespace ignite /** IPD decimal digits. */ int16_t decDigits; + + /** User provided null data at execution. */ + bool nullData; + + /** Stored at-execution data. */ + std::vector storedData; }; /** Parameter binging map type alias. */ @@ -110,4 +135,4 @@ namespace ignite } } -#endif //_IGNITE_ODBC_APP_PARAMETER \ No newline at end of file +#endif //_IGNITE_ODBC_APP_PARAMETER diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h b/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h index b01ec76fd1e1a..354cf0874ec87 100644 --- a/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h +++ b/modules/platforms/cpp/odbc/include/ignite/odbc/common_types.h @@ -21,6 +21,8 @@ #include #include "system/odbc_constants.h" +#include + namespace ignite { namespace odbc @@ -42,7 +44,10 @@ namespace ignite SQL_RESULT_ERROR, /** No more data. */ - SQL_RESULT_NO_DATA + SQL_RESULT_NO_DATA, + + /** No more data. 
*/ + SQL_RESULT_NEED_DATA }; /** @@ -68,9 +73,15 @@ namespace ignite */ SQL_STATE_01S02_OPTION_VALUE_CHANGED, + /** String data, length mismatch. */ + SQL_STATE_22026_DATA_LENGTH_MISMATCH, + /** Invalid cursor state. */ SQL_STATE_24000_INVALID_CURSOR_STATE, + /** Invalid descriptor index. */ + SQL_STATE_07009_INVALID_DESCRIPTOR_INDEX, + /** * The driver was unable to establish a connection with the data * source. @@ -99,6 +110,9 @@ namespace ignite */ SQL_STATE_HY001_MEMORY_ALLOCATION, + /** Invalid use of null pointer. */ + SQL_STATE_HY009_INVALID_USE_OF_NULL_POINTER, + /** Function sequence error. */ SQL_STATE_HY010_SEQUENCE_ERROR, @@ -221,4 +235,4 @@ namespace ignite } } -#endif //_IGNITE_ODBC_COMMON_TYPES \ No newline at end of file +#endif //_IGNITE_ODBC_COMMON_TYPES diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc/config/configuration.h b/modules/platforms/cpp/odbc/include/ignite/odbc/config/configuration.h index b5f385efb279e..4fe4f1b07ed72 100644 --- a/modules/platforms/cpp/odbc/include/ignite/odbc/config/configuration.h +++ b/modules/platforms/cpp/odbc/include/ignite/odbc/config/configuration.h @@ -167,10 +167,7 @@ namespace ignite * * @param port Server port. */ - void SetTcpPort(uint16_t port) - { - arguments[Key::port] = common::LexicalCast(port); - } + void SetTcpPort(uint16_t port); /** * Get DSN. @@ -217,10 +214,7 @@ namespace ignite * * @param server Server host. */ - void SetHost(const std::string& server) - { - arguments[Key::server] = server; - } + void SetHost(const std::string& server); /** * Get cache. @@ -257,10 +251,7 @@ namespace ignite * * @param address Address. */ - void SetAddress(const std::string& address) - { - arguments[Key::address] = address; - } + void SetAddress(const std::string& address); /** * Get protocol version. @@ -357,4 +348,4 @@ namespace ignite } } -#endif //_IGNITE_ODBC_CONFIG_CONFIGURATION \ No newline at end of file +#endif //_IGNITE_ODBC_CONFIG_CONFIGURATION diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc/statement.h b/modules/platforms/cpp/odbc/include/ignite/odbc/statement.h index 97d586f64bdd4..35f1e98b4f140 100644 --- a/modules/platforms/cpp/odbc/include/ignite/odbc/statement.h +++ b/modules/platforms/cpp/odbc/include/ignite/odbc/statement.h @@ -81,14 +81,14 @@ namespace ignite * * @param ptr Column binding offset pointer. */ - void SetColumnBindOffsetPtr(size_t* ptr); + void SetColumnBindOffsetPtr(int* ptr); /** * Get column binding offset pointer. * * @return Column binding offset pointer. */ - size_t* GetColumnBindOffsetPtr(); + int* GetColumnBindOffsetPtr(); /** * Get number of columns in the result set. @@ -129,14 +129,14 @@ namespace ignite * * @param ptr Parameter binding offset pointer. */ - void SetParamBindOffsetPtr(size_t* ptr); + void SetParamBindOffsetPtr(int* ptr); /** * Get parameter binding offset pointer. * * @return Parameter binding offset pointer. */ - size_t* GetParamBindOffsetPtr(); + int* GetParamBindOffsetPtr(); /** * Get value of the column in the result set. @@ -328,9 +328,33 @@ namespace ignite */ uint16_t* GetRowStatusesPtr(); + /** + * Select next parameter data for which is required. + * + * @param paramPtr Pointer to param id stored here. + */ + void SelectParam(void** paramPtr); + + /** + * Puts data for previously selected parameter or column. + * + * @param data Data. + * @param len Data length. + */ + void PutData(void* data, SqlLen len); + private: IGNITE_NO_COPY_ASSIGNMENT(Statement); + /** + * Bind parameter. + * + * @param paramIdx Parameter index. 
+ * @param param Parameter. + * @return Operation result. + */ + SqlResult InternalBindParameter(uint16_t paramIdx, const app::Parameter& param); + /** * Get value of the column in the result set. * @@ -487,6 +511,21 @@ namespace ignite */ SqlResult InternalAffectedRows(int64_t& rowCnt); + /** + * Select next parameter data for which is required. + * + * @param paramPtr Pointer to param id stored here. + */ + SqlResult InternalSelectParam(void** paramPtr); + + /** + * Puts data for previously selected parameter or column. + * + * @param data Data. + * @param len Data length. + */ + SqlResult InternalPutData(void* data, SqlLen len); + /** * Constructor. * Called by friend classes. @@ -514,12 +553,15 @@ namespace ignite uint16_t* rowStatuses; /** Offset added to pointers to change binding of parameters. */ - size_t* paramBindOffset; + int* paramBindOffset; - /* Offset added to pointers to change binding of column data. */ - size_t* columnBindOffset; + /** Offset added to pointers to change binding of column data. */ + int* columnBindOffset; + + /** Index of the parameter, which is currently being set. */ + uint16_t currentParamIdx; }; } } -#endif //_IGNITE_ODBC_STATEMENT \ No newline at end of file +#endif //_IGNITE_ODBC_STATEMENT diff --git a/modules/platforms/cpp/odbc/project/vs/module.def b/modules/platforms/cpp/odbc/project/vs/module.def index c94869bed2687..df7bd223b2bfe 100644 --- a/modules/platforms/cpp/odbc/project/vs/module.def +++ b/modules/platforms/cpp/odbc/project/vs/module.def @@ -35,7 +35,6 @@ EXPORTS SQLTables SQLBrowseConnect SQLColumnPrivileges - SQLDescribeParam SQLForeignKeys SQLMoreResults SQLNativeSql diff --git a/modules/platforms/cpp/odbc/src/app/application_data_buffer.cpp b/modules/platforms/cpp/odbc/src/app/application_data_buffer.cpp index 2190c0036fd1e..1438b0c9e238b 100644 --- a/modules/platforms/cpp/odbc/src/app/application_data_buffer.cpp +++ b/modules/platforms/cpp/odbc/src/app/application_data_buffer.cpp @@ -36,20 +36,32 @@ namespace ignite using ignite::impl::binary::BinaryUtils; ApplicationDataBuffer::ApplicationDataBuffer() : - type(type_traits::IGNITE_ODBC_C_TYPE_UNSUPPORTED), buffer(0), buflen(0), reslen(0), offset(0) + type(type_traits::IGNITE_ODBC_C_TYPE_UNSUPPORTED), + buffer(0), + buflen(0), + reslen(0), + offset(0) { // No-op. } ApplicationDataBuffer::ApplicationDataBuffer(type_traits::IgniteSqlType type, - void* buffer, SqlLen buflen, SqlLen* reslen, size_t** offset) : - type(type), buffer(buffer), buflen(buflen), reslen(reslen), offset(offset) + void* buffer, SqlLen buflen, SqlLen* reslen, int** offset) : + type(type), + buffer(buffer), + buflen(buflen), + reslen(reslen), + offset(offset) { // No-op. } ApplicationDataBuffer::ApplicationDataBuffer(const ApplicationDataBuffer & other) : - type(other.type), buffer(other.buffer), buflen(other.buflen), reslen(other.reslen), offset(other.offset) + type(other.type), + buffer(other.buffer), + buflen(other.buflen), + reslen(other.reslen), + offset(other.offset) { // No-op. } @@ -74,6 +86,10 @@ namespace ignite void ApplicationDataBuffer::PutNum(T value) { using namespace type_traits; + + SqlLen* resLenPtr = GetResLen(); + void* dataPtr = GetData(); + switch (type) { case IGNITE_ODBC_C_TYPE_SIGNED_TINYINT: @@ -151,10 +167,10 @@ namespace ignite case IGNITE_ODBC_C_TYPE_NUMERIC: { - if (GetData()) + if (dataPtr) { SQL_NUMERIC_STRUCT* out = - reinterpret_cast(GetData()); + reinterpret_cast(dataPtr); uint64_t uval = static_cast(value < 0 ? 
-value : value); @@ -172,27 +188,26 @@ namespace ignite case IGNITE_ODBC_C_TYPE_BINARY: case IGNITE_ODBC_C_TYPE_DEFAULT: { - if (GetData()) + if (dataPtr) { if (buflen >= sizeof(value)) { - memcpy(GetData(), &value, sizeof(value)); + memcpy(dataPtr, &value, sizeof(value)); - if (GetResLen()) - *GetResLen() = sizeof(value); + if (resLenPtr) + *resLenPtr = sizeof(value); } else { - memcpy(GetData(), &value, static_cast(buflen)); + memcpy(dataPtr, &value, static_cast(buflen)); - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } - else if (GetResLen()) - { - *GetResLen() = sizeof(value); - } + else if (resLenPtr) + *resLenPtr = sizeof(value); + break; } @@ -212,8 +227,8 @@ namespace ignite default: { - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } } @@ -221,9 +236,11 @@ namespace ignite template void ApplicationDataBuffer::PutNumToNumBuffer(Tin value) { - if (GetData()) + void* dataPtr = GetData(); + + if (dataPtr) { - Tbuf* out = reinterpret_cast(GetData()); + Tbuf* out = reinterpret_cast(dataPtr); *out = static_cast(value); } } @@ -257,11 +274,14 @@ namespace ignite { SqlLen charSize = static_cast(sizeof(OutCharT)); - if (GetData()) + SqlLen* resLenPtr = GetResLen(); + void* dataPtr = GetData(); + + if (dataPtr) { if (buflen >= charSize) { - OutCharT* out = reinterpret_cast(GetData()); + OutCharT* out = reinterpret_cast(dataPtr); SqlLen outLen = (buflen / charSize) - 1; @@ -273,38 +293,41 @@ namespace ignite out[toCopy] = 0; } - if (GetResLen()) + if (resLenPtr) { if (buflen >= static_cast((value.size() + 1) * charSize)) - *GetResLen() = static_cast(value.size()); + *resLenPtr = static_cast(value.size()); else - *GetResLen() = SQL_NO_TOTAL; + *resLenPtr = SQL_NO_TOTAL; } } - else if (GetResLen()) - *GetResLen() = value.size(); + else if (resLenPtr) + *resLenPtr = value.size(); } void ApplicationDataBuffer::PutRawDataToBuffer(void *data, size_t len) { SqlLen ilen = static_cast(len); - if (GetData()) + SqlLen* resLenPtr = GetResLen(); + void* dataPtr = GetData(); + + if (dataPtr) { size_t toCopy = static_cast(std::min(buflen, ilen)); - memcpy(GetData(), data, toCopy); + memcpy(dataPtr, data, toCopy); - if (GetResLen()) + if (resLenPtr) { if (buflen >= ilen) - *GetResLen() = ilen; + *resLenPtr = ilen; else - *GetResLen() = SQL_NO_TOTAL; + *resLenPtr = SQL_NO_TOTAL; } } - else if (GetResLen()) - *GetResLen() = ilen; + else if (resLenPtr) + *resLenPtr = ilen; } void ApplicationDataBuffer::PutInt8(int8_t value) @@ -411,8 +434,10 @@ namespace ignite default: { - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + SqlLen* resLenPtr = GetResLen(); + + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } @@ -456,8 +481,10 @@ namespace ignite default: { - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + SqlLen* resLenPtr = GetResLen(); + + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } } @@ -524,8 +551,10 @@ namespace ignite default: { - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + SqlLen* resLenPtr = GetResLen(); + + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } @@ -534,13 +563,16 @@ namespace ignite void ApplicationDataBuffer::PutNull() { - if (GetResLen()) - *GetResLen() = SQL_NULL_DATA; + SqlLen* resLenPtr = GetResLen(); + + if (resLenPtr) + *resLenPtr = SQL_NULL_DATA; } void ApplicationDataBuffer::PutDecimal(const common::Decimal& value) { using namespace type_traits; + switch (type) { case IGNITE_ODBC_C_TYPE_SIGNED_TINYINT: @@ -612,8 +644,10 @@ namespace ignite case IGNITE_ODBC_C_TYPE_BINARY: 
default: { - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + SqlLen* resLenPtr = GetResLen(); + + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } } @@ -626,28 +660,31 @@ namespace ignite BinaryUtils::DateToCTm(value, tmTime); + SqlLen* resLenPtr = GetResLen(); + void* dataPtr = GetData(); + switch (type) { case IGNITE_ODBC_C_TYPE_CHAR: { - char* buffer = reinterpret_cast(GetData()); + char* buffer = reinterpret_cast(dataPtr); if (buffer) { strftime(buffer, GetSize(), "%Y-%m-%d", &tmTime); - if (GetResLen()) - *GetResLen() = strlen(buffer); + if (resLenPtr) + *resLenPtr = strlen(buffer); } - else if (GetResLen()) - *GetResLen() = sizeof("HHHH-MM-DD") - 1; + else if (resLenPtr) + *resLenPtr = sizeof("HHHH-MM-DD") - 1; break; } case IGNITE_ODBC_C_TYPE_WCHAR: { - SQLWCHAR* buffer = reinterpret_cast(GetData()); + SQLWCHAR* buffer = reinterpret_cast(dataPtr); if (buffer) { @@ -662,18 +699,18 @@ namespace ignite buffer[toCopy] = 0; - if (GetResLen()) - *GetResLen() = toCopy; + if (resLenPtr) + *resLenPtr = toCopy; } - else if (GetResLen()) - *GetResLen() = sizeof("HHHH-MM-DD") - 1; + else if (resLenPtr) + *resLenPtr = sizeof("HHHH-MM-DD") - 1; break; } case IGNITE_ODBC_C_TYPE_TDATE: { - SQL_DATE_STRUCT* buffer = reinterpret_cast(GetData()); + SQL_DATE_STRUCT* buffer = reinterpret_cast(dataPtr); buffer->year = tmTime.tm_year + 1900; buffer->month = tmTime.tm_mon + 1; @@ -684,7 +721,7 @@ namespace ignite case IGNITE_ODBC_C_TYPE_TTIMESTAMP: { - SQL_TIMESTAMP_STRUCT* buffer = reinterpret_cast(GetData()); + SQL_TIMESTAMP_STRUCT* buffer = reinterpret_cast(dataPtr); buffer->year = tmTime.tm_year + 1900; buffer->month = tmTime.tm_mon + 1; @@ -700,11 +737,11 @@ namespace ignite case IGNITE_ODBC_C_TYPE_BINARY: case IGNITE_ODBC_C_TYPE_DEFAULT: { - if (GetData()) - memcpy(GetData(), &value, std::min(static_cast(buflen), sizeof(value))); + if (dataPtr) + memcpy(dataPtr, &value, std::min(static_cast(buflen), sizeof(value))); - if (GetResLen()) - *GetResLen() = sizeof(value); + if (resLenPtr) + *resLenPtr = sizeof(value); break; } @@ -723,8 +760,8 @@ namespace ignite case IGNITE_ODBC_C_TYPE_NUMERIC: default: { - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } } @@ -737,28 +774,31 @@ namespace ignite BinaryUtils::TimestampToCTm(value, tmTime); + SqlLen* resLenPtr = GetResLen(); + void* dataPtr = GetData(); + switch (type) { case IGNITE_ODBC_C_TYPE_CHAR: { - char* buffer = reinterpret_cast(GetData()); + char* buffer = reinterpret_cast(dataPtr); if (buffer) { strftime(buffer, GetSize(), "%Y-%m-%d %H:%M:%S", &tmTime); - if (GetResLen()) - *GetResLen() = strlen(buffer); + if (resLenPtr) + *resLenPtr = strlen(buffer); } - else if (GetResLen()) - *GetResLen() = sizeof("HHHH-MM-DD HH:MM:SS") - 1; + else if (resLenPtr) + *resLenPtr = sizeof("HHHH-MM-DD HH:MM:SS") - 1; break; } case IGNITE_ODBC_C_TYPE_WCHAR: { - SQLWCHAR* buffer = reinterpret_cast(GetData()); + SQLWCHAR* buffer = reinterpret_cast(dataPtr); if (buffer) { @@ -773,18 +813,18 @@ namespace ignite buffer[toCopy] = 0; - if (GetResLen()) - *GetResLen() = toCopy; + if (resLenPtr) + *resLenPtr = toCopy; } - else if (GetResLen()) - *GetResLen() = sizeof("HHHH-MM-DD HH:MM:SS") - 1; + else if (resLenPtr) + *resLenPtr = sizeof("HHHH-MM-DD HH:MM:SS") - 1; break; } case IGNITE_ODBC_C_TYPE_TDATE: { - SQL_DATE_STRUCT* buffer = reinterpret_cast(GetData()); + SQL_DATE_STRUCT* buffer = reinterpret_cast(dataPtr); buffer->year = tmTime.tm_year + 1900; buffer->month = tmTime.tm_mon + 1; @@ -795,7 +835,7 @@ namespace ignite 
case IGNITE_ODBC_C_TYPE_TTIMESTAMP: { - SQL_TIMESTAMP_STRUCT* buffer = reinterpret_cast(GetData()); + SQL_TIMESTAMP_STRUCT* buffer = reinterpret_cast(dataPtr); buffer->year = tmTime.tm_year + 1900; buffer->month = tmTime.tm_mon + 1; @@ -811,11 +851,11 @@ namespace ignite case IGNITE_ODBC_C_TYPE_BINARY: case IGNITE_ODBC_C_TYPE_DEFAULT: { - if (GetData()) - memcpy(GetData(), &value, std::min(static_cast(buflen), sizeof(value))); + if (dataPtr) + memcpy(dataPtr, &value, std::min(static_cast(buflen), sizeof(value))); - if (GetResLen()) - *GetResLen() = sizeof(value); + if (resLenPtr) + *resLenPtr = sizeof(value); break; } @@ -834,8 +874,8 @@ namespace ignite case IGNITE_ODBC_C_TYPE_NUMERIC: default: { - if (GetResLen()) - *GetResLen() = SQL_NO_TOTAL; + if (resLenPtr) + *resLenPtr = SQL_NO_TOTAL; } } } @@ -849,8 +889,13 @@ namespace ignite { case IGNITE_ODBC_C_TYPE_CHAR: { + size_t paramLen = GetInputSize(); + + if (!paramLen) + break; + res.assign(reinterpret_cast(GetData()), - std::min(maxLen, static_cast(buflen))); + std::min(maxLen, paramLen)); break; } @@ -953,7 +998,12 @@ namespace ignite { case IGNITE_ODBC_C_TYPE_CHAR: { - std::string str(reinterpret_cast(GetData()), static_cast(buflen)); + size_t paramLen = GetInputSize(); + + if (!paramLen) + break; + + std::string str(reinterpret_cast(GetData()), paramLen); std::stringstream converter; @@ -1014,13 +1064,18 @@ namespace ignite { using namespace type_traits; - T res = 0; + T res = T(); switch (type) { case IGNITE_ODBC_C_TYPE_CHAR: { - std::string str = GetString(static_cast(buflen)); + size_t paramLen = GetInputSize(); + + if (!paramLen) + break; + + std::string str = GetString(paramLen); std::stringstream converter; @@ -1158,9 +1213,13 @@ namespace ignite case IGNITE_ODBC_C_TYPE_CHAR: { + size_t paramLen = GetInputSize(); + + if (!paramLen) + break; + std::string str = utility::SqlStringToString( - reinterpret_cast(GetData()), - static_cast(GetSize())); + reinterpret_cast(GetData()), static_cast(paramLen)); sscanf(str.c_str(), "%d-%d-%d %d:%d:%d", &tmTime.tm_year, &tmTime.tm_mon, &tmTime.tm_mday, &tmTime.tm_hour, &tmTime.tm_min, &tmTime.tm_sec); @@ -1217,9 +1276,13 @@ namespace ignite case IGNITE_ODBC_C_TYPE_CHAR: { + size_t paramLen = GetInputSize(); + + if (!paramLen) + break; + std::string str = utility::SqlStringToString( - reinterpret_cast(GetData()), - static_cast(GetSize())); + reinterpret_cast(GetData()), static_cast(paramLen)); sscanf(str.c_str(), "%d-%d-%d %d:%d:%d", &tmTime.tm_year, &tmTime.tm_mon, &tmTime.tm_mday, &tmTime.tm_hour, &tmTime.tm_min, &tmTime.tm_sec); @@ -1245,7 +1308,12 @@ namespace ignite { case IGNITE_ODBC_C_TYPE_CHAR: { - std::string str = GetString(static_cast(buflen)); + size_t paramLen = GetInputSize(); + + if (!paramLen) + break; + + std::string str = GetString(paramLen); std::stringstream converter; @@ -1315,6 +1383,101 @@ namespace ignite return utility::GetPointerWithOffset(ptr, **offset); } + + bool ApplicationDataBuffer::IsDataAtExec() const + { + const SqlLen* resLenPtr = GetResLen(); + + if (!resLenPtr) + return false; + + int32_t ilen = static_cast(*resLenPtr); + + return ilen <= SQL_LEN_DATA_AT_EXEC_OFFSET || ilen == SQL_DATA_AT_EXEC; + } + + size_t ApplicationDataBuffer::GetDataAtExecSize() const + { + using namespace type_traits; + + switch (type) + { + case IGNITE_ODBC_C_TYPE_WCHAR: + case IGNITE_ODBC_C_TYPE_CHAR: + case IGNITE_ODBC_C_TYPE_BINARY: + { + const SqlLen* resLenPtr = GetResLen(); + + if (!resLenPtr) + return 0; + + int32_t ilen = static_cast(*resLenPtr); + + if (ilen <= 
SQL_LEN_DATA_AT_EXEC_OFFSET) + ilen = static_cast(SQL_LEN_DATA_AT_EXEC(ilen)); + else + ilen = 0; + + if (type == IGNITE_ODBC_C_TYPE_WCHAR) + ilen *= 2; + + return ilen; + } + + case IGNITE_ODBC_C_TYPE_SIGNED_SHORT: + case IGNITE_ODBC_C_TYPE_UNSIGNED_SHORT: + return sizeof(short); + + case IGNITE_ODBC_C_TYPE_SIGNED_LONG: + case IGNITE_ODBC_C_TYPE_UNSIGNED_LONG: + return sizeof(long); + + case IGNITE_ODBC_C_TYPE_FLOAT: + return sizeof(float); + + case IGNITE_ODBC_C_TYPE_DOUBLE: + return sizeof(double); + + case IGNITE_ODBC_C_TYPE_BIT: + case IGNITE_ODBC_C_TYPE_SIGNED_TINYINT: + case IGNITE_ODBC_C_TYPE_UNSIGNED_TINYINT: + return sizeof(char); + + case IGNITE_ODBC_C_TYPE_SIGNED_BIGINT: + case IGNITE_ODBC_C_TYPE_UNSIGNED_BIGINT: + return sizeof(SQLBIGINT); + + case IGNITE_ODBC_C_TYPE_TDATE: + return sizeof(SQL_DATE_STRUCT); + + case IGNITE_ODBC_C_TYPE_TTIME: + return sizeof(SQL_TIME_STRUCT); + + case IGNITE_ODBC_C_TYPE_TTIMESTAMP: + return sizeof(SQL_TIMESTAMP_STRUCT); + + case IGNITE_ODBC_C_TYPE_NUMERIC: + return sizeof(SQL_NUMERIC_STRUCT); + + case IGNITE_ODBC_C_TYPE_GUID: + return sizeof(SQLGUID); + + case IGNITE_ODBC_C_TYPE_DEFAULT: + case IGNITE_ODBC_C_TYPE_UNSUPPORTED: + default: + break; + } + + return 0; + } + + size_t ApplicationDataBuffer::GetInputSize() const + { + if (!IsDataAtExec()) + return static_cast(GetSize()); + + return GetDataAtExecSize(); + } } } } diff --git a/modules/platforms/cpp/odbc/src/app/parameter.cpp b/modules/platforms/cpp/odbc/src/app/parameter.cpp index 3e1464259609f..d70ef6aab2da8 100644 --- a/modules/platforms/cpp/odbc/src/app/parameter.cpp +++ b/modules/platforms/cpp/odbc/src/app/parameter.cpp @@ -33,7 +33,9 @@ namespace ignite buffer(), sqlType(), columnSize(), - decDigits() + decDigits(), + nullData(false), + storedData() { // No-op. } @@ -43,7 +45,9 @@ namespace ignite buffer(buffer), sqlType(sqlType), columnSize(columnSize), - decDigits(decDigits) + decDigits(decDigits), + nullData(false), + storedData() { // No-op. } @@ -52,7 +56,9 @@ namespace ignite buffer(other.buffer), sqlType(other.sqlType), columnSize(other.columnSize), - decDigits(other.decDigits) + decDigits(other.decDigits), + nullData(other.nullData), + storedData(other.storedData) { // No-op. } @@ -74,67 +80,85 @@ namespace ignite void Parameter::Write(ignite::impl::binary::BinaryWriterImpl& writer) const { + if (buffer.GetInputSize() == SQL_NULL_DATA) + { + writer.WriteNull(); + + return; + } + + // Buffer to use to get data. 
+ ApplicationDataBuffer buf(buffer); + + SqlLen storedDataLen = static_cast(storedData.size()); + + if (buffer.IsDataAtExec()) + { + buf = ApplicationDataBuffer(buffer.GetType(), + const_cast(&storedData[0]), storedDataLen, &storedDataLen); + } + switch (sqlType) { case SQL_CHAR: case SQL_VARCHAR: case SQL_LONGVARCHAR: { - utility::WriteString(writer, buffer.GetString(columnSize)); + utility::WriteString(writer, buf.GetString(columnSize)); break; } case SQL_SMALLINT: { - writer.WriteInt16(buffer.GetInt16()); + writer.WriteObject(buf.GetInt16()); break; } case SQL_INTEGER: { - writer.WriteInt32(buffer.GetInt32()); + writer.WriteObject(buf.GetInt32()); break; } case SQL_FLOAT: { - writer.WriteFloat(buffer.GetFloat()); + writer.WriteObject(buf.GetFloat()); break; } case SQL_DOUBLE: { - writer.WriteDouble(buffer.GetDouble()); + writer.WriteObject(buf.GetDouble()); break; } case SQL_TINYINT: { - writer.WriteInt8(buffer.GetInt8()); + writer.WriteObject(buf.GetInt8()); break; } case SQL_BIT: { - writer.WriteBool(buffer.GetInt8() != 0); + writer.WriteObject(buf.GetInt8() != 0); break; } case SQL_BIGINT: { - writer.WriteInt64(buffer.GetInt64()); + writer.WriteObject(buf.GetInt64()); break; } case SQL_DATE: { - writer.WriteDate(buffer.GetDate()); + writer.WriteDate(buf.GetDate()); break; } case SQL_TIMESTAMP: { - writer.WriteTimestamp(buffer.GetTimestamp()); + writer.WriteTimestamp(buf.GetTimestamp()); break; } @@ -142,14 +166,23 @@ namespace ignite case SQL_VARBINARY: case SQL_LONGVARBINARY: { - writer.WriteInt8Array(reinterpret_cast(buffer.GetData()), - static_cast(buffer.GetSize())); + const ApplicationDataBuffer& constRef = buf; + + const SqlLen* resLenPtr = constRef.GetResLen(); + + if (!resLenPtr) + break; + + int32_t paramLen = static_cast(*resLenPtr); + + writer.WriteInt8Array(reinterpret_cast(constRef.GetData()), paramLen); + break; } case SQL_GUID: { - writer.WriteGuid(buffer.GetGuid()); + writer.WriteGuid(buf.GetGuid()); break; } @@ -157,7 +190,7 @@ namespace ignite case SQL_DECIMAL: { common::Decimal dec; - buffer.GetDecimal(dec); + buf.GetDecimal(dec); utility::WriteDecimal(writer, dec); @@ -169,10 +202,67 @@ namespace ignite } } - ApplicationDataBuffer & Parameter::GetBuffer() + ApplicationDataBuffer& Parameter::GetBuffer() { return buffer; } + + void Parameter::ResetStoredData() + { + storedData.clear(); + + if (buffer.IsDataAtExec()) + storedData.reserve(buffer.GetDataAtExecSize()); + } + + bool Parameter::IsDataReady() const + { + return !buffer.IsDataAtExec() || + storedData.size() == buffer.GetDataAtExecSize(); + } + + void Parameter::PutData(void* data, SqlLen len) + { + if (len == SQL_DEFAULT_PARAM) + return; + + if (len == SQL_NULL_DATA) + { + nullData = true; + + return; + } + + if (buffer.GetType() == type_traits::IGNITE_ODBC_C_TYPE_CHAR || + buffer.GetType() == type_traits::IGNITE_ODBC_C_TYPE_BINARY) + { + SqlLen slen = len; + + if (buffer.GetType() == type_traits::IGNITE_ODBC_C_TYPE_CHAR && slen == SQL_NTSL) + { + const char* str = reinterpret_cast(data); + + slen = strlen(str); + } + + if (slen <= 0) + return; + + size_t beginPos = storedData.size(); + + storedData.resize(storedData.size() + static_cast(slen)); + + memcpy(&storedData[beginPos], data, static_cast(slen)); + + return; + } + + size_t dataSize = buffer.GetDataAtExecSize(); + + storedData.resize(dataSize); + + memcpy(&storedData[0], data, dataSize); + } } } } diff --git a/modules/platforms/cpp/odbc/src/common_types.cpp b/modules/platforms/cpp/odbc/src/common_types.cpp index 276d9fdfafc4d..36d11c2c13602 100644 
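
The parameter.cpp changes above buffer data that the application supplies at execution time (Parameter::PutData filling storedData) so that Write() can serialize a data-at-execution parameter like an ordinary bound value. For context, this is roughly how an ODBC application drives that path from its side; a minimal sketch, assuming an already allocated and connected statement handle and an illustrative LONG_DATA table (the handle, table and value names are not taken from the patch):

// Application-side data-at-execution flow (illustrative only).
#include <sql.h>
#include <sqlext.h>

void InsertLongValue(SQLHSTMT stmt, const char* val, SQLLEN valLen)
{
    // Announce the total length up front with SQL_LEN_DATA_AT_EXEC; the driver
    // reports SQL_NEED_LONG_DATA_LEN = "Y", so the length must be known here.
    SQLLEN lenInd = SQL_LEN_DATA_AT_EXEC(valLen);

    // A data-at-exec parameter: the buffer pointer passed here is what
    // SQLParamData later hands back as the token identifying the parameter.
    SQLBindParameter(stmt, 1, SQL_PARAM_INPUT, SQL_C_CHAR, SQL_LONGVARCHAR,
        static_cast<SQLULEN>(valLen), 0, const_cast<char*>(val), 0, &lenInd);

    SQLRETURN ret = SQLExecDirect(stmt,
        (SQLCHAR*)"INSERT INTO LONG_DATA (ID, VAL) VALUES (1, ?)", SQL_NTS);

    // SQL_NEED_DATA asks the application for the deferred parameter value.
    while (ret == SQL_NEED_DATA)
    {
        SQLPOINTER token = 0;

        ret = SQLParamData(stmt, &token);

        if (ret == SQL_NEED_DATA)
        {
            // The value may be sent in several chunks; one chunk suffices here.
            SQLPutData(stmt, const_cast<char*>(val), valLen);
        }
    }
}

Once every data-at-exec parameter has been fed, the next SQLParamData call executes the statement; on the driver side that result maps through SQL_RESULT_NEED_DATA to SQL_NEED_DATA in the common_types.cpp hunk that follows.
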
--- a/modules/platforms/cpp/odbc/src/common_types.cpp +++ b/modules/platforms/cpp/odbc/src/common_types.cpp @@ -37,6 +37,9 @@ namespace ignite case SQL_RESULT_NO_DATA: return SQL_NO_DATA; + case SQL_RESULT_NEED_DATA: + return SQL_NEED_DATA; + case SQL_RESULT_ERROR: default: return SQL_ERROR; diff --git a/modules/platforms/cpp/odbc/src/config/configuration.cpp b/modules/platforms/cpp/odbc/src/config/configuration.cpp index 74ccaaf5c05c0..cc2cc5d897912 100644 --- a/modules/platforms/cpp/odbc/src/config/configuration.cpp +++ b/modules/platforms/cpp/odbc/src/config/configuration.cpp @@ -141,6 +141,33 @@ namespace ignite } } + void Configuration::SetTcpPort(uint16_t port) + { + arguments[Key::port] = common::LexicalCast(port); + + ArgumentMap::const_iterator it = arguments.find(Key::address); + + if (it == arguments.end()) + endPoint.port = port; + } + + void Configuration::SetHost(const std::string& server) + { + arguments[Key::server] = server; + + ArgumentMap::const_iterator it = arguments.find(Key::address); + + if (it == arguments.end()) + endPoint.host = server; + } + + void Configuration::SetAddress(const std::string& address) + { + arguments[Key::address] = address; + + ParseAddress(address, endPoint); + } + ProtocolVersion Configuration::GetProtocolVersion() const { ArgumentMap::const_iterator it = arguments.find(Key::protocolVersion); diff --git a/modules/platforms/cpp/odbc/src/config/connection_info.cpp b/modules/platforms/cpp/odbc/src/config/connection_info.cpp index ee2c22b668c75..341ab7fea2863 100644 --- a/modules/platforms/cpp/odbc/src/config/connection_info.cpp +++ b/modules/platforms/cpp/odbc/src/config/connection_info.cpp @@ -62,7 +62,8 @@ namespace ignite DBG_STR_CASE(SQL_CATALOG_TERM); DBG_STR_CASE(SQL_TABLE_TERM); DBG_STR_CASE(SQL_SCHEMA_TERM); - DBG_STR_CASE(SQL_ASYNC_DBC_FUNCTIONS); + DBG_STR_CASE(SQL_NEED_LONG_DATA_LEN); +// DBG_STR_CASE(SQL_ASYNC_DBC_FUNCTIONS); DBG_STR_CASE(SQL_ASYNC_NOTIFICATION); DBG_STR_CASE(SQL_GETDATA_EXTENSIONS); DBG_STR_CASE(SQL_ODBC_INTERFACE_CONFORMANCE); @@ -92,6 +93,8 @@ namespace ignite DBG_STR_CASE(SQL_SQL92_PREDICATES); DBG_STR_CASE(SQL_SQL92_RELATIONAL_JOIN_OPERATORS); DBG_STR_CASE(SQL_SQL92_VALUE_EXPRESSIONS); + DBG_STR_CASE(SQL_STATIC_CURSOR_ATTRIBUTES1); + DBG_STR_CASE(SQL_STATIC_CURSOR_ATTRIBUTES2); default: break; } @@ -104,7 +107,7 @@ namespace ignite ConnectionInfo::ConnectionInfo() : strParams(), intParams(), shortParams() { - //========================= String Params ========================= + //======================= String Params ======================= // Driver name. strParams[SQL_DRIVER_NAME] = "Apache Ignite"; strParams[SQL_DBMS_NAME] = "Apache Ignite"; @@ -170,8 +173,16 @@ namespace ignite strParams[SQL_SCHEMA_TERM] = "schema"; #endif // SQL_SCHEMA_TERM +#ifdef SQL_NEED_LONG_DATA_LEN + // A character string: "Y" if the data source needs the length + // of a long data value (the data type is SQL_LONGVARCHAR, + // SQL_LONGVARBINARY) before that value is sent to the data + // source, "N" if it does not. + strParams[SQL_NEED_LONG_DATA_LEN ] = "Y"; +#endif // SQL_NEED_LONG_DATA_LEN + #ifdef SQL_ASYNC_DBC_FUNCTIONS - //======================== Integer Params ========================= + //====================== Integer Params ======================= // Indicates if the driver can execute functions asynchronously // on the connection handle. 
// SQL_ASYNC_DBC_CAPABLE = The driver can execute connection @@ -342,7 +353,23 @@ namespace ignite SQL_SRJO_NATURAL_JOIN | SQL_SRJO_INTERSECT_JOIN | SQL_SRJO_UNION_JOIN; #endif // SQL_SQL92_RELATIONAL_JOIN_OPERATORS - //========================= Short Params ========================== +#ifdef SQL_STATIC_CURSOR_ATTRIBUTES1 + // Bitmask that describes the attributes of a static cursor that + // are supported by the driver. This bitmask contains the first + // subset of attributes; for the second subset, see + // SQL_STATIC_CURSOR_ATTRIBUTES2. + intParams[SQL_STATIC_CURSOR_ATTRIBUTES1] = SQL_CA1_NEXT; +#endif //SQL_STATIC_CURSOR_ATTRIBUTES1 + +#ifdef SQL_STATIC_CURSOR_ATTRIBUTES2 + // Bitmask that describes the attributes of a static cursor that + // are supported by the driver. This bitmask contains the second + // subset of attributes; for the first subset, see + // SQL_STATIC_CURSOR_ATTRIBUTES1. + intParams[SQL_STATIC_CURSOR_ATTRIBUTES2] = 0; +#endif //SQL_STATIC_CURSOR_ATTRIBUTES2 + + //======================= Short Params ======================== #ifdef SQL_MAX_CONCURRENT_ACTIVITIES // The maximum number of active statements that the driver can // support for a connection. Zero mean no limit. diff --git a/modules/platforms/cpp/odbc/src/connection.cpp b/modules/platforms/cpp/odbc/src/connection.cpp index cffecdf5798b3..cbbb2f7e3f550 100644 --- a/modules/platforms/cpp/odbc/src/connection.cpp +++ b/modules/platforms/cpp/odbc/src/connection.cpp @@ -52,7 +52,7 @@ namespace ignite { // No-op. } - + const config::ConnectionInfo& Connection::GetInfo() const { // Connection info is constant and the same for all connections now. diff --git a/modules/platforms/cpp/odbc/src/diagnostic/diagnostic_record.cpp b/modules/platforms/cpp/odbc/src/diagnostic/diagnostic_record.cpp index 8553ee47d5188..1b654d26480a3 100644 --- a/modules/platforms/cpp/odbc/src/diagnostic/diagnostic_record.cpp +++ b/modules/platforms/cpp/odbc/src/diagnostic/diagnostic_record.cpp @@ -40,9 +40,15 @@ namespace /** SQL state 01S01 constant. */ const std::string STATE_01S01 = "01S01"; + /** SQL state 22026 constant. */ + const std::string STATE_22026 = "22026"; + /** SQL state 24000 constant. */ const std::string STATE_24000 = "24000"; + /** SQL state 07009 constant. */ + const std::string STATE_07009 = "07009"; + /** SQL state 08001 constant. */ const std::string STATE_08001 = "08001"; @@ -58,6 +64,9 @@ namespace /** SQL state HY001 constant. */ const std::string STATE_HY001 = "HY001"; + /** SQL state HY009 constant. */ + const std::string STATE_HY009 = "HY009"; + /** SQL state HY010 constant. 
*/ const std::string STATE_HY010 = "HY010"; @@ -199,9 +208,15 @@ namespace ignite case SQL_STATE_01S01_ERROR_IN_ROW: return STATE_01S01; + case SQL_STATE_22026_DATA_LENGTH_MISMATCH: + return STATE_22026; + case SQL_STATE_24000_INVALID_CURSOR_STATE: return STATE_24000; + case SQL_STATE_07009_INVALID_DESCRIPTOR_INDEX: + return STATE_07009; + case SQL_STATE_08001_CANNOT_CONNECT: return STATE_08001; @@ -217,6 +232,9 @@ namespace ignite case SQL_STATE_HY001_MEMORY_ALLOCATION: return STATE_HY001; + case SQL_STATE_HY009_INVALID_USE_OF_NULL_POINTER: + return STATE_HY009; + case SQL_STATE_HY010_SEQUENCE_ERROR: return STATE_HY010; diff --git a/modules/platforms/cpp/odbc/src/entry_points.cpp b/modules/platforms/cpp/odbc/src/entry_points.cpp index c3d24bb960c9b..08016cc678a75 100644 --- a/modules/platforms/cpp/odbc/src/entry_points.cpp +++ b/modules/platforms/cpp/odbc/src/entry_points.cpp @@ -399,6 +399,19 @@ SQLRETURN SQL_API SQLSpecialColumns(SQLHSTMT stmt, tableNameLen, scope, nullable); } +SQLRETURN SQL_API SQLParamData(SQLHSTMT stmt, + SQLPOINTER* value) +{ + return ignite::SQLParamData(stmt, value); +} + +SQLRETURN SQL_API SQLPutData(SQLHSTMT stmt, + SQLPOINTER data, + SQLLEN strLengthOrIndicator) +{ + return ignite::SQLPutData(stmt, data, strLengthOrIndicator); +} + // // ==== Not implemented ==== // @@ -467,21 +480,6 @@ SQLRETURN SQL_API SQLGetStmtOption(SQLHSTMT stmt, return SQL_SUCCESS; } -SQLRETURN SQL_API SQLParamData(SQLHSTMT stmt, - SQLPOINTER* value) -{ - LOG_MSG("SQLParamData called\n"); - return SQL_SUCCESS; -} - -SQLRETURN SQL_API SQLPutData(SQLHSTMT stmt, - SQLPOINTER data, - SQLLEN strLengthOrIndicator) -{ - LOG_MSG("SQLPutData called\n"); - return SQL_SUCCESS; -} - SQLRETURN SQL_API SQLSetConnectOption(SQLHDBC conn, SQLUSMALLINT option, SQLULEN value) @@ -665,6 +663,7 @@ SQLRETURN SQL_API SQLColumnPrivileges(SQLHSTMT stmt, return SQL_SUCCESS; } +/* SQLRETURN SQL_API SQLDescribeParam(SQLHSTMT stmt, SQLUSMALLINT paramNum, SQLSMALLINT* dataType, @@ -675,6 +674,7 @@ SQLRETURN SQL_API SQLDescribeParam(SQLHSTMT stmt, LOG_MSG("SQLDescribeParam called\n"); return SQL_SUCCESS; } +*/ SQLRETURN SQL_API SQLParamOptions(SQLHSTMT stmt, SQLULEN paramSetSize, diff --git a/modules/platforms/cpp/odbc/src/odbc.cpp b/modules/platforms/cpp/odbc/src/odbc.cpp index 74d0f9d833f4d..9df64d3b14133 100644 --- a/modules/platforms/cpp/odbc/src/odbc.cpp +++ b/modules/platforms/cpp/odbc/src/odbc.cpp @@ -326,6 +326,8 @@ namespace ignite std::string dsn = SqlStringToString(serverName, serverNameLen); + LOG_MSG("DSN: %s\n", dsn.c_str()); + odbc::ReadDsnConfiguration(dsn.c_str(), config); connection->Establish(config); @@ -621,7 +623,7 @@ namespace ignite using odbc::app::Parameter; using odbc::type_traits::IsSqlTypeSupported; - LOG_MSG("SQLBindParameter called\n"); + LOG_MSG("SQLBindParameter called: %d, %d, %d\n", paramIdx, bufferType, paramSqlType); Statement *statement = reinterpret_cast(stmt); @@ -631,9 +633,6 @@ namespace ignite if (ioType != SQL_PARAM_INPUT) return SQL_ERROR; - if (resLen && (*resLen == SQL_DATA_AT_EXEC || *resLen <= SQL_LEN_DATA_AT_EXEC_OFFSET)) - return SQL_ERROR; - if (!IsSqlTypeSupported(paramSqlType)) return SQL_ERROR; @@ -976,14 +975,14 @@ namespace ignite case SQL_ATTR_PARAM_BIND_OFFSET_PTR: { - statement->SetParamBindOffsetPtr(reinterpret_cast(value)); + statement->SetParamBindOffsetPtr(reinterpret_cast(value)); break; } case SQL_ATTR_ROW_BIND_OFFSET_PTR: { - statement->SetColumnBindOffsetPtr(reinterpret_cast(value)); + statement->SetColumnBindOffsetPtr(reinterpret_cast(value)); 
break; } @@ -1334,4 +1333,36 @@ namespace ignite return statement->GetDiagnosticRecords().GetReturnCode(); } + SQLRETURN SQLParamData(SQLHSTMT stmt, SQLPOINTER* value) + { + using namespace ignite::odbc; + + LOG_MSG("SQLParamData called\n"); + + Statement *statement = reinterpret_cast(stmt); + + if (!statement) + return SQL_INVALID_HANDLE; + + statement->SelectParam(value); + + return statement->GetDiagnosticRecords().GetReturnCode(); + } + + SQLRETURN SQLPutData(SQLHSTMT stmt, SQLPOINTER data, SQLLEN strLengthOrIndicator) + { + using namespace ignite::odbc; + + LOG_MSG("SQLPutData called\n"); + + Statement *statement = reinterpret_cast(stmt); + + if (!statement) + return SQL_INVALID_HANDLE; + + statement->PutData(data, strLengthOrIndicator); + + return statement->GetDiagnosticRecords().GetReturnCode(); + } + } // namespace ignite; diff --git a/modules/platforms/cpp/odbc/src/statement.cpp b/modules/platforms/cpp/odbc/src/statement.cpp index 8aae156dc7818..32f7c3f0c33d2 100644 --- a/modules/platforms/cpp/odbc/src/statement.cpp +++ b/modules/platforms/cpp/odbc/src/statement.cpp @@ -33,8 +33,14 @@ namespace ignite namespace odbc { Statement::Statement(Connection& parent) : - connection(parent), columnBindings(), currentQuery(), - rowsFetched(0), rowStatuses(0), paramBindOffset(0), columnBindOffset(0) + connection(parent), + columnBindings(), + currentQuery(), + rowsFetched(0), + rowStatuses(0), + paramBindOffset(0), + columnBindOffset(0), + currentParamIdx(0) { // No-op. } @@ -67,12 +73,12 @@ namespace ignite columnBindings.clear(); } - void Statement::SetColumnBindOffsetPtr(size_t * ptr) + void Statement::SetColumnBindOffsetPtr(int * ptr) { columnBindOffset = ptr; } - size_t * Statement::GetColumnBindOffsetPtr() + int* Statement::GetColumnBindOffsetPtr() { return columnBindOffset; } @@ -104,11 +110,25 @@ namespace ignite void Statement::BindParameter(uint16_t paramIdx, const app::Parameter& param) { - IGNITE_ODBC_API_CALL_ALWAYS_SUCCESS; + IGNITE_ODBC_API_CALL(InternalBindParameter(paramIdx, param)); + } + + + SqlResult Statement::InternalBindParameter(uint16_t paramIdx, const app::Parameter& param) + { + if (paramIdx == 0) + { + AddStatusRecord(SQL_STATE_24000_INVALID_CURSOR_STATE, + "The value specified for the argument ParameterNumber was less than 1."); + + return SQL_RESULT_ERROR; + } paramBindings[paramIdx] = param; paramBindings[paramIdx].GetBuffer().SetPtrToOffsetPtr(¶mBindOffset); + + return SQL_RESULT_SUCCESS; } void Statement::UnbindParameter(uint16_t paramIdx) @@ -132,14 +152,14 @@ namespace ignite return static_cast(paramBindings.size()); } - void Statement::SetParamBindOffsetPtr(size_t* ptr) + void Statement::SetParamBindOffsetPtr(int* ptr) { IGNITE_ODBC_API_CALL_ALWAYS_SUCCESS; paramBindOffset = ptr; } - size_t * Statement::GetParamBindOffsetPtr() + int* Statement::GetParamBindOffsetPtr() { return paramBindOffset; } @@ -219,6 +239,21 @@ namespace ignite return SQL_RESULT_ERROR; } + bool paramDataReady = true; + + app::ParameterBindingMap::iterator it; + for (it = paramBindings.begin(); it != paramBindings.end(); ++it) + { + app::Parameter& param = it->second; + + param.ResetStoredData(); + + paramDataReady &= param.IsDataReady(); + } + + if (!paramDataReady) + return SQL_RESULT_NEED_DATA; + return currentQuery->Execute(); } @@ -519,6 +554,107 @@ namespace ignite { return rowStatuses; } + + void Statement::SelectParam(void** paramPtr) + { + IGNITE_ODBC_API_CALL(InternalSelectParam(paramPtr)); + } + + SqlResult Statement::InternalSelectParam(void** paramPtr) + { + if (!paramPtr) 
+ { + AddStatusRecord(SQL_STATE_HY000_GENERAL_ERROR, "Invalid parameter: ValuePtrPtr is null."); + + return SQL_RESULT_ERROR; + } + + if (!currentQuery.get()) + { + AddStatusRecord(SQL_STATE_HY010_SEQUENCE_ERROR, "Query is not prepared."); + + return SQL_RESULT_ERROR; + } + + app::ParameterBindingMap::iterator it; + + if (currentParamIdx) + { + it = paramBindings.find(currentParamIdx); + + if (it != paramBindings.end() && !it->second.IsDataReady()) + { + AddStatusRecord(SQL_STATE_22026_DATA_LENGTH_MISMATCH, + "Less data was sent for a parameter than was specified with " + "the StrLen_or_IndPtr argument in SQLBindParameter."); + + return SQL_RESULT_ERROR; + } + } + + for (it = paramBindings.begin(); it != paramBindings.end(); ++it) + { + uint16_t paramIdx = it->first; + app::Parameter& param = it->second; + + if (!param.IsDataReady()) + { + *paramPtr = param.GetBuffer().GetData(); + + currentParamIdx = paramIdx; + + return SQL_RESULT_NEED_DATA; + } + } + + SqlResult res = currentQuery->Execute(); + + if (res != SQL_RESULT_SUCCESS) + res = SQL_RESULT_SUCCESS_WITH_INFO; + + return res; + } + + void Statement::PutData(void* data, SqlLen len) + { + IGNITE_ODBC_API_CALL(InternalPutData(data, len)); + } + + SqlResult Statement::InternalPutData(void* data, SqlLen len) + { + if (!data && len != 0 && len != SQL_DEFAULT_PARAM && len != SQL_NULL_DATA) + { + AddStatusRecord(SQL_STATE_HY009_INVALID_USE_OF_NULL_POINTER, + "Invalid parameter: DataPtr is null StrLen_or_Ind is not 0, " + "SQL_DEFAULT_PARAM, or SQL_NULL_DATA."); + + return SQL_RESULT_ERROR; + } + + if (currentParamIdx == 0) + { + AddStatusRecord(SQL_STATE_HY010_SEQUENCE_ERROR, + "Parameter is not selected with the SQLParamData."); + + return SQL_RESULT_ERROR; + } + + app::ParameterBindingMap::iterator it = paramBindings.find(currentParamIdx); + + if (it == paramBindings.end()) + { + AddStatusRecord(SQL_STATE_HY000_GENERAL_ERROR, + "Selected parameter has been unbound."); + + return SQL_RESULT_ERROR; + } + + app::Parameter& param = it->second; + + param.PutData(data, len); + + return SQL_RESULT_SUCCESS; + } } } From 350e84656686eb81f8e12d569783db9914ca5a37 Mon Sep 17 00:00:00 2001 From: Pavel Tupitsyn Date: Mon, 26 Sep 2016 19:55:27 +0300 Subject: [PATCH 26/69] IGNITE-3956 .NET: Fix cache creation in LINQPad examples This closes #1116 --- .../Apache.Ignite.Core/NuGet/LINQPad/PutGetExample.linq | 2 +- .../Apache.Ignite.Core/NuGet/LINQPad/QueryExample.linq | 6 +++--- .../Apache.Ignite.Linq/NuGet/LINQPad/QueryExample.linq | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/PutGetExample.linq b/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/PutGetExample.linq index 97d9bbe7be3d1..7c77d0995e736 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/PutGetExample.linq +++ b/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/PutGetExample.linq @@ -42,7 +42,7 @@ void Main() using (var ignite = Ignition.Start(cfg)) { // Create new cache - var cache = ignite.CreateCache("orgs"); + var cache = ignite.GetOrCreateCache("orgs"); // Put data entry to cache cache.Put(1, new Organization {Name = "Apache", Type="Private"}); diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/QueryExample.linq b/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/QueryExample.linq index b7f7981fb8986..2a2454e839315 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/QueryExample.linq +++ 
b/modules/platforms/dotnet/Apache.Ignite.Core/NuGet/LINQPad/QueryExample.linq @@ -45,14 +45,14 @@ void Main() using (var ignite = Ignition.Start(cfg)) { // Create and populate organization cache - var orgs = ignite.GetOrCreateCache(new CacheConfiguration("orgs", + var orgs = ignite.GetOrCreateCache(new CacheConfiguration("orgs-sql", new QueryEntity(typeof(int), typeof(Organization)))); orgs[1] = new Organization { Name = "Apache", Type = "Private", Size = 5300 }; orgs[2] = new Organization { Name = "Microsoft", Type = "Private", Size = 110000 }; orgs[3] = new Organization { Name = "Red Cross", Type = "Non-Profit", Size = 35000 }; // Create and populate person cache - var persons = ignite.CreateCache(new CacheConfiguration("persons", typeof(Person))); + var persons = ignite.GetOrCreateCache(new CacheConfiguration("persons-sql", typeof(Person))); persons[1] = new Person { OrgId = 1, Name = "James Wilson" }; persons[2] = new Person { OrgId = 1, Name = "Daniel Adams" }; persons[3] = new Person { OrgId = 2, Name = "Christian Moss" }; @@ -64,7 +64,7 @@ void Main() // SQL query with join const string orgName = "Apache"; - persons.Query(new SqlQuery(typeof(Person), "from Person, \"orgs\".Organization where Person.OrgId = \"orgs\".Organization._key and \"orgs\".Organization.Name = ?", orgName)) + persons.Query(new SqlQuery(typeof(Person), "from Person, \"orgs-sql\".Organization where Person.OrgId = \"orgs-sql\".Organization._key and \"orgs-sql\".Organization.Name = ?", orgName)) .Dump("Persons working for " + orgName); // Fields query diff --git a/modules/platforms/dotnet/Apache.Ignite.Linq/NuGet/LINQPad/QueryExample.linq b/modules/platforms/dotnet/Apache.Ignite.Linq/NuGet/LINQPad/QueryExample.linq index 9cce4ec9278d6..6a28f1fc86ece 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Linq/NuGet/LINQPad/QueryExample.linq +++ b/modules/platforms/dotnet/Apache.Ignite.Linq/NuGet/LINQPad/QueryExample.linq @@ -46,14 +46,14 @@ void Main() using (var ignite = Ignition.Start(cfg)) { // Create and populate organization cache - var orgs = ignite.GetOrCreateCache(new CacheConfiguration("orgs", + var orgs = ignite.GetOrCreateCache(new CacheConfiguration("orgs-linq", new QueryEntity(typeof(int), typeof(Organization)))); orgs[1] = new Organization { Name = "Apache", Type = "Private", Size = 5300 }; orgs[2] = new Organization { Name = "Microsoft", Type = "Private", Size = 110000 }; orgs[3] = new Organization { Name = "Red Cross", Type = "Non-Profit", Size = 35000 }; // Create and populate person cache - var persons = ignite.CreateCache(new CacheConfiguration("persons", typeof(Person))); + var persons = ignite.GetOrCreateCache(new CacheConfiguration("persons-linq", typeof(Person))); persons[1] = new Person { OrgId = 1, Name = "James Wilson" }; persons[2] = new Person { OrgId = 1, Name = "Daniel Adams" }; persons[3] = new Person { OrgId = 2, Name = "Christian Moss" }; From 6a41dc92da124a921b304392e922886c30597037 Mon Sep 17 00:00:00 2001 From: Saikat Maitra Date: Mon, 26 Sep 2016 15:28:51 -0700 Subject: [PATCH 27/69] IGNITE-3807 IgniteSpiContext registers message listeners incorrectly --- .../internal/managers/GridManagerAdapter.java | 15 ++ .../apache/ignite/spi/IgniteSpiAdapter.java | 11 + .../apache/ignite/spi/IgniteSpiContext.java | 26 +- ...idManagerLocalMessageListenerSelfTest.java | 222 ++++++++++++++++++ .../testframework/GridSpiTestContext.java | 115 ++++++++- .../ignite/testsuites/IgniteSpiTestSuite.java | 6 +- 6 files changed, 391 insertions(+), 4 deletions(-) create mode 100644 
modules/core/src/test/java/org/apache/ignite/internal/managers/GridManagerLocalMessageListenerSelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/managers/GridManagerAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/managers/GridManagerAdapter.java index 4fe8ca87aff42..584cc56e23b3f 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/managers/GridManagerAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/managers/GridManagerAdapter.java @@ -44,6 +44,7 @@ import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.plugin.extensions.communication.Message; @@ -396,6 +397,20 @@ protected final String stopInfo() { } } + @Override public void addLocalMessageListener(Object topic, IgniteBiPredicate p) { + A.notNull(topic, "topic"); + A.notNull(p, "p"); + + ctx.io().addUserMessageListener(topic, p); + } + + @Override public void removeLocalMessageListener(Object topic, IgniteBiPredicate p) { + A.notNull(topic, "topic"); + A.notNull(topic, "p"); + + ctx.io().removeUserMessageListener(topic, p); + } + @SuppressWarnings("deprecation") @Override public void addMessageListener(GridMessageListener lsnr, String topic) { A.notNull(lsnr, "lsnr"); diff --git a/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiAdapter.java b/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiAdapter.java index 65b0420078137..219d07be5f725 100644 --- a/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiAdapter.java @@ -47,6 +47,7 @@ import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.SB; import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.plugin.extensions.communication.Message; import org.apache.ignite.plugin.extensions.communication.MessageFactory; @@ -758,6 +759,11 @@ private class GridDummySpiContext implements IgniteSpiContext { /* No-op. */ } + /** {@inheritDoc} */ + @Override public void addLocalMessageListener(Object topic, IgniteBiPredicate p) { + /* No-op. */ + } + /** {@inheritDoc} */ @Override public void recordEvent(Event evt) { /* No-op. */ @@ -848,6 +854,11 @@ private class GridDummySpiContext implements IgniteSpiContext { return true; } + /** {@inheritDoc} */ + @Override public void removeLocalMessageListener(Object topic, IgniteBiPredicate p) { + /* No-op. 
*/ + } + /** {@inheritDoc} */ @Override public boolean removeMessageListener(GridMessageListener lsnr, String topic) { return false; diff --git a/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiContext.java b/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiContext.java index 899c222d4018f..5eb5227a0a46c 100644 --- a/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiContext.java +++ b/modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiContext.java @@ -26,6 +26,7 @@ import org.apache.ignite.events.Event; import org.apache.ignite.internal.managers.communication.GridMessageListener; import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener; +import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.plugin.extensions.communication.MessageFactory; import org.apache.ignite.plugin.extensions.communication.MessageFormatter; import org.apache.ignite.plugin.security.SecuritySubject; @@ -115,6 +116,23 @@ public interface IgniteSpiContext { */ public void send(ClusterNode node, Serializable msg, String topic) throws IgniteSpiException; + /** + * Register an local message listener to receive messages sent by remote nodes. The underlying + * communication mechanism is defined by {@link org.apache.ignite.spi.communication.CommunicationSpi} implementation used. + * + * @param topic Topic to subscribe to. + * @param p Message predicate. + */ + public void addLocalMessageListener(@Nullable Object topic, IgniteBiPredicate p); + + /** + * Removes a previously registered local message listener. + * + * @param topic Topic to unsubscribe from. + * @param p Message predicate. + */ + public void removeLocalMessageListener(@Nullable Object topic, IgniteBiPredicate p); + /** * Register a message listener to receive messages sent by remote nodes. The underlying * communication mechanism is defined by {@link org.apache.ignite.spi.communication.CommunicationSpi} implementation used. @@ -124,7 +142,10 @@ public interface IgniteSpiContext { * * @param lsnr Message listener to register. * @param topic Topic to register listener for. + * + * @deprecated Use {@link #addLocalMessageListener(Object, IgniteBiPredicate)} instead. */ + @Deprecated public void addMessageListener(GridMessageListener lsnr, String topic); /** @@ -134,7 +155,10 @@ public interface IgniteSpiContext { * @param topic Topic to unregister listener for. * @return {@code true} of message listener was removed, {@code false} if it was not * previously registered. + * + * @deprecated Use {@link #removeLocalMessageListener(Object, IgniteBiPredicate)} instead. */ + @Deprecated public boolean removeMessageListener(GridMessageListener lsnr, String topic); /** @@ -328,4 +352,4 @@ public interface IgniteSpiContext { * @param c Timeout object. */ public void removeTimeoutObject(IgniteSpiTimeoutObject c); -} \ No newline at end of file +} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/managers/GridManagerLocalMessageListenerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/managers/GridManagerLocalMessageListenerSelfTest.java new file mode 100644 index 0000000000000..4aadc78b1336f --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/managers/GridManagerLocalMessageListenerSelfTest.java @@ -0,0 +1,222 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.managers; + +import java.util.UUID; +import java.util.concurrent.CountDownLatch; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.GridKernalContext; +import org.apache.ignite.internal.managers.communication.GridIoMessageFactory; +import org.apache.ignite.internal.managers.communication.GridIoUserMessage; +import org.apache.ignite.internal.util.typedef.CO; +import org.apache.ignite.lang.IgniteBiPredicate; +import org.apache.ignite.plugin.extensions.communication.Message; +import org.apache.ignite.spi.IgniteSpi; +import org.apache.ignite.spi.IgniteSpiAdapter; +import org.apache.ignite.spi.IgniteSpiContext; +import org.apache.ignite.spi.IgniteSpiException; +import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi; +import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; +import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder; +import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.jetbrains.annotations.Nullable; + +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static java.util.concurrent.TimeUnit.NANOSECONDS; +import static java.util.concurrent.TimeUnit.SECONDS; + +/** + * Test Managers to add and remove local message listener. + */ +public class GridManagerLocalMessageListenerSelfTest extends GridCommonAbstractTest { + /** IP finder. */ + private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true); + + /** */ + private static final byte DIRECT_TYPE = (byte)210; + + static { + GridIoMessageFactory.registerCustom(DIRECT_TYPE, new CO() { + @Override public Message apply() { + return new GridIoUserMessage(); + } + }); + } + + /** {@inheritDoc} */ + @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { + IgniteConfiguration c = super.getConfiguration(gridName); + + TcpDiscoverySpi discoSpi = new TcpDiscoverySpi(); + + discoSpi.setIpFinder(ipFinder); + + c.setDiscoverySpi(discoSpi); + + TcpCommunicationSpi commSpi = new TcpCommunicationSpi(); + + c.setCommunicationSpi(commSpi); + + return c; + } + + /** {@inheritDoc} */ + @Override protected void afterTest() throws Exception { + stopAllGrids(); + } + + /** + * @throws Exception If failed. 
+ */ + public void testSendMessage() throws Exception { + startGridsMultiThreaded(2); + + IgniteSpiContext ctx0 = ((IgniteSpiAdapter)grid(0).context().io().getSpi()).getSpiContext(); + IgniteSpiContext ctx1 = ((IgniteSpiAdapter)grid(1).context().io().getSpi()).getSpiContext(); + + String topic = "test-topic"; + + final CountDownLatch latch = new CountDownLatch(1); + + ctx1.addLocalMessageListener(topic, new IgniteBiPredicate() { + @Override public boolean apply(UUID nodeId, Object msg) { + assertEquals("Message", msg); + + latch.countDown(); + + return true; + } + }); + + long time = System.nanoTime(); + + ctx0.send(grid(1).localNode(), "Message", topic); + + assert latch.await(3, SECONDS); + + time = System.nanoTime() - time; + + info(">>>"); + info(">>> send() time (ms): " + MILLISECONDS.convert(time, NANOSECONDS)); + info(">>>"); + } + + /** + * @throws Exception If failed. + */ + public void testAddLocalMessageListener() throws Exception { + startGrid(); + + Manager mgr = new Manager(grid().context(), new Spi()); + + mgr.start(); + + mgr.onKernalStart(); + + assertTrue(mgr.enabled()); + } + + /** + * @throws Exception If failed. + */ + public void testRemoveLocalMessageListener() throws Exception { + startGrid(); + + Manager mgr = new Manager(grid().context(), new Spi()); + + assertTrue(mgr.enabled()); + + mgr.onKernalStart(); + + mgr.onKernalStop(false); + + mgr.stop(false); + + assertTrue(mgr.enabled()); + } + + /** */ + private static class Manager extends GridManagerAdapter { + /** + * @param ctx Kernal context. + * @param spis Specific SPI instance. + */ + protected Manager(GridKernalContext ctx, IgniteSpi... spis) { + super(ctx, spis); + } + + /** {@inheritDoc} */ + @Override public void start() throws IgniteCheckedException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void stop(boolean cancel) throws IgniteCheckedException { + // No-op. + } + } + + /** + * Test Spi. + */ + private static interface TestSpi extends IgniteSpi { + // No-op. + } + + /** + * Spi + */ + private static class Spi extends IgniteSpiAdapter implements TestSpi { + /** Ignite Spi Context. **/ + private IgniteSpiContext spiCtx; + + /** Test message topic. **/ + private String TEST_TOPIC = "test_topic"; + + /** {@inheritDoc} */ + @Override public void spiStart(@Nullable String gridName) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void spiStop() throws IgniteSpiException { + // No-op. 
+ } + + @Override public void onContextInitialized0(IgniteSpiContext spiCtx) throws IgniteSpiException { + this.spiCtx = spiCtx; + + spiCtx.addLocalMessageListener(TEST_TOPIC, new IgniteBiPredicate() { + @Override public boolean apply(UUID uuid, Object o) { + return true; + } + }); + + } + + @Override public void onContextDestroyed0() { + spiCtx.removeLocalMessageListener(TEST_TOPIC, new IgniteBiPredicate() { + @Override public boolean apply(UUID uuid, Object o) { + return true; + } + }); + } + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/GridSpiTestContext.java b/modules/core/src/test/java/org/apache/ignite/testframework/GridSpiTestContext.java index 0bffe8bfc8710..ac50ef90e29e1 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/GridSpiTestContext.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/GridSpiTestContext.java @@ -29,21 +29,26 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentMap; +import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.events.DiscoveryEvent; import org.apache.ignite.events.Event; import org.apache.ignite.events.TaskEvent; import org.apache.ignite.internal.ClusterMetricsSnapshot; +import org.apache.ignite.internal.GridTopic; import org.apache.ignite.internal.direct.DirectMessageReader; import org.apache.ignite.internal.direct.DirectMessageWriter; import org.apache.ignite.internal.managers.communication.GridIoManager; import org.apache.ignite.internal.managers.communication.GridIoMessageFactory; +import org.apache.ignite.internal.managers.communication.GridIoUserMessage; import org.apache.ignite.internal.managers.communication.GridMessageListener; import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener; import org.apache.ignite.internal.processors.timeout.GridSpiTimeoutObject; import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor; import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.plugin.extensions.communication.MessageFactory; import org.apache.ignite.plugin.extensions.communication.MessageFormatter; @@ -61,6 +66,7 @@ import static org.apache.ignite.events.EventType.EVT_NODE_JOINED; import static org.apache.ignite.events.EventType.EVT_NODE_LEFT; import static org.apache.ignite.events.EventType.EVT_NODE_METRICS_UPDATED; +import static org.apache.ignite.internal.GridTopic.TOPIC_COMM_USER; /** * Test SPI context. @@ -241,7 +247,7 @@ public void removeNode(ClusterNode node) { * @param nodeId Node ID. 
*/ public void removeNode(UUID nodeId) { - for (Iterator iter = rmtNodes.iterator(); iter.hasNext();) { + for (Iterator iter = rmtNodes.iterator(); iter.hasNext(); ) { ClusterNode node = iter.next(); if (node.id().equals(nodeId)) { @@ -325,6 +331,27 @@ public void triggerMessage(ClusterNode node, Object msg) { lsnr.onMessage(node.id(), msg); } + /** {@inheritDoc} */ + @SuppressWarnings("deprecation") + @Override public void addLocalMessageListener(Object topic, IgniteBiPredicate p) { + try { + addMessageListener(TOPIC_COMM_USER, + new GridLocalMessageListener(topic, (IgniteBiPredicate)p)); + } + catch (IgniteCheckedException e) { + throw new IgniteException(e); + } + } + + /** + * @param topic Listener's topic. + * @param lsnr Listener to add. + */ + @SuppressWarnings({"TypeMayBeWeakened", "deprecation"}) + public void addMessageListener(GridTopic topic, GridMessageListener lsnr) { + addMessageListener(lsnr, ((Object)topic).toString()); + } + /** {@inheritDoc} */ @SuppressWarnings("deprecation") @Override public void addMessageListener(GridMessageListener lsnr, String topic) { @@ -337,6 +364,28 @@ public void triggerMessage(ClusterNode node, Object msg) { return msgLsnrs.remove(lsnr); } + /** {@inheritDoc} */ + @SuppressWarnings("deprecation") + @Override public void removeLocalMessageListener(Object topic, IgniteBiPredicate p) { + try { + removeMessageListener(TOPIC_COMM_USER, + new GridLocalMessageListener(topic, (IgniteBiPredicate)p)); + } + catch (IgniteCheckedException e) { + throw new IgniteException(e); + } + } + + /** + * @param topic Listener's topic. + * @param lsnr Listener to remove. + * @return Whether or not the lsnr was removed. + */ + @SuppressWarnings("deprecation") + public boolean removeMessageListener(GridTopic topic, @Nullable GridMessageListener lsnr) { + return removeMessageListener(lsnr, ((Object)topic).toString()); + } + /** * @param type Event type. * @param taskName Task name. @@ -471,7 +520,7 @@ public void triggerEvent(Event evt) { boolean res = false; try { - res = get(cacheName, key) != null; + res = get(cacheName, key) != null; } catch (IgniteException ignored) { @@ -587,4 +636,66 @@ private CachedObject(long expire, V obj) { this.obj = obj; } } + + /** + * This class represents a message listener wrapper that knows about peer deployment. + */ + private class GridLocalMessageListener implements GridMessageListener { + /** Predicate listeners. */ + private final IgniteBiPredicate predLsnr; + + /** User message topic. */ + private final Object topic; + + /** + * @param topic User topic. + * @param predLsnr Predicate listener. + * @throws IgniteCheckedException If failed to inject resources to predicates. 
+ */ + GridLocalMessageListener(@Nullable Object topic, @Nullable IgniteBiPredicate predLsnr) + throws IgniteCheckedException { + this.topic = topic; + this.predLsnr = predLsnr; + } + + /** {@inheritDoc} */ + @SuppressWarnings({ + "SynchronizationOnLocalVariableOrMethodParameter", "ConstantConditions", + "OverlyStrongTypeCast"}) + @Override public void onMessage(UUID nodeId, Object msg) { + GridIoUserMessage ioMsg = (GridIoUserMessage)msg; + + ClusterNode node = locNode; + Object msgBody = ioMsg.body(); + + assert msgBody != null || ioMsg.bodyBytes() != null; + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object o) { + if (this == o) + return true; + + if (o == null || getClass() != o.getClass()) + return false; + + GridLocalMessageListener l = (GridLocalMessageListener)o; + + return F.eq(predLsnr, l.predLsnr) && F.eq(topic, l.topic); + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + int res = predLsnr != null ? predLsnr.hashCode() : 0; + + res = 31 * res + (topic != null ? topic.hashCode() : 0); + + return res; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(GridLocalMessageListener.class, this); + } + } } diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiTestSuite.java index fc4023ad6a46e..90f12185e050a 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiTestSuite.java @@ -18,6 +18,7 @@ package org.apache.ignite.testsuites; import junit.framework.TestSuite; +import org.apache.ignite.internal.managers.GridManagerLocalMessageListenerSelfTest; import org.apache.ignite.internal.managers.GridNoopManagerSelfTest; /** @@ -64,6 +65,9 @@ public static TestSuite suite() throws Exception { // All other tests. suite.addTestSuite(GridNoopManagerSelfTest.class); + // Local Message Listener tests. + suite.addTestSuite(GridManagerLocalMessageListenerSelfTest.class); + return suite; } -} \ No newline at end of file +} From 97bfee4dff807e3049b61fa473472a8395cdcb6a Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Tue, 27 Sep 2016 10:06:48 +0300 Subject: [PATCH 28/69] Fixing RAT. 
--- parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parent/pom.xml b/parent/pom.xml index 3e9a71c23cd7a..6240467594249 100644 --- a/parent/pom.xml +++ b/parent/pom.xml @@ -794,7 +794,7 @@ src/test/java/org/apache/ignite/spi/deployment/uri/META-INF/ignite.incorrefs src/test/java/org/apache/ignite/spi/deployment/uri/META-INF/ignite.empty src/test/java/org/apache/ignite/spi/deployment/uri/META-INF/ignite.brokenxml - src/test/java/org/apache/ignite/internal/processors/hadoop/books/*.txt + **/books/*.txt src/main/java/org/apache/ignite/examples/streaming/wordcount/*.txt examples/src/main/java/org/apache/ignite/examples/streaming/wordcount/*.txt src/main/resources/person.csv From 68b0bcd83c295ce540aa9d9d0910abcf671671df Mon Sep 17 00:00:00 2001 From: Pavel Tupitsyn Date: Tue, 27 Sep 2016 12:08:46 +0300 Subject: [PATCH 29/69] IGNITE-3970 .NET: Fix Cyrillic 'C' letters in code --- .../platforms/dotnet/Apache.Ignite.Core/Compute/ICompute.cs | 6 +++--- .../Apache.Ignite.Core/Impl/Unmanaged/UnmanagedCallbacks.cs | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Compute/ICompute.cs b/modules/platforms/dotnet/Apache.Ignite.Core/Compute/ICompute.cs index a677f39249522..f3bdb7db85d84 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Core/Compute/ICompute.cs +++ b/modules/platforms/dotnet/Apache.Ignite.Core/Compute/ICompute.cs @@ -577,7 +577,7 @@ public interface ICompute /// /// Job to run. /// Job arguments. - /// Сollection of job results. + /// Collection of job results. /// Type of argument. /// Type of job result. ICollection Apply(IComputeFunc clo, IEnumerable args); @@ -589,7 +589,7 @@ public interface ICompute /// /// Job to run. /// Job arguments. - /// Сollection of job results. + /// Collection of job results. /// Type of argument. /// Type of job result. Task> ApplyAsync(IComputeFunc clo, IEnumerable args); @@ -605,7 +605,7 @@ public interface ICompute /// Job arguments. /// The cancellation token. /// - /// Сollection of job results. + /// Collection of job results. 
/// Task> ApplyAsync(IComputeFunc clo, IEnumerable args, CancellationToken cancellationToken); diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Unmanaged/UnmanagedCallbacks.cs b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Unmanaged/UnmanagedCallbacks.cs index b3489a2d9ddd0..5815b4d9b2179 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Unmanaged/UnmanagedCallbacks.cs +++ b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Unmanaged/UnmanagedCallbacks.cs @@ -152,7 +152,7 @@ internal unsafe class UnmanagedCallbacks private delegate void ServiceCancelCallbackDelegate(void* target, long svcPtr, long memPtr); private delegate void ServiceInvokeMethodCallbackDelegate(void* target, long svcPtr, long inMemPtr, long outMemPtr); - private delegate int СlusterNodeFilterApplyCallbackDelegate(void* target, long memPtr); + private delegate int ClusterNodeFilterApplyCallbackDelegate(void* target, long memPtr); private delegate void NodeInfoCallbackDelegate(void* target, long memPtr); @@ -245,7 +245,7 @@ public UnmanagedCallbacks() serviceCancel = CreateFunctionPointer((ServiceCancelCallbackDelegate)ServiceCancel), serviceInvokeMethod = CreateFunctionPointer((ServiceInvokeMethodCallbackDelegate)ServiceInvokeMethod), - clusterNodeFilterApply = CreateFunctionPointer((СlusterNodeFilterApplyCallbackDelegate)СlusterNodeFilterApply), + clusterNodeFilterApply = CreateFunctionPointer((ClusterNodeFilterApplyCallbackDelegate)ClusterNodeFilterApply), onStart = CreateFunctionPointer((OnStartCallbackDelegate)OnStart), onStop = CreateFunctionPointer((OnStopCallbackDelegate)OnStop), @@ -993,7 +993,7 @@ private void ServiceInvokeMethod(void* target, long svcPtr, long inMemPtr, long }); } - private int СlusterNodeFilterApply(void* target, long memPtr) + private int ClusterNodeFilterApply(void* target, long memPtr) { return SafeCall(() => { From 29acb33293c3d3130e16b7ff4d6b7ae260b7b78b Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Tue, 27 Sep 2016 17:15:38 +0700 Subject: [PATCH 30/69] Fixed typos. --- .../main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java index 2107240338b85..22f0fac280c4e 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java @@ -82,7 +82,7 @@ public JdbcType(JdbcType type) { /** * Gets associated cache name. * - * @return Сache name. + * @return Cache name. 
*/ public String getCacheName() { return cacheName; From c2a3f11ca14cf9f9cf5bd2d6e2a87764f7cda5a7 Mon Sep 17 00:00:00 2001 From: Andrey Martianov Date: Tue, 20 Sep 2016 17:41:49 +0300 Subject: [PATCH 31/69] ignite-3621 Use single ttl cleanup worker thread for all caches (cherry picked from commit 1bc6058) --- .../processors/cache/GridCacheProcessor.java | 2 + .../cache/GridCacheSharedContext.java | 24 +++- .../GridCacheSharedTtlCleanupManager.java | 132 ++++++++++++++++++ .../processors/cache/GridCacheTtlManager.java | 115 +++------------ .../GridCacheTtlManagerNotificationTest.java | 107 +++++++++++++- .../IgniteCacheExpiryPolicyTestSuite.java | 2 + ...acheOnlyOneTtlCleanupThreadExistsTest.java | 102 ++++++++++++++ .../hashmap/GridCacheTestContext.java | 2 + 8 files changed, 384 insertions(+), 102 deletions(-) create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedTtlCleanupManager.java create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheOnlyOneTtlCleanupThreadExistsTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java index 6640db8c7bcef..0a0b40aa01415 100755 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java @@ -1870,6 +1870,7 @@ private GridCacheSharedContext createSharedContext(GridKernalContext kernalCtx, GridCachePartitionExchangeManager exchMgr = new GridCachePartitionExchangeManager(); GridCacheIoManager ioMgr = new GridCacheIoManager(); CacheAffinitySharedManager topMgr = new CacheAffinitySharedManager(); + GridCacheSharedTtlCleanupManager ttl = new GridCacheSharedTtlCleanupManager(); CacheJtaManagerAdapter jta = JTA.createOptional(); @@ -1882,6 +1883,7 @@ private GridCacheSharedContext createSharedContext(GridKernalContext kernalCtx, exchMgr, topMgr, ioMgr, + ttl, jta, storeSesLsnrs ); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedContext.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedContext.java index 0cdf0a44ae4b2..8f3923515de5b 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedContext.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedContext.java @@ -93,6 +93,9 @@ public class GridCacheSharedContext { /** Affinity manager. */ private CacheAffinitySharedManager affMgr; + /** Ttl cleanup manager. */ + private GridCacheSharedTtlCleanupManager ttlMgr; + /** Cache contexts map. */ private ConcurrentMap> ctxMap; @@ -135,6 +138,7 @@ public class GridCacheSharedContext { * @param exchMgr Exchange manager. * @param affMgr Affinity manager. * @param ioMgr IO manager. + * @param ttlMgr Ttl cleanup manager. * @param jtaMgr JTA manager. * @param storeSesLsnrs Store session listeners. 
*/ @@ -147,12 +151,13 @@ public GridCacheSharedContext( GridCachePartitionExchangeManager exchMgr, CacheAffinitySharedManager affMgr, GridCacheIoManager ioMgr, + GridCacheSharedTtlCleanupManager ttlMgr, CacheJtaManagerAdapter jtaMgr, Collection storeSesLsnrs ) { this.kernalCtx = kernalCtx; - setManagers(mgrs, txMgr, jtaMgr, verMgr, mvccMgr, depMgr, exchMgr, affMgr, ioMgr); + setManagers(mgrs, txMgr, jtaMgr, verMgr, mvccMgr, depMgr, exchMgr, affMgr, ioMgr, ttlMgr); this.storeSesLsnrs = storeSesLsnrs; @@ -248,7 +253,8 @@ void onReconnected() throws IgniteCheckedException { new GridCacheDeploymentManager(), new GridCachePartitionExchangeManager(), affMgr, - ioMgr); + ioMgr, + ttlMgr); this.mgrs = mgrs; @@ -272,13 +278,14 @@ private boolean restartOnDisconnect(GridCacheSharedManager mgr) { /** * @param mgrs Managers list. * @param txMgr Transaction manager. + * @param jtaMgr JTA manager. * @param verMgr Version manager. * @param mvccMgr MVCC manager. * @param depMgr Deployment manager. * @param exchMgr Exchange manager. * @param affMgr Affinity manager. * @param ioMgr IO manager. - * @param jtaMgr JTA manager. + * @param ttlMgr Ttl cleanup manager. */ private void setManagers(List> mgrs, IgniteTxManager txMgr, @@ -288,7 +295,8 @@ private void setManagers(List> mgrs, GridCacheDeploymentManager depMgr, GridCachePartitionExchangeManager exchMgr, CacheAffinitySharedManager affMgr, - GridCacheIoManager ioMgr) { + GridCacheIoManager ioMgr, + GridCacheSharedTtlCleanupManager ttlMgr) { this.mvccMgr = add(mgrs, mvccMgr); this.verMgr = add(mgrs, verMgr); this.txMgr = add(mgrs, txMgr); @@ -297,6 +305,7 @@ private void setManagers(List> mgrs, this.exchMgr = add(mgrs, exchMgr); this.affMgr = add(mgrs, affMgr); this.ioMgr = add(mgrs, ioMgr); + this.ttlMgr = add(mgrs, ttlMgr); } /** @@ -492,6 +501,13 @@ public GridCacheIoManager io() { return ioMgr; } + /** + * @return Ttl cleanup manager. + * */ + public GridCacheSharedTtlCleanupManager ttl() { + return ttlMgr; + } + /** * @return Cache deployment manager. */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedTtlCleanupManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedTtlCleanupManager.java new file mode 100644 index 0000000000000..d7d2cad901283 --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheSharedTtlCleanupManager.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.cache; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.internal.IgniteInterruptedCheckedException; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.internal.util.worker.GridWorker; +import org.apache.ignite.thread.IgniteThread; + +/** + * Periodically removes expired entities from caches with {@link CacheConfiguration#isEagerTtl()} flag set. + */ +public class GridCacheSharedTtlCleanupManager extends GridCacheSharedManagerAdapter { + /** Ttl cleanup worker thread sleep interval, ms. */ + private static final long CLEANUP_WORKER_SLEEP_INTERVAL = 500; + + /** Limit of expired entries processed by worker for certain cache in one pass. */ + private static final int CLEANUP_WORKER_ENTRIES_PROCESS_LIMIT = 1000; + + /** Cleanup worker. */ + private CleanupWorker cleanupWorker; + + /** Mutex on worker thread creation. */ + private final Object mux = new Object(); + + /** List of registered ttl managers. */ + private List mgrs = new CopyOnWriteArrayList<>(); + + /** {@inheritDoc} */ + @Override protected void onKernalStop0(boolean cancel) { + synchronized (mux) { + stopCleanupWorker(); + } + } + + /** + * Register ttl manager of cache for periodical check on expired entries. + * + * @param mgr ttl manager of cache. + * */ + public void register(GridCacheTtlManager mgr) { + synchronized (mux) { + if (cleanupWorker == null) + startCleanupWorker(); + + mgrs.add(mgr); + } + } + + /** + * Unregister ttl manager of cache from periodical check on expired entries. + * + * @param mgr ttl manager of cache. + * */ + public void unregister(GridCacheTtlManager mgr) { + synchronized (mux) { + mgrs.remove(mgr); + + if (mgrs.isEmpty()) + stopCleanupWorker(); + } + } + + /** + * + */ + private void startCleanupWorker() { + cleanupWorker = new CleanupWorker(); + + new IgniteThread(cleanupWorker).start(); + } + + /** + * + */ + private void stopCleanupWorker() { + if (null != cleanupWorker) { + U.cancel(cleanupWorker); + U.join(cleanupWorker, log); + + cleanupWorker = null; + } + } + + /** + * Entry cleanup worker. + */ + private class CleanupWorker extends GridWorker { + /** + * Creates cleanup worker. 
+ */ + CleanupWorker() { + super(cctx.gridName(), "ttl-cleanup-worker", cctx.logger(GridCacheSharedTtlCleanupManager.class)); + } + + /** {@inheritDoc} */ + @Override protected void body() throws InterruptedException, IgniteInterruptedCheckedException { + while (!isCancelled()) { + boolean expiredRemains = false; + + for (GridCacheTtlManager mgr : mgrs) { + if (mgr.expire(CLEANUP_WORKER_ENTRIES_PROCESS_LIMIT)) + expiredRemains = true; + + if (isCancelled()) + return; + } + + if (!expiredRemains) + U.sleep(CLEANUP_WORKER_SLEEP_INTERVAL); + } + } + } +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java index 8ff035813528e..996544fdb4dd9 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java @@ -17,20 +17,15 @@ package org.apache.ignite.internal.processors.cache; -import java.util.concurrent.atomic.AtomicLongFieldUpdater; - import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.configuration.CacheConfiguration; -import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.GridConcurrentSkipListSet; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; -import org.apache.ignite.internal.util.worker.GridWorker; -import org.apache.ignite.thread.IgniteThread; import org.jetbrains.annotations.Nullable; import org.jsr166.LongAdder8; @@ -43,19 +38,6 @@ public class GridCacheTtlManager extends GridCacheManagerAdapter { /** Entries pending removal. */ private final GridConcurrentSkipListSetEx pendingEntries = new GridConcurrentSkipListSetEx(); - /** Cleanup worker. */ - private CleanupWorker cleanupWorker; - - /** Mutex. */ - private final Object mux = new Object(); - - /** Next expire time. */ - private volatile long nextExpireTime; - - /** Next expire time updater. 
*/ - private static final AtomicLongFieldUpdater nextExpireTimeUpdater = - AtomicLongFieldUpdater.newUpdater(GridCacheTtlManager.class, "nextExpireTime"); - /** {@inheritDoc} */ @Override protected void start0() throws IgniteCheckedException { boolean cleanupDisabled = cctx.kernalContext().isDaemon() || @@ -68,19 +50,14 @@ public class GridCacheTtlManager extends GridCacheManagerAdapter { if (cleanupDisabled) return; - cleanupWorker = new CleanupWorker(); - } - - /** {@inheritDoc} */ - @Override protected void onKernalStart0() throws IgniteCheckedException { - if (cleanupWorker != null) - new IgniteThread(cleanupWorker).start(); + cctx.shared().ttl().register(this); } /** {@inheritDoc} */ @Override protected void onKernalStop0(boolean cancel) { - U.cancel(cleanupWorker); - U.join(cleanupWorker, log); + pendingEntries.clear(); + + cctx.shared().ttl().unregister(this); } /** @@ -90,27 +67,10 @@ public class GridCacheTtlManager extends GridCacheManagerAdapter { */ public void addTrackedEntry(GridCacheMapEntry entry) { assert Thread.holdsLock(entry); - assert cleanupWorker != null; EntryWrapper e = new EntryWrapper(entry); pendingEntries.add(e); - - while (true) { - long nextExpireTime = this.nextExpireTime; - - if (e.expireTime < nextExpireTime) { - if (nextExpireTimeUpdater.compareAndSet(this, nextExpireTime, e.expireTime)) { - synchronized (mux) { - mux.notifyAll(); - } - - break; - } - } - else - break; - } } /** @@ -118,7 +78,6 @@ public void addTrackedEntry(GridCacheMapEntry entry) { */ public void removeTrackedEntry(GridCacheMapEntry entry) { assert Thread.holdsLock(entry); - assert cleanupWorker != null; pendingEntries.remove(new EntryWrapper(entry)); } @@ -141,15 +100,27 @@ public int pendingSize() { * Expires entries by TTL. */ public void expire() { + expire(-1); + } + + /** + * Processes specified amount of expired entries. + * + * @param amount Limit of processed entries by single call, {@code -1} for no limit. + * @return {@code True} if unprocessed expired entries remains. + */ + public boolean expire(int amount) { long now = U.currentTimeMillis(); GridCacheVersion obsoleteVer = null; - for (int size = pendingEntries.sizex(); size > 0; size--) { + int limit = (-1 != amount) ? amount : pendingEntries.sizex(); + + for (int cnt = limit; cnt > 0; cnt--) { EntryWrapper e = pendingEntries.firstx(); if (e == null || e.expireTime > now) - return; + return false; // All expired entries are processed. if (pendingEntries.remove(e)) { if (obsoleteVer == null) @@ -158,7 +129,6 @@ public void expire() { if (log.isTraceEnabled()) log.trace("Trying to remove expired entry from cache: " + e); - boolean touch = false; GridCacheEntryEx entry = e.ctx.cache().entryEx(e.key); @@ -181,53 +151,14 @@ public void expire() { entry.context().evicts().touch(entry, null); } } - } - - /** - * Entry cleanup worker. - */ - private class CleanupWorker extends GridWorker { - /** - * Creates cleanup worker. 
- */ - CleanupWorker() { - super(cctx.gridName(), "ttl-cleanup-worker-" + cctx.name(), cctx.logger(GridCacheTtlManager.class)); - } - - /** {@inheritDoc} */ - @Override protected void body() throws InterruptedException, IgniteInterruptedCheckedException { - while (!isCancelled()) { - expire(); - - long waitTime; - - while (true) { - long curTime = U.currentTimeMillis(); - GridCacheTtlManager.EntryWrapper first = pendingEntries.firstx(); - - if (first == null) { - waitTime = 500; - nextExpireTime = curTime + 500; - } - else { - long expireTime = first.expireTime; - - waitTime = expireTime - curTime; - nextExpireTime = expireTime; - } - - synchronized (mux) { - if (pendingEntries.firstx() == first) { - if (waitTime > 0) - mux.wait(waitTime); + if (amount != -1) { + EntryWrapper e = pendingEntries.firstx(); - break; - } - } - } - } + return e != null && e.expireTime <= now; } + + return false; } /** diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManagerNotificationTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManagerNotificationTest.java index 85a491e3af55d..79f8a65a7eca8 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManagerNotificationTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManagerNotificationTest.java @@ -17,6 +17,8 @@ package org.apache.ignite.internal.processors.cache; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicInteger; import javax.cache.expiry.CreatedExpiryPolicy; @@ -24,6 +26,7 @@ import javax.cache.expiry.ExpiryPolicy; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; +import org.apache.ignite.IgniteException; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; @@ -44,6 +47,12 @@ * */ public class GridCacheTtlManagerNotificationTest extends GridCommonAbstractTest { + /** Count of caches in multi caches test. */ + private static final int CACHES_CNT = 10; + + /** Prefix for cache name fir multi caches test. */ + private static final String CACHE_PREFIX = "cache-"; + /** IP finder. */ private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); @@ -60,14 +69,30 @@ public class GridCacheTtlManagerNotificationTest extends GridCommonAbstractTest cfg.setDiscoverySpi(discoSpi); + CacheConfiguration[] ccfgs = new CacheConfiguration[CACHES_CNT + 1]; + + ccfgs[0] = createCacheConfiguration(null); + + for (int i = 0; i < CACHES_CNT; i++) + ccfgs[i + 1] = createCacheConfiguration(CACHE_PREFIX + i); + + cfg.setCacheConfiguration(ccfgs); + + return cfg; + } + + /** + * @param name Cache name. + * @return Cache configuration. + */ + private CacheConfiguration createCacheConfiguration(String name) { CacheConfiguration ccfg = new CacheConfiguration(); ccfg.setCacheMode(cacheMode); ccfg.setEagerTtl(true); + ccfg.setName(name); - cfg.setCacheConfiguration(ccfg); - - return cfg; + return ccfg; } /** @@ -104,8 +129,10 @@ public void testThatNotificationWorkAsExpected() throws Exception { } /** - * Add in several threads value to cache with different expiration policy. - * Wait for expiration of keys with small expiration duration. + * Adds in several threads value to cache with different expiration policy. 
+ * Waits for expiration of keys with small expiration duration. + * + * @throws Exception If failed. */ public void testThatNotificationWorkAsExpectedInMultithreadedMode() throws Exception { final CyclicBarrier barrier = new CyclicBarrier(21); @@ -152,16 +179,83 @@ public void testThatNotificationWorkAsExpectedInMultithreadedMode() throws Excep } } + /** + * Adds in several threads value to several caches with different expiration policy. + * Waits for expiration of keys with small expiration duration. + * + * @throws Exception If failed. + */ + public void testThatNotificationWorkAsExpectedManyCaches() throws Exception { + final int smallDuration = 4_000; + + final int cnt = 1_000; + final int cacheCnt = CACHES_CNT; + final int threadCnt = 2; + + final CyclicBarrier barrier = new CyclicBarrier(2 * threadCnt * cacheCnt + 1); + final AtomicInteger keysRangeGen = new AtomicInteger(); + final AtomicInteger evtCnt = new AtomicInteger(0); + final List> caches = new ArrayList<>(cacheCnt); + + try (final Ignite g = startGrid(0)) { + for (int i = 0; i < cacheCnt; i++) { + IgniteCache cache = g.cache("cache-" + i); + + caches.add(cache); + } + + g.events().localListen(new IgnitePredicate() { + @Override public boolean apply(Event evt) { + evtCnt.incrementAndGet(); + + return true; + } + }, EventType.EVT_CACHE_OBJECT_EXPIRED); + + for (int i = 0; i < cacheCnt; i++) { + GridTestUtils.runMultiThreadedAsync( + new CacheFiller(caches.get(i), 100_000, barrier, keysRangeGen, cnt), + threadCnt, + "put-large-duration"); + + GridTestUtils.runMultiThreadedAsync( + new CacheFiller(caches.get(i), smallDuration, barrier, keysRangeGen, cnt), + threadCnt, + "put-small-duration"); + } + + barrier.await(); + + Thread.sleep(1_000); + + barrier.await(); + + for (int i = 0; i < cacheCnt; i++) + assertEquals("Unexpected size of " + CACHE_PREFIX + i, 2 * threadCnt * cnt, caches.get(i).size()); + + Thread.sleep(2 * smallDuration); + + for (int i = 0; i < cacheCnt; i++) + assertEquals("Unexpected size of " + CACHE_PREFIX + i, threadCnt * cnt, caches.get(i).size()); + + assertEquals("Unexpected count of expired entries", threadCnt * CACHES_CNT * cnt, evtCnt.get()); + } + } + /** */ private static class CacheFiller implements Runnable { /** Barrier. */ private final CyclicBarrier barrier; + /** Keys range generator. */ private final AtomicInteger keysRangeGenerator; + /** Count. */ private final int cnt; + /** Cache. */ private final IgniteCache cache; + /** Expiration duration. 
*/ private final int expirationDuration; @@ -187,6 +281,7 @@ private static class CacheFiller implements Runnable { barrier.await(); ExpiryPolicy plc1 = new CreatedExpiryPolicy(new Duration(MILLISECONDS, expirationDuration)); + int keyStart = keysRangeGenerator.getAndIncrement() * cnt; for (int i = keyStart; i < keyStart + cnt; i++) @@ -195,7 +290,7 @@ private static class CacheFiller implements Runnable { barrier.await(); } catch (Exception e) { - e.printStackTrace(); + throw new IgniteException(e); } } } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheExpiryPolicyTestSuite.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheExpiryPolicyTestSuite.java index 28cb2da94e4f8..e371dc7228abc 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheExpiryPolicyTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheExpiryPolicyTestSuite.java @@ -72,7 +72,9 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(IgniteCacheExpireAndUpdateConsistencyTest.class); + // Eager ttl expiration tests. suite.addTestSuite(GridCacheTtlManagerNotificationTest.class); + suite.addTestSuite(IgniteCacheOnlyOneTtlCleanupThreadExistsTest.class); return suite; } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheOnlyOneTtlCleanupThreadExistsTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheOnlyOneTtlCleanupThreadExistsTest.java new file mode 100644 index 0000000000000..84f5144aef5b1 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/expiry/IgniteCacheOnlyOneTtlCleanupThreadExistsTest.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.cache.expiry; + +import org.apache.ignite.Ignite; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; + +/** + * Checks that one and only one Ttl cleanup worker thread must exists, and only + * if at least one cache with set 'eagerTtl' flag exists. + */ +public class IgniteCacheOnlyOneTtlCleanupThreadExistsTest extends GridCommonAbstractTest { + /** */ + private static final String CACHE_NAME1 = "cache-1"; + + /** */ + private static final String CACHE_NAME2 = "cache-2"; + + /** + * @throws Exception If failed. 
+     */
+    public void testOnlyOneTtlCleanupThreadExists() throws Exception {
+        try (final Ignite g = startGrid(0)) {
+            checkCleanupThreadExists(false);
+
+            g.createCache(createCacheConfiguration(CACHE_NAME1, false));
+
+            checkCleanupThreadExists(false);
+
+            g.createCache(createCacheConfiguration(CACHE_NAME2, true));
+
+            checkCleanupThreadExists(true);
+
+            g.destroyCache(CACHE_NAME1);
+
+            checkCleanupThreadExists(true);
+
+            g.createCache(createCacheConfiguration(CACHE_NAME1, true));
+
+            checkCleanupThreadExists(true);
+
+            g.destroyCache(CACHE_NAME1);
+
+            checkCleanupThreadExists(true);
+
+            g.destroyCache(CACHE_NAME2);
+
+            checkCleanupThreadExists(false);
+        }
+    }
+
+    /**
+     * @param name Cache name.
+     * @param eagerTtl Eager ttl flag.
+     * @return Cache configuration.
+     */
+    private CacheConfiguration createCacheConfiguration(String name, boolean eagerTtl) {
+        CacheConfiguration ccfg = new CacheConfiguration();
+
+        ccfg.setEagerTtl(eagerTtl);
+        ccfg.setName(name);
+
+        return ccfg;
+    }
+
+    /**
+     * @param exists {@code True} if ttl cleanup worker thread expected.
+     * @throws Exception If failed.
+     */
+    private void checkCleanupThreadExists(boolean exists) throws Exception {
+        int cnt = 0;
+
+        for (Thread t : Thread.getAllStackTraces().keySet()) {
+            if (t.getName().contains("ttl-cleanup-worker"))
+                cnt++;
+        }
+
+        if (cnt > 1)
+            fail("More than one ttl cleanup worker thread exists");
+
+        if (exists)
+            assertEquals("Ttl cleanup thread does not exist", 1, cnt);
+        else
+            assertEquals("Ttl cleanup thread exists", 0, cnt);
+    }
+}
diff --git a/modules/core/src/test/java/org/apache/ignite/loadtests/hashmap/GridCacheTestContext.java b/modules/core/src/test/java/org/apache/ignite/loadtests/hashmap/GridCacheTestContext.java
index fb82e20dadf45..6c2c4c1734328 100644
--- a/modules/core/src/test/java/org/apache/ignite/loadtests/hashmap/GridCacheTestContext.java
+++ b/modules/core/src/test/java/org/apache/ignite/loadtests/hashmap/GridCacheTestContext.java
@@ -34,6 +34,7 @@
 import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
 import org.apache.ignite.internal.processors.cache.GridCacheSwapManager;
 import org.apache.ignite.internal.processors.cache.GridCacheTtlManager;
+import org.apache.ignite.internal.processors.cache.GridCacheSharedTtlCleanupManager;
 import org.apache.ignite.internal.processors.cache.datastructures.CacheDataStructuresManager;
 import org.apache.ignite.internal.processors.cache.dr.GridOsCacheDrManager;
 import org.apache.ignite.internal.processors.cache.jta.CacheNoopJtaManager;
@@ -68,6 +69,7 @@ public GridCacheTestContext(GridTestKernalContext ctx) throws Exception {
                 new GridCachePartitionExchangeManager(),
                 new CacheAffinitySharedManager(),
                 new GridCacheIoManager(),
+                new GridCacheSharedTtlCleanupManager(),
                 new CacheNoopJtaManager(),
                 null
             ),
From 39fc5477c19cbe2b2116aaf575a2d0a9c9a618b1 Mon Sep 17 00:00:00 2001
From: tledkov-gridgain
Date: Tue, 27 Sep 2016 14:48:18 +0300
Subject: [PATCH 32/69] IGNITE-3639: IGFS: Removed BufferedOutputStream from LocalIgfsSecondaryFileSystem because it doesn't give any performance benefit.
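
With this change the local secondary file system hands back the raw FileOutputStream, so buffering, where still desired, is left to the caller. A minimal caller-side sketch (illustrative only, not part of this patch; the helper class name and the 8 KB buffer size are arbitrary assumptions, while IgfsSecondaryFileSystem.create(IgfsPath, boolean) is the existing interface method touched here):

    import java.io.BufferedOutputStream;
    import java.io.OutputStream;
    import org.apache.ignite.igfs.IgfsPath;
    import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem;

    /** Hypothetical helper: caller-side buffering over the secondary file system. */
    final class BufferedIgfsWrite {
        private BufferedIgfsWrite() {
            // No-op.
        }

        /** Wraps the stream returned by the secondary file system with a buffer. */
        static OutputStream bufferedCreate(IgfsSecondaryFileSystem fs, IgfsPath path) {
            // The SPI now returns a plain FileOutputStream; add buffering here if needed.
            return new BufferedOutputStream(fs.create(path, /*overwrite*/ true), 8 * 1024);
        }
    }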
--- .../local/LocalIgfsSecondaryFileSystem.java | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java b/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java index 8dd4fdac14728..1775db6e430d9 100644 --- a/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java +++ b/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java @@ -34,7 +34,6 @@ import org.apache.ignite.lifecycle.LifecycleAware; import org.jetbrains.annotations.Nullable; -import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -53,9 +52,6 @@ * Secondary file system which delegates to local file system. */ public class LocalIgfsSecondaryFileSystem implements IgfsSecondaryFileSystem, LifecycleAware { - /** Default buffer size. */ - private static final int DFLT_BUF_SIZE = 8 * 1024; - /** Path that will be added to each passed path. */ private String workDir; @@ -256,13 +252,13 @@ private boolean mkdirs0(@Nullable File dir) { /** {@inheritDoc} */ @Override public OutputStream create(IgfsPath path, boolean overwrite) { - return create0(path, overwrite, DFLT_BUF_SIZE); + return create0(path, overwrite); } /** {@inheritDoc} */ @Override public OutputStream create(IgfsPath path, int bufSize, boolean overwrite, int replication, long blockSize, @Nullable Map props) { - return create0(path, overwrite, bufSize); + return create0(path, overwrite); } /** {@inheritDoc} */ @@ -274,10 +270,10 @@ private boolean mkdirs0(@Nullable File dir) { boolean exists = file.exists(); if (exists) - return new BufferedOutputStream(new FileOutputStream(file, true), bufSize); + return new FileOutputStream(file, true); else { if (create) - return create0(path, false, bufSize); + return create0(path, false); else throw new IgfsPathNotFoundException("Failed to append to file because it doesn't exist: " + path); } @@ -389,11 +385,10 @@ private IgfsPath igfsPath(File f) throws IgfsException { * Internal create routine. * * @param path Path. - * @param overwrite Overwirte flag. - * @param bufSize Buffer size. + * @param overwrite Overwrite flag. * @return Output stream. */ - private OutputStream create0(IgfsPath path, boolean overwrite, int bufSize) { + private OutputStream create0(IgfsPath path, boolean overwrite) { File file = fileForPath(path); boolean exists = file.exists(); @@ -411,7 +406,7 @@ private OutputStream create0(IgfsPath path, boolean overwrite, int bufSize) { } try { - return new BufferedOutputStream(new FileOutputStream(file), bufSize); + return new FileOutputStream(file); } catch (IOException e) { throw handleSecondaryFsError(e, "Failed to create file [path=" + path + ", overwrite=" + overwrite + ']'); From 5cffd3c3d6cb006e3745c314d6f85a066e6a0f06 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Tue, 27 Sep 2016 15:13:21 +0300 Subject: [PATCH 33/69] IGNITE-3661: First attempt to move ignored and flaky tests into a single suite. Applied to web-session module. 
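
After this change @IgniteIgnore is method-only, requires a reason (normally a JIRA link) and can force an immediate failure for tests that would otherwise hang; ignored tests are skipped in regular suites and picked up only by IgniteIgnoredTestSuite. A minimal usage sketch (illustrative only; the test class name and the ticket link are placeholders, not real entries):

    import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
    import org.apache.ignite.testsuites.IgniteIgnore;

    /** Hypothetical example of marking ignored tests. */
    public class ExampleIgnoredSelfTest extends GridCommonAbstractTest {
        /** Skipped in regular suites, executed only by the ignored-tests suite. */
        @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-XXXX")
        public void testFlakyScenario() throws Exception {
            // Flaky logic would live here.
        }

        /** Reported as an immediate failure instead of being run (useful for hanging tests). */
        @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-XXXX", forceFailure = true)
        public void testHangingScenario() throws Exception {
            // Hanging logic would live here.
        }
    }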
--- .../ignite/testframework/IgniteTestSuite.java | 122 +++++++++++++----- .../junits/GridAbstractTest.java | 22 +++- .../ignite/testsuites/IgniteIgnore.java | 11 +- modules/ignored-tests/README.txt | 4 + modules/ignored-tests/pom.xml | 93 +++++++++++++ .../testsuites/IgniteIgnoredTestSuite.java | 41 ++---- .../ignite/testsuites/package-info.java | 22 ++++ .../IgniteWebSessionSelfTestSuite.java | 68 +--------- .../WebSessionReplicatedSelfTest.java | 28 ++++ .../WebSessionReplicatedV1SelfTest.java | 28 ++++ .../websession/WebSessionSelfTest.java | 2 + .../WebSessionTransactionalSelfTest.java | 48 +++++++ .../WebSessionTransactionalV1SelfTest.java | 28 ++++ .../websession/WebSessionV1SelfTest.java | 28 ++++ pom.xml | 7 + 15 files changed, 423 insertions(+), 129 deletions(-) create mode 100644 modules/ignored-tests/README.txt create mode 100644 modules/ignored-tests/pom.xml rename modules/{core => ignored-tests}/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java (56%) create mode 100644 modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/package-info.java create mode 100644 modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedSelfTest.java create mode 100644 modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedV1SelfTest.java create mode 100644 modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalSelfTest.java create mode 100644 modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalV1SelfTest.java create mode 100644 modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionV1SelfTest.java diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java index 2828065ea68ff..4153600fdb08a 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java @@ -20,6 +20,8 @@ import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.testframework.junits.GridAbstractTest; import org.apache.ignite.testsuites.IgniteIgnore; import org.jetbrains.annotations.Nullable; import org.junit.internal.MethodSorter; @@ -94,16 +96,6 @@ public IgniteTestSuite(@Nullable Class theClass, @Nullable S /** {@inheritDoc} */ @Override public void addTestSuite(Class testClass) { - addTestSuite(testClass, false); - } - - /** - * Add test class to the suite. - * - * @param testClass Test class. - * @param ignoredOnly Ignore only flag. - */ - public void addTestSuite(Class testClass, boolean ignoredOnly) { addTest(new IgniteTestSuite(testClass, ignoredOnly)); } @@ -133,6 +125,7 @@ private void addTestsFromTestCase(Class theClass) { for(List names = new ArrayList<>(); Test.class.isAssignableFrom(superCls); superCls = superCls.getSuperclass()) { + Method[] methods = MethodSorter.getDeclaredMethods(superCls); for (Method each : methods) { @@ -147,45 +140,110 @@ private void addTestsFromTestCase(Class theClass) { } /** - * @param method test method - * @param names test name list - * @param theClass test class + * Add test method. + * + * @param m Test method. + * @param names Test name list. + * @param theClass Test class. + * @return Whether test method was added. 
*/ - private boolean addTestMethod(Method method, List names, Class theClass) { - String name = method.getName(); + private boolean addTestMethod(Method m, List names, Class theClass) { + String name = m.getName(); + + if (names.contains(name)) + return false; + + if (!isPublicTestMethod(m)) { + if (isTestMethod(m)) + addTest(warning("Test method isn't public: " + m.getName() + "(" + theClass.getCanonicalName() + ")")); + + return false; + } + + names.add(name); + + boolean hasIgnore = m.isAnnotationPresent(IgniteIgnore.class); + + if (ignoredOnly) { + if (hasIgnore) { + IgniteIgnore ignore = m.getAnnotation(IgniteIgnore.class); + + String reason = ignore.value(); + + if (F.isEmpty(reason)) + throw new IllegalArgumentException("Reason is not set for ignored test [class=" + + theClass.getName() + ", method=" + name + ']'); - if(!names.contains(name) && canAddMethod(method)) { - if(!Modifier.isPublic(method.getModifiers())) - addTest(warning("Test method isn\'t public: " + method.getName() + "(" + - theClass.getCanonicalName() + ")")); - else { - names.add(name); + Test test = createTest(theClass, name); + if (ignore.forceFailure()) { + if (test instanceof GridAbstractTest) + ((GridAbstractTest)test).forceFailure(ignore.value()); + else + test = new ForcedFailure(name, ignore.value()); + } + + addTest(test); + + return true; + } + } + else { + if (!hasIgnore) { addTest(createTest(theClass, name)); return true; } } + return false; } /** - * Check whether method should be ignored. + * Check whether this is a test method. * - * @param method Method. - * @return {@code True} if it should be ignored. + * @param m Method. + * @return {@code True} if this is a test method. */ - protected boolean canAddMethod(Method method) { - boolean res = method.getParameterTypes().length == 0 && method.getName().startsWith("test") - && method.getReturnType().equals(Void.TYPE); + private static boolean isTestMethod(Method m) { + return m.getParameterTypes().length == 0 && + m.getName().startsWith("test") && + m.getReturnType().equals(Void.TYPE); + } - if (res) { - // If method signature and name matches check if it is ignored or not. - boolean hasIgnore = method.isAnnotationPresent(IgniteIgnore.class); + /** + * Check whether this is a public test method. + * + * @param m Method. + * @return {@code True} if this is a public test method. + */ + private static boolean isPublicTestMethod(Method m) { + return isTestMethod(m) && Modifier.isPublic(m.getModifiers()); + } - res = hasIgnore == ignoredOnly; + /** + * Test case simulating failure. + */ + private static class ForcedFailure extends TestCase { + /** Message. */ + private final String msg; + + /** + * Constructor. + * + * @param name Name. + * @param msg Message. 
+ */ + private ForcedFailure(String name, String msg) { + super(name); + + this.msg = msg; } - return res; + /** {@inheritDoc} */ + @Override protected void runTest() { + fail("Forced failure: " + msg + " (extend " + GridAbstractTest.class.getSimpleName() + + " for better output)."); + } } } diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java b/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java index 3910ce42ac02a..8d6fd07d90e62 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java @@ -155,6 +155,12 @@ public abstract class GridAbstractTest extends TestCase { /** Starting grid name. */ protected static final ThreadLocal startingGrid = new ThreadLocal<>(); + /** Force failure flag. */ + private boolean forceFailure; + + /** Force failure message. */ + private String forceFailureMsg; + /** * */ @@ -1752,12 +1758,26 @@ protected IgniteClosure errorHandler() { return null; } + /** + * Force test failure. + * + * @param msg Message. + */ + public void forceFailure(@Nullable String msg) { + forceFailure = true; + + forceFailureMsg = msg; + } + /** * @throws Throwable If failed. */ @SuppressWarnings({"ProhibitedExceptionDeclared"}) private void runTestInternal() throws Throwable { - super.runTest(); + if (forceFailure) + fail("Forced failure: " + forceFailureMsg); + else + super.runTest(); } /** diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java index ac9a88532ed12..dbb1d7a01326a 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java @@ -26,10 +26,15 @@ * Annotation which indicates that the test is ignored. */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD, ElementType.TYPE}) +@Target({ElementType.METHOD}) public @interface IgniteIgnore { /** - * The optional reason why the test is ignored. + * Reason for ignore (usually link to JIRA ticket). */ - String value() default ""; + String value(); + + /** + * Whether test should be failed immediately. Useful when test hangs or consumes a lot of time. + */ + boolean forceFailure() default false; } diff --git a/modules/ignored-tests/README.txt b/modules/ignored-tests/README.txt new file mode 100644 index 0000000000000..70f728dd170b6 --- /dev/null +++ b/modules/ignored-tests/README.txt @@ -0,0 +1,4 @@ +Apache Ignite Ignored Tests +------------------------ + +Special module containing ignored and flaky tests grouped in a single test suite. 
\ No newline at end of file diff --git a/modules/ignored-tests/pom.xml b/modules/ignored-tests/pom.xml new file mode 100644 index 0000000000000..a82a5bb6db23c --- /dev/null +++ b/modules/ignored-tests/pom.xml @@ -0,0 +1,93 @@ + + + + + + + 4.0.0 + + + org.apache.ignite + ignite-parent + 1 + ../../parent + + + ignite-ignored-tests + 1.7.0-SNAPSHOT + http://ignite.apache.org + + + + org.apache.ignite + ignite-core + ${project.version} + + + + org.apache.ignite + ignite-core + ${project.version} + test-jar + test + + + + org.apache.ignite + ignite-log4j + ${project.version} + + + + org.apache.ignite + ignite-spring + ${project.version} + test + + + + org.apache.ignite + ignite-web + ${project.version} + + + + org.apache.ignite + ignite-web + ${project.version} + test-jar + test + + + + org.eclipse.jetty + jetty-servlets + ${jetty.version} + test + + + + org.eclipse.jetty + jetty-webapp + ${jetty.version} + test + + + diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java similarity index 56% rename from modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java rename to modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java index c3ec5e4c0f235..f6ce3e3f09444 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java @@ -18,8 +18,13 @@ package org.apache.ignite.testsuites; import junit.framework.TestSuite; +import org.apache.ignite.internal.websession.WebSessionReplicatedSelfTest; +import org.apache.ignite.internal.websession.WebSessionReplicatedV1SelfTest; +import org.apache.ignite.internal.websession.WebSessionSelfTest; +import org.apache.ignite.internal.websession.WebSessionTransactionalSelfTest; +import org.apache.ignite.internal.websession.WebSessionTransactionalV1SelfTest; +import org.apache.ignite.internal.websession.WebSessionV1SelfTest; import org.apache.ignite.testframework.IgniteTestSuite; -import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; /** * Special test suite with ignored tests. @@ -30,34 +35,16 @@ public class IgniteIgnoredTestSuite extends TestSuite { * @throws Exception Thrown in case of the failure. */ public static TestSuite suite() throws Exception { - IgniteTestSuite suite = new IgniteTestSuite("Ignite Ignored Test Suite"); + IgniteTestSuite suite = new IgniteTestSuite(null, "Ignite Ignored Test Suite", true); - suite.addTestSuite(SampleTestClass.class, true); + /* --- WEB SESSIONS --- */ + suite.addTestSuite(WebSessionSelfTest.class); + suite.addTestSuite(WebSessionTransactionalSelfTest.class); + suite.addTestSuite(WebSessionReplicatedSelfTest.class); + suite.addTestSuite(WebSessionV1SelfTest.class); + suite.addTestSuite(WebSessionTransactionalV1SelfTest.class); + suite.addTestSuite(WebSessionReplicatedV1SelfTest.class); return suite; } - - /** - * Sample test class. To be removed once the very first really ignored test class is there. - */ - public static class SampleTestClass extends GridCommonAbstractTest { - /** - * Test 1. - * - * @throws Exception If failed. - */ - public void testMethod1() throws Exception { - System.out.println("Normal test method called."); - } - - /** - * Test 2. - * - * @throws Exception If failed. 
- */ - @IgniteIgnore - public void testMethod2() throws Exception { - System.out.println("Ignored method called."); - } - } } diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/package-info.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/package-info.java new file mode 100644 index 0000000000000..cb71478f97828 --- /dev/null +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * + * Contains internal tests or test related classes and interfaces. + */ +package org.apache.ignite.testsuites; \ No newline at end of file diff --git a/modules/web/src/test/java/org/apache/ignite/internal/websession/IgniteWebSessionSelfTestSuite.java b/modules/web/src/test/java/org/apache/ignite/internal/websession/IgniteWebSessionSelfTestSuite.java index 1d151277ebd1f..e1d5c3b757be6 100644 --- a/modules/web/src/test/java/org/apache/ignite/internal/websession/IgniteWebSessionSelfTestSuite.java +++ b/modules/web/src/test/java/org/apache/ignite/internal/websession/IgniteWebSessionSelfTestSuite.java @@ -19,6 +19,7 @@ import junit.framework.TestSuite; import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.testframework.IgniteTestSuite; import static org.apache.ignite.IgniteSystemProperties.IGNITE_OVERRIDE_MCAST_GRP; @@ -32,7 +33,7 @@ public class IgniteWebSessionSelfTestSuite extends TestSuite { * @throws Exception Thrown in case of the failure. */ public static TestSuite suite() throws Exception { - TestSuite suite = new TestSuite("Ignite Web Sessions Test Suite"); + TestSuite suite = new IgniteTestSuite("Ignite Web Sessions Test Suite"); suite.addTestSuite(WebSessionSelfTest.class); suite.addTestSuite(WebSessionTransactionalSelfTest.class); @@ -48,69 +49,4 @@ public static TestSuite suite() throws Exception { return suite; } - - /** - * Tests web sessions with TRANSACTIONAL cache. - */ - public static class WebSessionTransactionalSelfTest extends WebSessionSelfTest { - /** {@inheritDoc} */ - @Override protected String getCacheName() { - return "partitioned_tx"; - } - - /** {@inheritDoc} */ - @Override public void testRestarts() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-810"); - } - - /** {@inheritDoc} */ - @Override public void testInvalidatedSession() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-810"); - } - - /** {@inheritDoc} */ - @Override public void testClientReconnectRequest() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-810"); - } - } - - /** - * Tests web sessions with REPLICATED cache. 
- */ - public static class WebSessionReplicatedSelfTest extends WebSessionSelfTest { - /** {@inheritDoc} */ - @Override protected String getCacheName() { - return "replicated"; - } - } - - /** - * Old version test. - */ - public static class WebSessionV1SelfTest extends WebSessionSelfTest { - /** {@inheritDoc} */ - @Override protected boolean keepBinary() { - return false; - } - } - - /** - * Tests web sessions with TRANSACTIONAL cache in compatibility mode. - */ - public static class WebSessionTransactionalV1SelfTest extends WebSessionTransactionalSelfTest { - /** {@inheritDoc} */ - @Override protected boolean keepBinary() { - return false; - } - } - - /** - * Tests web sessions with REPLICATED cache in compatibility mode. - */ - public static class WebSessionReplicatedV1SelfTest extends WebSessionReplicatedSelfTest { - /** {@inheritDoc} */ - @Override protected boolean keepBinary() { - return false; - } - } } \ No newline at end of file diff --git a/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedSelfTest.java b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedSelfTest.java new file mode 100644 index 0000000000000..638fdccb878d8 --- /dev/null +++ b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedSelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.websession; + +/** + * Tests web sessions with REPLICATED cache. + */ +public class WebSessionReplicatedSelfTest extends WebSessionSelfTest { + /** {@inheritDoc} */ + @Override protected String getCacheName() { + return "replicated"; + } +} diff --git a/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedV1SelfTest.java b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedV1SelfTest.java new file mode 100644 index 0000000000000..ba69d1322f56a --- /dev/null +++ b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionReplicatedV1SelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.websession; + +/** + * Tests web sessions with REPLICATED cache in compatibility mode. + */ +public class WebSessionReplicatedV1SelfTest extends WebSessionReplicatedSelfTest { + /** {@inheritDoc} */ + @Override protected boolean keepBinary() { + return false; + } +} \ No newline at end of file diff --git a/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionSelfTest.java b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionSelfTest.java index 0ab113077f284..5138e3a5a8ed6 100644 --- a/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionSelfTest.java +++ b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionSelfTest.java @@ -46,6 +46,7 @@ import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.apache.ignite.testsuites.IgniteIgnore; import org.eclipse.jetty.security.HashLoginService; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletHolder; @@ -88,6 +89,7 @@ public void testSingleRequest() throws Exception { /** * @throws Exception If failed. */ + @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-3663") public void testSessionRenewalDuringLogin() throws Exception { testSessionRenewalDuringLogin("/modules/core/src/test/config/websession/example-cache.xml"); } diff --git a/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalSelfTest.java b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalSelfTest.java new file mode 100644 index 0000000000000..4cc1a637f7aad --- /dev/null +++ b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalSelfTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.websession; + +import org.apache.ignite.testsuites.IgniteIgnore; + +/** + * Tests web sessions with TRANSACTIONAL cache. + */ +public class WebSessionTransactionalSelfTest extends WebSessionSelfTest { + /** {@inheritDoc} */ + @Override protected String getCacheName() { + return "partitioned_tx"; + } + + /** {@inheritDoc} */ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-810", forceFailure = true) + @Override public void testRestarts() throws Exception { + // No-op. + } + + /** {@inheritDoc} */ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-810", forceFailure = true) + @Override public void testInvalidatedSession() throws Exception { + // No-op. 
+ } + + /** {@inheritDoc} */ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-810", forceFailure = true) + @Override public void testClientReconnectRequest() throws Exception { + // No-op. + } +} diff --git a/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalV1SelfTest.java b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalV1SelfTest.java new file mode 100644 index 0000000000000..6f94471a753c7 --- /dev/null +++ b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionTransactionalV1SelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.websession; + +/** + * Tests web sessions with TRANSACTIONAL cache in compatibility mode. + */ +public class WebSessionTransactionalV1SelfTest extends WebSessionTransactionalSelfTest { + /** {@inheritDoc} */ + @Override protected boolean keepBinary() { + return false; + } +} diff --git a/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionV1SelfTest.java b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionV1SelfTest.java new file mode 100644 index 0000000000000..791bec02285a9 --- /dev/null +++ b/modules/web/src/test/java/org/apache/ignite/internal/websession/WebSessionV1SelfTest.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.websession; + +/** + * Tests the correctness of web sessions caching functionality in compatibility mode. 
+ */ +public class WebSessionV1SelfTest extends WebSessionSelfTest { + /** {@inheritDoc} */ + @Override protected boolean keepBinary() { + return false; + } +} diff --git a/pom.xml b/pom.xml index 2c7bad19c745c..36051b4aca0b4 100644 --- a/pom.xml +++ b/pom.xml @@ -495,6 +495,13 @@ + + ignored-tests + + modules/ignored-tests + + + examples From c8dc92ecc8a5d76e68d2d75f12158e0a581a0326 Mon Sep 17 00:00:00 2001 From: sboikov Date: Tue, 27 Sep 2016 15:17:05 +0300 Subject: [PATCH 34/69] ignite-3973 In TcpDiscoveryMulticastIpFinder.requestAddresses wait full timeout for remote addresses --- .../ipfinder/multicast/TcpDiscoveryMulticastIpFinder.java | 3 --- .../discovery/tcp/TcpClientDiscoverySpiMulticastTest.java | 2 +- .../ignite/spi/discovery/tcp/TcpDiscoverySelfTest.java | 2 +- .../multicast/TcpDiscoveryMulticastIpFinderSelfTest.java | 6 +++--- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinder.java b/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinder.java index d5b3dae3270a5..5bbe90edbafc1 100644 --- a/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinder.java +++ b/modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinder.java @@ -618,9 +618,6 @@ private T2, Boolean> requestAddresses(InetAddress U.close(sock); } - if (rmtAddrs.size() > locNodeAddrs.size()) - break; - if (i < addrReqAttempts - 1) // Wait some time before re-sending address request. U.sleep(200); } diff --git a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpClientDiscoverySpiMulticastTest.java b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpClientDiscoverySpiMulticastTest.java index 27ce883e8cde4..ba8fe73149fc1 100644 --- a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpClientDiscoverySpiMulticastTest.java +++ b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpClientDiscoverySpiMulticastTest.java @@ -58,7 +58,7 @@ public class TcpClientDiscoverySpiMulticastTest extends GridCommonAbstractTest { TcpDiscoveryMulticastIpFinder ipFinder = new TcpDiscoveryMulticastIpFinder(); - ipFinder.setAddressRequestAttempts(10); + ipFinder.setAddressRequestAttempts(5); spi.setIpFinder(ipFinder); diff --git a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySelfTest.java b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySelfTest.java index 035c6fefbe47c..1ce98a538657e 100644 --- a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySelfTest.java @@ -181,7 +181,7 @@ public TcpDiscoverySelfTest() throws Exception { else if (gridName.contains("MulticastIpFinder")) { TcpDiscoveryMulticastIpFinder finder = new TcpDiscoveryMulticastIpFinder(); - finder.setAddressRequestAttempts(10); + finder.setAddressRequestAttempts(5); finder.setMulticastGroup(GridTestUtils.getNextMulticastGroup(getClass())); finder.setMulticastPort(GridTestUtils.getNextMulticastPort(getClass())); diff --git a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinderSelfTest.java 
b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinderSelfTest.java index 90fdb0a76c22e..29ed595f38189 100644 --- a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinderSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/multicast/TcpDiscoveryMulticastIpFinderSelfTest.java @@ -59,19 +59,19 @@ public void testExchange() throws Exception { try { ipFinder1 = ipFinder(); ipFinder1.setResponseWaitTime(1000); - ipFinder1.setAddressRequestAttempts(10); + ipFinder1.setAddressRequestAttempts(5); ipFinder2 = new TcpDiscoveryMulticastIpFinder(); ipFinder2.setResponseWaitTime(1000); - ipFinder2.setAddressRequestAttempts(10); + ipFinder2.setAddressRequestAttempts(5); ipFinder2.setMulticastGroup(ipFinder1.getMulticastGroup()); ipFinder2.setMulticastPort(ipFinder1.getMulticastPort()); ipFinder3 = new TcpDiscoveryMulticastIpFinder(); ipFinder3.setResponseWaitTime(1000); - ipFinder3.setAddressRequestAttempts(10); + ipFinder3.setAddressRequestAttempts(5); ipFinder3.setMulticastGroup(ipFinder1.getMulticastGroup()); ipFinder3.setMulticastPort(ipFinder1.getMulticastPort()); From 8ba2b947895cabdddb8633a39063c8739c18ad1b Mon Sep 17 00:00:00 2001 From: sboikov Date: Tue, 27 Sep 2016 16:07:52 +0300 Subject: [PATCH 35/69] ignite-3967 Do not use GridBoundedConcurrentOrderedMap.clear --- .../apache/ignite/internal/IgniteKernal.java | 2 +- .../discovery/GridDiscoveryManager.java | 73 ++++++++++--------- .../util/GridBoundedConcurrentOrderedMap.java | 5 ++ .../IgniteClientReconnectCacheTest.java | 35 +++++++++ ...ridDiscoveryManagerAliveCacheSelfTest.java | 4 +- .../GridDiscoveryManagerSelfTest.java | 6 +- 6 files changed, 85 insertions(+), 40 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java index 6c5a6282420b9..e0a36a7e19a41 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java @@ -1126,7 +1126,7 @@ else if (e instanceof IgniteCheckedException) ackStart(rtBean); if (!isDaemon()) - ctx.discovery().ackTopology(); + ctx.discovery().ackTopology(localNode().order()); } /** diff --git a/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java b/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java index bbf3ebd2a2fc3..345cfb48b6d7f 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java @@ -42,7 +42,6 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -81,7 +80,7 @@ import org.apache.ignite.internal.processors.service.GridServiceProcessor; import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor; import org.apache.ignite.internal.util.F0; -import org.apache.ignite.internal.util.GridBoundedConcurrentOrderedMap; +import org.apache.ignite.internal.util.GridBoundedConcurrentLinkedHashMap; import 
org.apache.ignite.internal.util.GridSpinBusyLock; import org.apache.ignite.internal.util.future.GridFinishedFuture; import org.apache.ignite.internal.util.future.GridFutureAdapter; @@ -113,15 +112,16 @@ import org.apache.ignite.spi.discovery.DiscoverySpiListener; import org.apache.ignite.spi.discovery.DiscoverySpiNodeAuthenticator; import org.apache.ignite.spi.discovery.DiscoverySpiOrderSupport; -import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode; import org.apache.ignite.thread.IgniteThread; import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentHashMap8; import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static org.apache.ignite.IgniteSystemProperties.IGNITE_BINARY_MARSHALLER_USE_STRING_SERIALIZATION_VER_2; +import static org.apache.ignite.IgniteSystemProperties.IGNITE_DISCOVERY_HISTORY_SIZE; import static org.apache.ignite.IgniteSystemProperties.IGNITE_OPTIMIZED_MARSHALLER_USE_DEFAULT_SUID; import static org.apache.ignite.IgniteSystemProperties.IGNITE_SERVICES_COMPATIBILITY_MODE; -import static org.apache.ignite.IgniteSystemProperties.IGNITE_BINARY_MARSHALLER_USE_STRING_SERIALIZATION_VER_2; +import static org.apache.ignite.IgniteSystemProperties.getInteger; import static org.apache.ignite.events.EventType.EVT_CLIENT_NODE_DISCONNECTED; import static org.apache.ignite.events.EventType.EVT_CLIENT_NODE_RECONNECTED; import static org.apache.ignite.events.EventType.EVT_NODE_FAILED; @@ -169,7 +169,7 @@ public class GridDiscoveryManager extends GridManagerAdapter { private static final String PREFIX = "Topology snapshot"; /** Discovery cached history size. */ - protected static final int DISCOVERY_HISTORY_SIZE = 100; + static final int DISCOVERY_HISTORY_SIZE = getInteger(IGNITE_DISCOVERY_HISTORY_SIZE, 500); /** Predicate filtering out daemon nodes. */ private static final IgnitePredicate FILTER_DAEMON = new P1() { @@ -210,8 +210,8 @@ public class GridDiscoveryManager extends GridManagerAdapter { private final AtomicBoolean lastSegChkRes = new AtomicBoolean(true); /** Topology cache history. */ - private final ConcurrentNavigableMap discoCacheHist = - new GridBoundedConcurrentOrderedMap<>(DISCOVERY_HISTORY_SIZE); + private final GridBoundedConcurrentLinkedHashMap discoCacheHist = + new GridBoundedConcurrentLinkedHashMap<>(DISCOVERY_HISTORY_SIZE); /** Topology snapshots history. */ private volatile Map> topHist = new HashMap<>(); @@ -599,7 +599,11 @@ else if (type == EVT_CLIENT_NODE_DISCONNECTED) { registeredCaches.clear(); - discoCacheHist.clear(); + for (AffinityTopologyVersion histVer : discoCacheHist.keySet()) { + Object rmvd = discoCacheHist.remove(histVer); + + assert rmvd != null : histVer; + } topHist.clear(); @@ -1199,9 +1203,11 @@ private static int cpus(Collection nodes) { /** * Prints the latest topology info into log taking into account logging/verbosity settings. + * + * @param topVer Topology version. 
*/ - public void ackTopology() { - ackTopology(topSnap.get().topVer.topologyVersion(), false); + public void ackTopology(long topVer) { + ackTopology(topVer, false); } /** @@ -1213,7 +1219,22 @@ public void ackTopology() { private void ackTopology(long topVer, boolean throttle) { assert !isLocDaemon; - DiscoCache discoCache = discoCache(); + DiscoCache discoCache = discoCacheHist.get(new AffinityTopologyVersion(topVer)); + + if (discoCache == null) { + String msg = "Failed to resolve nodes topology [topVer=" + topVer + + ", hist=" + discoCacheHist.keySet() + ']'; + + if (log.isQuiet()) + U.quiet(false, msg); + + if (log.isDebugEnabled()) + log.debug(msg); + else if (log.isInfoEnabled()) + log.info(msg); + + return; + } Collection rmtNodes = discoCache.remoteNodes(); @@ -1237,7 +1258,7 @@ private void ackTopology(long topVer, boolean throttle) { double heap = U.heapSize(allNodes, 2); if (log.isQuiet()) - U.quiet(false, topologySnapshotMessage(srvNodes.size(), clientNodes.size(), totalCpus, heap)); + U.quiet(false, topologySnapshotMessage(topVer, srvNodes.size(), clientNodes.size(), totalCpus, heap)); if (log.isDebugEnabled()) { String dbg = ""; @@ -1281,19 +1302,20 @@ private void ackTopology(long topVer, boolean throttle) { log.debug(dbg); } else if (log.isInfoEnabled()) - log.info(topologySnapshotMessage(srvNodes.size(), clientNodes.size(), totalCpus, heap)); + log.info(topologySnapshotMessage(topVer, srvNodes.size(), clientNodes.size(), totalCpus, heap)); } /** + * @param topVer Topology version. * @param srvNodesNum Server nodes number. * @param clientNodesNum Client nodes number. * @param totalCpus Total cpu number. * @param heap Heap size. * @return Topology snapshot message. */ - private String topologySnapshotMessage(int srvNodesNum, int clientNodesNum, int totalCpus, double heap) { + private String topologySnapshotMessage(long topVer, int srvNodesNum, int clientNodesNum, int totalCpus, double heap) { return PREFIX + " [" + - (discoOrdered ? "ver=" + topSnap.get().topVer.topologyVersion() + ", " : "") + + (discoOrdered ? "ver=" + topVer + ", " : "") + "servers=" + srvNodesNum + ", clients=" + clientNodesNum + ", CPUs=" + totalCpus + @@ -1506,7 +1528,7 @@ public IgniteInternalFuture topologyFuture(final long awaitVer) { * * @return Discovery collection cache. */ - public DiscoCache discoCache() { + private DiscoCache discoCache() { Snapshot cur = topSnap.get(); assert cur != null; @@ -1533,15 +1555,6 @@ public Collection allNodes() { return discoCache().allNodes(); } - /** - * Gets topology grouped by node versions. - * - * @return Version to collection of nodes map. - */ - public NavigableMap> topologyVersionMap() { - return discoCache().versionsMap(); - } - /** @return Full topology size. */ public int size() { return discoCache().allNodes().size(); @@ -1781,16 +1794,6 @@ private DiscoCache resolveDiscoCache(@Nullable String cacheName, AffinityTopolog DiscoCache cache = AffinityTopologyVersion.NONE.equals(topVer) || topVer.equals(snap.topVer) ? snap.discoCache : discoCacheHist.get(topVer); - if (cache == null) { - // Find the eldest acceptable discovery cache. 
- Map.Entry eldest = discoCacheHist.firstEntry(); - - if (eldest != null) { - if (topVer.compareTo(eldest.getKey()) < 0) - cache = eldest.getValue(); - } - } - if (cache == null) { throw new IgniteException("Failed to resolve nodes topology [cacheName=" + cacheName + ", topVer=" + topVer + diff --git a/modules/core/src/main/java/org/apache/ignite/internal/util/GridBoundedConcurrentOrderedMap.java b/modules/core/src/main/java/org/apache/ignite/internal/util/GridBoundedConcurrentOrderedMap.java index 3f6db30a08d70..33b2e6032bc04 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/util/GridBoundedConcurrentOrderedMap.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/util/GridBoundedConcurrentOrderedMap.java @@ -236,4 +236,9 @@ private void onPut() { return rmvd; } + + /** {@inheritDoc} */ + @Override public void clear() { + throw new UnsupportedOperationException(); + } } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/IgniteClientReconnectCacheTest.java b/modules/core/src/test/java/org/apache/ignite/internal/IgniteClientReconnectCacheTest.java index ad6c46f284b74..0f0165b0f04f9 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/IgniteClientReconnectCacheTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/IgniteClientReconnectCacheTest.java @@ -27,6 +27,7 @@ import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import javax.cache.CacheException; import junit.framework.AssertionFailedError; @@ -1084,6 +1085,21 @@ public void testReconnectClusterRestartMultinode() throws Exception { * @throws Exception If failed. */ public void testReconnectMultinode() throws Exception { + reconnectMultinode(false); + } + + /** + * @throws Exception If failed. + */ + public void testReconnectMultinodeLongHistory() throws Exception { + reconnectMultinode(true); + } + + /** + * @param longHist If {@code true} generates many discovery events to overflow events history. + * @throws Exception If failed. + */ + private void reconnectMultinode(boolean longHist) throws Exception { grid(0).createCache(new CacheConfiguration<>()); clientMode = true; @@ -1100,6 +1116,25 @@ public void testReconnectMultinode() throws Exception { clients.add(client); } + if (longHist) { + // Generate many discovery events to overflow discovery events history. 
+ final AtomicInteger nodeIdx = new AtomicInteger(SRV_CNT + CLIENTS); + + GridTestUtils.runMultiThreaded(new Callable() { + @Override public Void call() throws Exception { + int idx = nodeIdx.incrementAndGet(); + + for (int i = 0; i < 25; i++) { + startGrid(idx); + + stopGrid(idx); + } + + return null; + } + }, 4, "restart-thread"); + } + int nodes = SRV_CNT + CLIENTS; int srvNodes = SRV_CNT; diff --git a/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerAliveCacheSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerAliveCacheSelfTest.java index 50bcc415b8860..390c83e46bba1 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerAliveCacheSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerAliveCacheSelfTest.java @@ -219,7 +219,9 @@ private void validateAlives() { long currVer = discoMgr.topologyVersion(); - for (long v = currVer; v > currVer - GridDiscoveryManager.DISCOVERY_HISTORY_SIZE && v > 0; v--) { + long startVer = discoMgr.localNode().order(); + + for (long v = currVer; v > currVer - GridDiscoveryManager.DISCOVERY_HISTORY_SIZE && v >= startVer; v--) { F.forAll(discoMgr.aliveCacheNodes(null, new AffinityTopologyVersion(v)), new IgnitePredicate() { @Override public boolean apply(ClusterNode e) { diff --git a/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerSelfTest.java index 7912ee1293c86..c9179d44c0060 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManagerSelfTest.java @@ -88,15 +88,15 @@ public abstract class GridDiscoveryManagerSelfTest extends GridCommonAbstractTes public void testHasNearCache() throws Exception { IgniteKernal g0 = (IgniteKernal)startGrid(0); // PARTITIONED_ONLY cache. - AffinityTopologyVersion zero = new AffinityTopologyVersion(0); + AffinityTopologyVersion none = new AffinityTopologyVersion(-1); AffinityTopologyVersion one = new AffinityTopologyVersion(1); AffinityTopologyVersion two = new AffinityTopologyVersion(2, 2); AffinityTopologyVersion three = new AffinityTopologyVersion(3); AffinityTopologyVersion four = new AffinityTopologyVersion(4); AffinityTopologyVersion five = new AffinityTopologyVersion(5); - assertFalse(g0.context().discovery().hasNearCache(CACHE_NAME, zero)); - assertFalse(g0.context().discovery().hasNearCache(null, zero)); + assertFalse(g0.context().discovery().hasNearCache(CACHE_NAME, none)); + assertFalse(g0.context().discovery().hasNearCache(null, none)); assertFalse(g0.context().discovery().hasNearCache(CACHE_NAME, one)); assertFalse(g0.context().discovery().hasNearCache(null, one)); From 7f8281cd191ea576a8d6358b53fb13e4344cb9d5 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Tue, 27 Sep 2016 16:37:40 +0300 Subject: [PATCH 36/69] IGNITE-3978: Applied "IgniteIgnore" annotation to failing S3 tests. This closes #1123. 
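The mechanism introduced here replaces ad-hoc failures with a declarative marker: a known-broken test method carries @IgniteIgnore with the tracking JIRA URL, the regular suites count it as ignored instead of running it, and the ignored-tests module flips IgniteTestSuite into ignored-only mode so that exactly those methods are executed there. A minimal usage sketch assembled from the hunks in this patch (the annotation value and the ignoreDefault() call are taken from the diff; the method body is illustrative only):

    // In a test class: mark the failing scenario instead of calling fail(...).
    @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-2420")
    public void testS3Based() throws Exception {
        // Scenario tracked in JIRA; body left unchanged.
    }

    // In IgniteIgnoredTestSuite: switch the default before building child suites
    // so that only @IgniteIgnore-annotated methods are collected.
    IgniteTestSuite.ignoreDefault(true);

    IgniteTestSuite suite = new IgniteTestSuite(null, "Ignite Ignored Test Suite");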
--- .../s3/S3CheckpointManagerSelfTest.java | 3 ++ .../s3/S3CheckpointSpiSelfTest.java | 4 ++ .../s3/S3CheckpointSpiStartStopSelfTest.java | 7 ++++ .../s3/S3SessionCheckpointSelfTest.java | 2 + .../s3/TcpDiscoveryS3IpFinderSelfTest.java | 7 ++++ .../ignite/testsuites/IgniteS3TestSuite.java | 15 +++---- .../ignite/testframework/IgniteTestSuite.java | 41 +++++++++++++++++-- modules/ignored-tests/pom.xml | 14 +++++++ .../testsuites/IgniteIgnoredTestSuite.java | 21 ++++------ 9 files changed, 90 insertions(+), 24 deletions(-) diff --git a/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java b/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java index db09e337c19bc..a90323e7dd4c7 100644 --- a/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java +++ b/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java @@ -21,6 +21,7 @@ import com.amazonaws.auth.BasicAWSCredentials; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManagerAbstractSelfTest; +import org.apache.ignite.testsuites.IgniteIgnore; import org.apache.ignite.testsuites.IgniteS3TestSuite; /** @@ -50,6 +51,7 @@ public class S3CheckpointManagerSelfTest extends GridCheckpointManagerAbstractSe /** * @throws Exception Thrown if any exception occurs. */ + @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-2420") public void testS3Based() throws Exception { retries = 6; @@ -59,6 +61,7 @@ public void testS3Based() throws Exception { /** * @throws Exception Thrown if any exception occurs. */ + @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-2420") public void testMultiNodeS3Based() throws Exception { retries = 6; diff --git a/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiSelfTest.java b/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiSelfTest.java index 11e5bad029a9c..23abe0642136f 100644 --- a/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiSelfTest.java +++ b/modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiSelfTest.java @@ -34,6 +34,7 @@ import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.spi.GridSpiAbstractTest; import org.apache.ignite.testframework.junits.spi.GridSpiTest; +import org.apache.ignite.testsuites.IgniteIgnore; import org.apache.ignite.testsuites.IgniteS3TestSuite; /** @@ -91,6 +92,7 @@ public class S3CheckpointSpiSelfTest extends GridSpiAbstractTest IGNORE_DFLT = new ThreadLocal() { + @Override protected Boolean initialValue() { + return false; + } + }; + /** Whether to execute only ignored tests. */ - private final boolean ignoredOnly; + private boolean ignoredOnly; /** * Constructor. @@ -54,7 +61,7 @@ public IgniteTestSuite(String name) { * @param theClass TestCase class */ public IgniteTestSuite(Class theClass) { - this(theClass, false); + this(theClass, ignoreDefault()); } /** @@ -74,7 +81,7 @@ public IgniteTestSuite(Class theClass, boolean ignoredOnly) * @param name Test suite name. */ public IgniteTestSuite(Class theClass, String name) { - this(theClass, name, false); + this(theClass, name, ignoreDefault()); } /** @@ -94,6 +101,13 @@ public IgniteTestSuite(@Nullable Class theClass, @Nullable S setName(name); } + /** + * Adds a test to the suite. 
+ */ + @Override public void addTest(Test test) { + super.addTest(test); + } + /** {@inheritDoc} */ @Override public void addTestSuite(Class testClass) { addTest(new IgniteTestSuite(testClass, ignoredOnly)); @@ -122,6 +136,7 @@ private void addTestsFromTestCase(Class theClass) { Class superCls = theClass; int testAdded = 0; + int testIgnored = 0; for(List names = new ArrayList<>(); Test.class.isAssignableFrom(superCls); superCls = superCls.getSuperclass()) { @@ -131,10 +146,12 @@ private void addTestsFromTestCase(Class theClass) { for (Method each : methods) { if (addTestMethod(each, names, theClass)) testAdded++; + else + testIgnored++; } } - if(testAdded == 0) + if(testAdded == 0 && testIgnored == 0) addTest(warning("No tests found in " + theClass.getName())); } } @@ -221,6 +238,22 @@ private static boolean isPublicTestMethod(Method m) { return isTestMethod(m) && Modifier.isPublic(m.getModifiers()); } + /** + * @param val Default value of ignore flag. + */ + public static void ignoreDefault(boolean val) { + IGNORE_DFLT.set(val); + } + + /** + * @return Default value of ignore flag. + */ + private static boolean ignoreDefault() { + Boolean res = IGNORE_DFLT.get(); + + return res != null && res; + } + /** * Test case simulating failure. */ diff --git a/modules/ignored-tests/pom.xml b/modules/ignored-tests/pom.xml index a82a5bb6db23c..cfeacb2283e4c 100644 --- a/modules/ignored-tests/pom.xml +++ b/modules/ignored-tests/pom.xml @@ -62,6 +62,20 @@ test + + org.apache.ignite + ignite-aws + ${project.version} + + + + org.apache.ignite + ignite-aws + ${project.version} + test-jar + test + + org.apache.ignite ignite-web diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java index f6ce3e3f09444..fa60721c5d0ad 100644 --- a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java @@ -18,12 +18,7 @@ package org.apache.ignite.testsuites; import junit.framework.TestSuite; -import org.apache.ignite.internal.websession.WebSessionReplicatedSelfTest; -import org.apache.ignite.internal.websession.WebSessionReplicatedV1SelfTest; -import org.apache.ignite.internal.websession.WebSessionSelfTest; -import org.apache.ignite.internal.websession.WebSessionTransactionalSelfTest; -import org.apache.ignite.internal.websession.WebSessionTransactionalV1SelfTest; -import org.apache.ignite.internal.websession.WebSessionV1SelfTest; +import org.apache.ignite.internal.websession.IgniteWebSessionSelfTestSuite; import org.apache.ignite.testframework.IgniteTestSuite; /** @@ -35,15 +30,15 @@ public class IgniteIgnoredTestSuite extends TestSuite { * @throws Exception Thrown in case of the failure. 
*/ public static TestSuite suite() throws Exception { - IgniteTestSuite suite = new IgniteTestSuite(null, "Ignite Ignored Test Suite", true); + IgniteTestSuite.ignoreDefault(true); + + IgniteTestSuite suite = new IgniteTestSuite(null, "Ignite Ignored Test Suite"); + + /* --- AWS --- */ + suite.addTest(IgniteS3TestSuite.suite()); /* --- WEB SESSIONS --- */ - suite.addTestSuite(WebSessionSelfTest.class); - suite.addTestSuite(WebSessionTransactionalSelfTest.class); - suite.addTestSuite(WebSessionReplicatedSelfTest.class); - suite.addTestSuite(WebSessionV1SelfTest.class); - suite.addTestSuite(WebSessionTransactionalV1SelfTest.class); - suite.addTestSuite(WebSessionReplicatedV1SelfTest.class); + suite.addTest(IgniteWebSessionSelfTestSuite.suite()); return suite; } From 2bfa06dd75fa55ad01438fa59d14b864ec95834e Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Wed, 28 Sep 2016 08:22:40 +0700 Subject: [PATCH 37/69] Fixed typo. --- .../examples/Apache.Ignite.ExamplesDll/Events/LocalListener.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Events/LocalListener.cs b/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Events/LocalListener.cs index 8c689dc04352f..46524a1698ca9 100644 --- a/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Events/LocalListener.cs +++ b/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Events/LocalListener.cs @@ -26,7 +26,7 @@ namespace Apache.Ignite.ExamplesDll.Events /// public class LocalListener : IEventListener { - /** Сount of received events. */ + /** Count of received events. */ private int _eventsReceived; /// From c188c3c4a96eacb85ea8e08f0634288332432c1c Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Wed, 28 Sep 2016 08:46:23 +0700 Subject: [PATCH 38/69] IGNITE-3983 Fixed wrong cache load optimization. Test added. --- .../store/jdbc/CacheAbstractJdbcStore.java | 4 +- .../CacheJdbcPojoStoreAbstractSelfTest.java | 55 ++++++++++++------- ...JdbcPojoStoreBinaryMarshallerSelfTest.java | 12 ++-- 3 files changed, 43 insertions(+), 28 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java index a9a8ce1d0b697..f316676cabae5 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java @@ -893,9 +893,9 @@ protected Integer columnIndex(Map loadColIdxs, String dbName) { } futs.add(pool.submit(loadCacheRange(em, clo, upperBound, null, 0))); - } - continue; + continue; + } } catch (SQLException e) { log.warning("Failed to load entries from db in multithreaded mode, will try in single thread " + diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java index 83065f1bbb146..368a28e010c3b 100644 --- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java @@ -50,31 +50,34 @@ */ public abstract class CacheJdbcPojoStoreAbstractSelfTest extends GridCommonAbstractTest { /** IP finder. 
*/ - protected static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); + private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); /** DB connection URL. */ - protected static final String DFLT_CONN_URL = "jdbc:h2:mem:TestDatabase;DB_CLOSE_DELAY=-1"; + private static final String DFLT_CONN_URL = "jdbc:h2:mem:TestDatabase;DB_CLOSE_DELAY=-1"; /** Organization count. */ - protected static final int ORGANIZATION_CNT = 1000; + private static final int ORGANIZATION_CNT = 1000; /** Person count. */ - protected static final int PERSON_CNT = 100000; + private static final int PERSON_CNT = 100000; /** Test cache name. */ - protected static final String CACHE_NAME = "test-cache"; + private static final String CACHE_NAME = "test-cache"; /** Flag indicating that tests should use transactional cache. */ - protected static boolean transactional; + private static boolean transactional; /** Flag indicating that tests should use primitive classes like java.lang.Integer for keys. */ protected static boolean builtinKeys; /** Flag indicating that classes for keys available on class path or not. */ - protected static boolean noKeyClasses; + private static boolean noKeyClasses; /** Flag indicating that classes for values available on class path or not. */ - protected static boolean noValClasses; + private static boolean noValClasses; + + /** Batch size to load in parallel. */ + private static int parallelLoadThreshold; /** * @return Flag indicating that all internal SQL queries should use escaped identifiers. @@ -221,6 +224,7 @@ protected CacheConfiguration cacheConfiguration() throws Exception { storeFactory.setTypes(storeTypes()); storeFactory.setDataSourceFactory(new H2DataSourceFactory()); // H2 DataSource factory. storeFactory.setSqlEscapeAll(sqlEscapeAll()); + storeFactory.setParallelLoadCacheMinimumThreshold(parallelLoadThreshold); cc.setCacheStoreFactory(storeFactory); cc.setReadThrough(true); @@ -285,13 +289,15 @@ protected void fillSampleDatabase(Connection conn) throws SQLException { * @param noKeyCls {@code True} if keys classes are not on class path. * @param noValCls {@code True} if values classes are not on class path. * @param trn {@code True} if cache should be started in transactional mode. + * @param threshold Load batch size. * @throws Exception If failed to start grid. */ - protected void startTestGrid(boolean builtin, boolean noKeyCls, boolean noValCls, boolean trn) throws Exception { + protected void startTestGrid(boolean builtin, boolean noKeyCls, boolean noValCls, boolean trn, int threshold) throws Exception { builtinKeys = builtin; noKeyClasses = noKeyCls; noValClasses = noValCls; transactional = trn; + parallelLoadThreshold = threshold; startGrid(); } @@ -322,7 +328,16 @@ protected void checkCacheLoadWithSql() { * @throws Exception If failed. */ public void testLoadCache() throws Exception { - startTestGrid(false, false, false, false); + startTestGrid(false, false, false, false, 512); + + checkCacheLoad(); + } + + /** + * @throws Exception If failed. + */ + public void testLoadCacheAll() throws Exception { + startTestGrid(false, false, false, false, ORGANIZATION_CNT + PERSON_CNT + 1); checkCacheLoad(); } @@ -331,7 +346,7 @@ public void testLoadCache() throws Exception { * @throws Exception If failed. 
*/ public void testLoadCacheWithSql() throws Exception { - startTestGrid(false, false, false, false); + startTestGrid(false, false, false, false, 512); checkCacheLoadWithSql(); } @@ -340,7 +355,7 @@ public void testLoadCacheWithSql() throws Exception { * @throws Exception If failed. */ public void testLoadCacheTx() throws Exception { - startTestGrid(false, false, false, true); + startTestGrid(false, false, false, true, 512); checkCacheLoad(); } @@ -349,7 +364,7 @@ public void testLoadCacheTx() throws Exception { * @throws Exception If failed. */ public void testLoadCacheWithSqlTx() throws Exception { - startTestGrid(false, false, false, true); + startTestGrid(false, false, false, true, 512); checkCacheLoadWithSql(); } @@ -358,7 +373,7 @@ public void testLoadCacheWithSqlTx() throws Exception { * @throws Exception If failed. */ public void testLoadCachePrimitiveKeys() throws Exception { - startTestGrid(true, false, false, false); + startTestGrid(true, false, false, false, 512); checkCacheLoad(); } @@ -367,7 +382,7 @@ public void testLoadCachePrimitiveKeys() throws Exception { * @throws Exception If failed. */ public void testLoadCachePrimitiveKeysTx() throws Exception { - startTestGrid(true, false, false, true); + startTestGrid(true, false, false, true, 512); checkCacheLoad(); } @@ -448,7 +463,7 @@ private void checkPutRemove() throws Exception { * @throws Exception If failed. */ public void testPutRemoveBuiltIn() throws Exception { - startTestGrid(true, false, false, false); + startTestGrid(true, false, false, false, 512); checkPutRemove(); } @@ -457,7 +472,7 @@ public void testPutRemoveBuiltIn() throws Exception { * @throws Exception If failed. */ public void testPutRemove() throws Exception { - startTestGrid(false, false, false, false); + startTestGrid(false, false, false, false, 512); checkPutRemove(); } @@ -466,7 +481,7 @@ public void testPutRemove() throws Exception { * @throws Exception If failed. */ public void testPutRemoveTxBuiltIn() throws Exception { - startTestGrid(true, false, false, true); + startTestGrid(true, false, false, true, 512); checkPutRemove(); } @@ -475,7 +490,7 @@ public void testPutRemoveTxBuiltIn() throws Exception { * @throws Exception If failed. */ public void testPutRemoveTx() throws Exception { - startTestGrid(false, false, false, true); + startTestGrid(false, false, false, true, 512); checkPutRemove(); } @@ -484,7 +499,7 @@ public void testPutRemoveTx() throws Exception { * @throws Exception If failed. */ public void testLoadNotRegisteredType() throws Exception { - startTestGrid(false, false, false, false); + startTestGrid(false, false, false, false, 512); IgniteCache c1 = grid().cache(CACHE_NAME); diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java index 14c743cbb3bd5..b6d6fe13ac275 100644 --- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java @@ -33,7 +33,7 @@ public class CacheJdbcPojoStoreBinaryMarshallerSelfTest extends CacheJdbcPojoSto * @throws Exception If failed. 
*/ public void testLoadCacheNoKeyClasses() throws Exception { - startTestGrid(false, true, false, false); + startTestGrid(false, true, false, false, 512); checkCacheLoad(); } @@ -42,7 +42,7 @@ public void testLoadCacheNoKeyClasses() throws Exception { * @throws Exception If failed. */ public void testLoadCacheNoKeyClassesTx() throws Exception { - startTestGrid(false, true, false, true); + startTestGrid(false, true, false, true, 512); checkCacheLoad(); } @@ -51,7 +51,7 @@ public void testLoadCacheNoKeyClassesTx() throws Exception { * @throws Exception If failed. */ public void testLoadCacheNoValueClasses() throws Exception { - startTestGrid(false, false, true, false); + startTestGrid(false, false, true, false, 512); checkCacheLoad(); } @@ -60,7 +60,7 @@ public void testLoadCacheNoValueClasses() throws Exception { * @throws Exception If failed. */ public void testLoadCacheNoValueClassesTx() throws Exception { - startTestGrid(false, false, true, true); + startTestGrid(false, false, true, true, 512); checkCacheLoad(); } @@ -69,7 +69,7 @@ public void testLoadCacheNoValueClassesTx() throws Exception { * @throws Exception If failed. */ public void testLoadCacheNoKeyAndValueClasses() throws Exception { - startTestGrid(false, true, true, false); + startTestGrid(false, true, true, false, 512); checkCacheLoad(); } @@ -78,7 +78,7 @@ public void testLoadCacheNoKeyAndValueClasses() throws Exception { * @throws Exception If failed. */ public void testLoadCacheNoKeyAndValueClassesTx() throws Exception { - startTestGrid(false, true, true, true); + startTestGrid(false, true, true, true, 512); checkCacheLoad(); } From 89c30c8b0be6915d2399be508ddcd9eb439a9aaa Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Wed, 28 Sep 2016 08:57:45 +0700 Subject: [PATCH 39/69] IGNITE-3965 @GridInternal tasks should run via standart LoadBalancingSpi. Added test. --- .../ignite/compute/ComputeTaskSpis.java | 8 +- .../ignite/internal/GridTaskSessionImpl.java | 15 +- .../apache/ignite/internal/IgnitionEx.java | 22 ++ .../loadbalancer/GridLoadBalancerManager.java | 10 +- .../processors/job/GridJobProcessor.java | 3 +- .../session/GridTaskSessionProcessor.java | 9 +- .../processors/task/GridTaskProcessor.java | 3 +- .../processors/task/GridTaskWorker.java | 2 +- ...ridInternalTasksLoadBalancingSelfTest.java | 201 ++++++++++++++++++ .../IgniteSpiLoadBalancingSelfTestSuite.java | 34 +-- 10 files changed, 279 insertions(+), 28 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/spi/loadbalancing/internal/GridInternalTasksLoadBalancingSelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/compute/ComputeTaskSpis.java b/modules/core/src/main/java/org/apache/ignite/compute/ComputeTaskSpis.java index 5edd464f80386..0b6e425639d2f 100644 --- a/modules/core/src/main/java/org/apache/ignite/compute/ComputeTaskSpis.java +++ b/modules/core/src/main/java/org/apache/ignite/compute/ComputeTaskSpis.java @@ -38,22 +38,22 @@ public @interface ComputeTaskSpis { /** * Optional load balancing SPI name. By default, SPI name is equal - * to the name of the SPI class. You can change SPI name by explicitely + * to the name of the SPI class. You can change SPI name by explicitly * supplying {@link org.apache.ignite.spi.IgniteSpi#getName()} parameter in grid configuration. */ public String loadBalancingSpi() default ""; /** * Optional failover SPI name. By default, SPI name is equal - * to the name of the SPI class. You can change SPI name by explicitely + * to the name of the SPI class. 
You can change SPI name by explicitly * supplying {@link org.apache.ignite.spi.IgniteSpi#getName()} parameter in grid configuration. */ public String failoverSpi() default ""; /** * Optional checkpoint SPI name. By default, SPI name is equal - * to the name of the SPI class. You can change SPI name by explicitely + * to the name of the SPI class. You can change SPI name by explicitly * supplying {@link org.apache.ignite.spi.IgniteSpi#getName()} parameter in grid configuration. */ public String checkpointSpi() default ""; -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridTaskSessionImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/GridTaskSessionImpl.java index bbe05b8127221..dd1caa15396b3 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/GridTaskSessionImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/GridTaskSessionImpl.java @@ -102,6 +102,9 @@ public class GridTaskSessionImpl implements GridTaskSessionInternal { /** */ private final boolean fullSup; + /** */ + private final boolean internal; + /** */ private final Collection top; @@ -124,6 +127,7 @@ public class GridTaskSessionImpl implements GridTaskSessionInternal { * @param attrs Session attributes. * @param ctx Grid Kernal Context. * @param fullSup Session full support enabled flag. + * @param internal Internal task flag. * @param subjId Subject ID. */ public GridTaskSessionImpl( @@ -139,6 +143,7 @@ public GridTaskSessionImpl( @Nullable Map attrs, GridKernalContext ctx, boolean fullSup, + boolean internal, UUID subjId) { assert taskNodeId != null; assert taskName != null; @@ -166,6 +171,7 @@ public GridTaskSessionImpl( } this.fullSup = fullSup; + this.internal = internal; this.subjId = subjId; mapFut = new IgniteFutureImpl(new GridFutureAdapter()); @@ -860,8 +866,15 @@ public void onDone() { return mapFut; } + /** + * @return {@code True} if task is internal. + */ + public boolean isInternal() { + return internal; + } + /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridTaskSessionImpl.class, this); } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java b/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java index b54c17d1794a0..4f64da2f411d5 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java @@ -93,6 +93,7 @@ import org.apache.ignite.spi.eventstorage.memory.MemoryEventStorageSpi; import org.apache.ignite.spi.failover.always.AlwaysFailoverSpi; import org.apache.ignite.spi.indexing.noop.NoopIndexingSpi; +import org.apache.ignite.spi.loadbalancing.LoadBalancingSpi; import org.apache.ignite.spi.loadbalancing.roundrobin.RoundRobinLoadBalancingSpi; import org.apache.ignite.spi.swapspace.file.FileSwapSpaceSpi; import org.apache.ignite.spi.swapspace.noop.NoopSwapSpaceSpi; @@ -2052,6 +2053,27 @@ private void initializeDefaultSpi(IgniteConfiguration cfg) { if (cfg.getLoadBalancingSpi() == null) cfg.setLoadBalancingSpi(new RoundRobinLoadBalancingSpi()); + else { + Collection spis = new ArrayList<>(); + + boolean dfltLoadBalancingSpi = false; + + for (LoadBalancingSpi spi : cfg.getLoadBalancingSpi()) { + spis.add(spi); + + if (spi instanceof RoundRobinLoadBalancingSpi) { + dfltLoadBalancingSpi = true; + + break; + } + } + + // Add default load balancing SPI for internal tasks. 
+ if (!dfltLoadBalancingSpi) + spis.add(new RoundRobinLoadBalancingSpi()); + + cfg.setLoadBalancingSpi(spis.toArray(new LoadBalancingSpi[spis.size()])); + } if (cfg.getIndexingSpi() == null) cfg.setIndexingSpi(new NoopIndexingSpi()); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/managers/loadbalancer/GridLoadBalancerManager.java b/modules/core/src/main/java/org/apache/ignite/internal/managers/loadbalancer/GridLoadBalancerManager.java index 631168b64c7df..15a4d2cd9510b 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/managers/loadbalancer/GridLoadBalancerManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/managers/loadbalancer/GridLoadBalancerManager.java @@ -30,6 +30,7 @@ import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.spi.loadbalancing.LoadBalancingSpi; +import org.apache.ignite.spi.loadbalancing.roundrobin.RoundRobinLoadBalancingSpi; import org.jetbrains.annotations.Nullable; /** @@ -72,7 +73,12 @@ public ClusterNode getBalancedNode(GridTaskSessionImpl ses, List to assert top != null; assert job != null; - return getSpi(ses.getLoadBalancingSpi()).getBalancedNode(ses, top, job); + LoadBalancingSpi spi = getSpi(ses.getLoadBalancingSpi()); + + if (ses.isInternal() && !(spi instanceof RoundRobinLoadBalancingSpi)) + return getSpi(RoundRobinLoadBalancingSpi.class.getSimpleName()).getBalancedNode(ses, top, job); + + return spi.getBalancedNode(ses, top, job); } /** @@ -102,4 +108,4 @@ public ComputeLoadBalancer getLoadBalancer(final GridTaskSessionImpl ses, final } }; } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/job/GridJobProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/job/GridJobProcessor.java index 6a162d377c3f0..563a3d8ddf7c7 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/job/GridJobProcessor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/job/GridJobProcessor.java @@ -1057,6 +1057,7 @@ public void processJobExecuteRequest(ClusterNode node, final GridJobExecuteReque siblings, sesAttrs, req.isSessionFullSupport(), + req.isInternal(), req.getSubjectId()); taskSes.setCheckpointSpi(req.getCheckpointSpi()); @@ -2034,4 +2035,4 @@ private JobsMap(int initCap, float loadFactor, int concurLvl) { return sizex(); } } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/session/GridTaskSessionProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/session/GridTaskSessionProcessor.java index 2ac39c909c37d..d660948e29baa 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/session/GridTaskSessionProcessor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/session/GridTaskSessionProcessor.java @@ -73,8 +73,8 @@ public GridTaskSessionProcessor(GridKernalContext ctx) { * @param endTime Execution end time. * @param siblings Collection of siblings. * @param attrs Map of attributes. - * @param fullSup {@code True} to enable distributed session attributes - * and checkpoints. + * @param fullSup {@code True} to enable distributed session attributes and checkpoints. + * @param internal {@code True} in case of internal task. * @param subjId Subject ID. * @return New session if one did not exist, or existing one. 
*/ @@ -90,6 +90,7 @@ public GridTaskSessionImpl createTaskSession( Collection siblings, Map attrs, boolean fullSup, + boolean internal, UUID subjId) { if (!fullSup) { return new GridTaskSessionImpl( @@ -105,6 +106,7 @@ public GridTaskSessionImpl createTaskSession( attrs, ctx, false, + internal, subjId); } @@ -127,6 +129,7 @@ public GridTaskSessionImpl createTaskSession( attrs, ctx, true, + internal, subjId)); if (old != null) @@ -177,4 +180,4 @@ public boolean removeSession(IgniteUuid sesId) { X.println(">>> Task session processor memory stats [grid=" + ctx.gridName() + ']'); X.println(">>> sesMapSize: " + sesMap.size()); } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskProcessor.java index f54277cbd46d4..6d9722933bcc3 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskProcessor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskProcessor.java @@ -621,6 +621,7 @@ else if (task != null) { Collections.emptyList(), Collections.emptyMap(), fullSup, + dep != null && dep.internalTask(task, taskCls), subjId); ComputeTaskInternalFuture fut = new ComputeTaskInternalFuture<>(ses, ctx); @@ -1367,4 +1368,4 @@ private class TaskCancelMessageListener implements GridMessageListener { } } } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskWorker.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskWorker.java index 79d1a2cf4e35d..452e48cb9ac28 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskWorker.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/task/GridTaskWorker.java @@ -490,7 +490,7 @@ private void initializeSpis() { } } - internal = dep.internalTask(task, taskCls); + internal = ses.isInternal(); recordTaskEvent(EVT_TASK_STARTED, "Task started."); diff --git a/modules/core/src/test/java/org/apache/ignite/spi/loadbalancing/internal/GridInternalTasksLoadBalancingSelfTest.java b/modules/core/src/test/java/org/apache/ignite/spi/loadbalancing/internal/GridInternalTasksLoadBalancingSelfTest.java new file mode 100644 index 0000000000000..da1a4e57d91a9 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/spi/loadbalancing/internal/GridInternalTasksLoadBalancingSelfTest.java @@ -0,0 +1,201 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.spi.loadbalancing.internal; + +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteException; +import org.apache.ignite.cluster.ClusterNode; +import org.apache.ignite.compute.ComputeJob; +import org.apache.ignite.compute.ComputeJobAdapter; +import org.apache.ignite.compute.ComputeJobResult; +import org.apache.ignite.compute.ComputeTaskSession; +import org.apache.ignite.compute.ComputeTaskSplitAdapter; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.processors.task.GridInternal; +import org.apache.ignite.internal.util.lang.GridTuple3; +import org.apache.ignite.internal.visor.VisorTaskArgument; +import org.apache.ignite.internal.visor.node.VisorNodePingTask; +import org.apache.ignite.spi.IgniteSpiAdapter; +import org.apache.ignite.spi.IgniteSpiException; +import org.apache.ignite.spi.IgniteSpiMultipleInstancesSupport; +import org.apache.ignite.spi.loadbalancing.LoadBalancingSpi; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.jetbrains.annotations.Nullable; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.UUID; + +/** + * Test that will start two nodes with custom load balancing SPI and execute {@link GridInternal} task on it. + */ +public class GridInternalTasksLoadBalancingSelfTest extends GridCommonAbstractTest { + /** Grid count. */ + private static final int GRID_CNT = 2; + + /** Expected job result. */ + private static final String JOB_RESULT = "EXPECTED JOB RESULT"; + + /** Expected task result. */ + private static final String TASK_RESULT = JOB_RESULT + JOB_RESULT; + + /** */ + private static Ignite ignite; + + /** If {@code true} then special custom load balancer SPI will be used. */ + private static boolean customLoadBalancer = true; + + /** {@inheritDoc} */ + @Override protected void afterTest() throws Exception { + stopAllGrids(); + } + + /** {@inheritDoc} */ + @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { + IgniteConfiguration cfg = super.getConfiguration(gridName); + + if (customLoadBalancer) + cfg.setLoadBalancingSpi(new CustomLoadBalancerSpi()); + + return cfg; + } + + /** + * This test execute internal tasks over grid with custom balancer. + * + * @throws Exception In case of error. + */ + public void testInternalTaskBalancing() throws Exception { + customLoadBalancer = true; + + ignite = startGrids(GRID_CNT); + + // Task with GridInternal should pass. + assertEquals(TASK_RESULT, ignite.compute().execute(GridInternalTestTask.class.getName(), null)); + + // Visor task should pass. + UUID nid = ignite.cluster().localNode().id(); + + GridTuple3 ping = ignite.compute() + .execute(VisorNodePingTask.class.getName(), new VisorTaskArgument<>(nid, nid, false)); + + assertTrue(ping.get1()); + + // Custom task should fail, because special test load balancer SPI returns null as balanced node. + try { + ignite.compute().execute(CustomTestTask.class.getName(), null); + } + catch (IgniteException e) { + assertTrue(e.getMessage().startsWith("Node can not be null [mappedJob=org.apache.ignite.spi.loadbalancing.internal.GridInternalTasksLoadBalancingSelfTest$CustomTestJob")); + } + } + + /** + * This test execute internal tasks over grid with default balancer. + * + * @throws Exception In case of error. 
+ */ + public void testInternalTaskDefaultBalancing() throws Exception { + customLoadBalancer = false; + + ignite = startGrids(GRID_CNT); + + // Task with GridInternal should pass. + assertEquals(TASK_RESULT, ignite.compute().execute(GridInternalTestTask.class.getName(), null)); + + // Visor task should pass. + UUID nid = ignite.cluster().localNode().id(); + + GridTuple3 ping = ignite.compute() + .execute(VisorNodePingTask.class.getName(), new VisorTaskArgument<>(nid, nid, false)); + + assertTrue(ping.get1()); + + // Custom task should pass. + assertEquals(TASK_RESULT, ignite.compute().execute(CustomTestTask.class.getName(), null)); + } + + /** + * Test task. + */ + private static class CustomTestTask extends ComputeTaskSplitAdapter { + /** {@inheritDoc} */ + @Override protected Collection split(int gridSize, Object arg) { + Collection jobs = new ArrayList<>(gridSize); + + for (int i = 0; i < gridSize; i++) + jobs.add(new CustomTestJob()); + + return jobs; + } + + /** {@inheritDoc} */ + @Override public Serializable reduce(List results) { + assert results != null; + + String res = ""; + + for (ComputeJobResult jobRes : results) + res += jobRes.getData(); + + return res; + } + } + + /** + * Test job. + */ + private static class CustomTestJob extends ComputeJobAdapter { + /** {@inheritDoc} */ + @Override public String execute() { + return JOB_RESULT; + } + } + + + /** + * Test task marked with @GridInternal. + */ + @GridInternal + private static class GridInternalTestTask extends CustomTestTask{ + + } + + /** + * Special test balancer that will do not any balancing. + */ + @IgniteSpiMultipleInstancesSupport(true) + private static class CustomLoadBalancerSpi extends IgniteSpiAdapter implements LoadBalancingSpi { + /** {@inheritDoc} */ + @Override public void spiStart(@Nullable String gridName) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void spiStop() throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public ClusterNode getBalancedNode(ComputeTaskSession ses, List top, ComputeJob job) throws IgniteException { + return null; // Intentionally return null. + } + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiLoadBalancingSelfTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiLoadBalancingSelfTestSuite.java index a20b3c8278682..52d4fddabef24 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiLoadBalancingSelfTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiLoadBalancingSelfTestSuite.java @@ -23,6 +23,7 @@ import org.apache.ignite.spi.loadbalancing.adaptive.GridAdaptiveLoadBalancingSpiMultipleNodeSelfTest; import org.apache.ignite.spi.loadbalancing.adaptive.GridAdaptiveLoadBalancingSpiSelfTest; import org.apache.ignite.spi.loadbalancing.adaptive.GridAdaptiveLoadBalancingSpiStartStopSelfTest; +import org.apache.ignite.spi.loadbalancing.internal.GridInternalTasksLoadBalancingSelfTest; import org.apache.ignite.spi.loadbalancing.roundrobin.GridRoundRobinLoadBalancingNotPerTaskMultithreadedSelfTest; import org.apache.ignite.spi.loadbalancing.roundrobin.GridRoundRobinLoadBalancingSpiLocalNodeSelfTest; import org.apache.ignite.spi.loadbalancing.roundrobin.GridRoundRobinLoadBalancingSpiMultipleNodesSelfTest; @@ -52,25 +53,28 @@ public static Test suite() { TestSuite suite = new TestSuite("Ignite Load Balancing Test Suite"); // Random. 
- suite.addTest(new TestSuite(GridWeightedRandomLoadBalancingSpiSelfTest.class)); - suite.addTest(new TestSuite(GridWeightedRandomLoadBalancingSpiWeightedSelfTest.class)); - suite.addTest(new TestSuite(GridWeightedRandomLoadBalancingSpiStartStopSelfTest.class)); - suite.addTest(new TestSuite(GridWeightedRandomLoadBalancingSpiConfigSelfTest.class)); + suite.addTestSuite(GridWeightedRandomLoadBalancingSpiSelfTest.class); + suite.addTestSuite(GridWeightedRandomLoadBalancingSpiWeightedSelfTest.class); + suite.addTestSuite(GridWeightedRandomLoadBalancingSpiStartStopSelfTest.class); + suite.addTestSuite(GridWeightedRandomLoadBalancingSpiConfigSelfTest.class); // Round-robin. - suite.addTest(new TestSuite(GridRoundRobinLoadBalancingSpiLocalNodeSelfTest.class)); - suite.addTest(new TestSuite(GridRoundRobinLoadBalancingSpiMultipleNodesSelfTest.class)); - suite.addTest(new TestSuite(GridRoundRobinLoadBalancingSpiTopologyChangeSelfTest.class)); - suite.addTest(new TestSuite(GridRoundRobinLoadBalancingSpiNotPerTaskSelfTest.class)); - suite.addTest(new TestSuite(GridRoundRobinLoadBalancingSpiStartStopSelfTest.class)); - suite.addTest(new TestSuite(GridRoundRobinLoadBalancingNotPerTaskMultithreadedSelfTest.class)); + suite.addTestSuite(GridRoundRobinLoadBalancingSpiLocalNodeSelfTest.class); + suite.addTestSuite(GridRoundRobinLoadBalancingSpiMultipleNodesSelfTest.class); + suite.addTestSuite(GridRoundRobinLoadBalancingSpiTopologyChangeSelfTest.class); + suite.addTestSuite(GridRoundRobinLoadBalancingSpiNotPerTaskSelfTest.class); + suite.addTestSuite(GridRoundRobinLoadBalancingSpiStartStopSelfTest.class); + suite.addTestSuite(GridRoundRobinLoadBalancingNotPerTaskMultithreadedSelfTest.class); // Adaptive. - suite.addTest(new TestSuite(GridAdaptiveLoadBalancingSpiSelfTest.class)); - suite.addTest(new TestSuite(GridAdaptiveLoadBalancingSpiMultipleNodeSelfTest.class)); - suite.addTest(new TestSuite(GridAdaptiveLoadBalancingSpiStartStopSelfTest.class)); - suite.addTest(new TestSuite(GridAdaptiveLoadBalancingSpiConfigSelfTest.class)); + suite.addTestSuite(GridAdaptiveLoadBalancingSpiSelfTest.class); + suite.addTestSuite(GridAdaptiveLoadBalancingSpiMultipleNodeSelfTest.class); + suite.addTestSuite(GridAdaptiveLoadBalancingSpiStartStopSelfTest.class); + suite.addTestSuite(GridAdaptiveLoadBalancingSpiConfigSelfTest.class); + + // Load balancing for internal tasks. + suite.addTestSuite(GridInternalTasksLoadBalancingSelfTest.class); return suite; } -} \ No newline at end of file +} From daf974d261efa525678d5fabc6191642c07f9ad4 Mon Sep 17 00:00:00 2001 From: AKuznetsov Date: Wed, 28 Sep 2016 17:22:10 +0700 Subject: [PATCH 40/69] IGNITE-3965 Fixed issues found on review. --- .../src/main/java/org/apache/ignite/internal/IgnitionEx.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java b/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java index 4f64da2f411d5..aad96fbc2056f 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/IgnitionEx.java @@ -2061,11 +2061,8 @@ private void initializeDefaultSpi(IgniteConfiguration cfg) { for (LoadBalancingSpi spi : cfg.getLoadBalancingSpi()) { spis.add(spi); - if (spi instanceof RoundRobinLoadBalancingSpi) { + if (!dfltLoadBalancingSpi && spi instanceof RoundRobinLoadBalancingSpi) dfltLoadBalancingSpi = true; - - break; - } } // Add default load balancing SPI for internal tasks. 
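With patches 39 and 40 combined, tasks marked @GridInternal no longer depend on whatever LoadBalancingSpi the user configures: IgnitionEx appends a RoundRobinLoadBalancingSpi when the configured array lacks one, and GridLoadBalancerManager routes internal task sessions to that SPI. A configuration sketch of the resulting behaviour (MyCustomBalancerSpi is a placeholder for any user-provided implementation, not a class from the patch):

    IgniteConfiguration cfg = new IgniteConfiguration();

    // Application compute tasks keep using the custom balancer...
    cfg.setLoadBalancingSpi(new MyCustomBalancerSpi());

    // ...while after startup the effective configuration also contains a
    // RoundRobinLoadBalancingSpi, which @GridInternal tasks such as
    // VisorNodePingTask are dispatched through.
    Ignite ignite = Ignition.start(cfg);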
From d643dcf2dd2caac4c3ff04cb72f31bbfbf97339a Mon Sep 17 00:00:00 2001 From: Pavel Tupitsyn Date: Wed, 28 Sep 2016 14:34:23 +0300 Subject: [PATCH 41/69] IGNITE-3970 .NET: Fix Cyrillic 'C' letters in code - add test --- .../ProjectFilesTest.cs | 37 ++++++++++++------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ProjectFilesTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ProjectFilesTest.cs index 75167b54573eb..a030bf29080f0 100644 --- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ProjectFilesTest.cs +++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ProjectFilesTest.cs @@ -18,6 +18,7 @@ namespace Apache.Ignite.Core.Tests { using System; + using System.Collections.Generic; using System.IO; using System.Linq; using System.Reflection; @@ -37,11 +38,7 @@ public void TestCsprojToolsVersion() var projFiles = GetDotNetSourceDir().GetFiles("*.csproj", SearchOption.AllDirectories); Assert.GreaterOrEqual(projFiles.Length, 7); - var invalidFiles = - projFiles.Where(x => !File.ReadAllText(x.FullName).Contains("ToolsVersion=\"4.0\"")).ToArray(); - - Assert.AreEqual(0, invalidFiles.Length, - "Invalid csproj files: " + string.Join(", ", invalidFiles.Select(x => x.FullName))); + CheckFiles(projFiles, x => !x.Contains("ToolsVersion=\"4.0\""), "Invalid csproj files: "); } /// @@ -53,17 +50,31 @@ public void TestSlnToolsVersion() var slnFiles = GetDotNetSourceDir().GetFiles("*.sln", SearchOption.AllDirectories); Assert.GreaterOrEqual(slnFiles.Length, 2); - var invalidFiles = - slnFiles.Where(x => - { - var text = File.ReadAllText(x.FullName); + CheckFiles(slnFiles, x => !x.Contains("# Visual Studio 2010") || + !x.Contains("Microsoft Visual Studio Solution File, Format Version 11.00"), + "Invalid sln files: "); + } - return !text.Contains("# Visual Studio 2010") || - !text.Contains("Microsoft Visual Studio Solution File, Format Version 11.00"); - }).ToArray(); + /// + /// Tests that there are no Cyrillic C instead of English C (which are on the same keyboard key). + /// + [Test] + public void TestCyrillicChars() + { + var srcFiles = GetDotNetSourceDir().GetFiles("*.cs", SearchOption.AllDirectories); + + CheckFiles(srcFiles, x => x.Contains('\u0441') || x.Contains('\u0421'), "Files with Cyrillic 'C': "); + } + + /// + /// Checks the files. + /// + private static void CheckFiles(IEnumerable files, Func isInvalid, string errorText) + { + var invalidFiles = files.Where(x => isInvalid(File.ReadAllText(x.FullName))).ToArray(); Assert.AreEqual(0, invalidFiles.Length, - "Invalid sln files: " + string.Join(", ", invalidFiles.Select(x => x.FullName))); + errorText + string.Join(", ", invalidFiles.Select(x => x.FullName))); } /// From 474ade276c4ae3e8f93cce37473d37270b4e7ad9 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Wed, 28 Sep 2016 14:38:04 +0300 Subject: [PATCH 42/69] IGNITE-3988: Moved failing cloud tests to ignore module. 
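This and the next two patches apply the same transformation: a test that previously aborted itself with fail("<JIRA URL>") now declares the reason through @IgniteIgnore, so normal runs skip it while the ignored suite still exercises it. The shape of the change, taken from the TcpDiscoveryCloudIpFinderSelfTest hunk below (the forceFailure attribute that appears in the later JTA and Spring hunks is presumably used where the original fail() semantics must be kept):

    // Before: the body never ran and the test always reported a failure.
    public void testGoogleComputeEngine() throws Exception {
        fail("https://issues.apache.org/jira/browse/IGNITE-1585");

        testCloudProvider("google-compute-engine");
    }

    // After: the reason travels with the annotation and the body stays intact.
    @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-1585")
    public void testGoogleComputeEngine() throws Exception {
        testCloudProvider("google-compute-engine");
    }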
--- .../cloud/TcpDiscoveryCloudIpFinderSelfTest.java | 6 ++++-- .../ignite/testsuites/IgniteCloudTestSuite.java | 5 +++-- modules/ignored-tests/pom.xml | 14 ++++++++++++++ .../ignite/testsuites/IgniteIgnoredTestSuite.java | 3 +++ 4 files changed, 24 insertions(+), 4 deletions(-) diff --git a/modules/cloud/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/cloud/TcpDiscoveryCloudIpFinderSelfTest.java b/modules/cloud/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/cloud/TcpDiscoveryCloudIpFinderSelfTest.java index bf0cbd577dc3c..4bddb18941762 100644 --- a/modules/cloud/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/cloud/TcpDiscoveryCloudIpFinderSelfTest.java +++ b/modules/cloud/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/cloud/TcpDiscoveryCloudIpFinderSelfTest.java @@ -20,9 +20,11 @@ import com.google.common.collect.ImmutableList; import java.net.InetSocketAddress; import java.util.Collection; + import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAbstractSelfTest; import org.apache.ignite.testsuites.IgniteCloudTestSuite; +import org.apache.ignite.testsuites.IgniteIgnore; /** * TcpDiscoveryCloudIpFinder test. @@ -58,6 +60,7 @@ public TcpDiscoveryCloudIpFinderSelfTest() throws Exception { * * @throws Exception If any error occurs. */ + @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-845") public void testAmazonWebServices() throws Exception { testCloudProvider("aws-ec2"); } @@ -67,9 +70,8 @@ public void testAmazonWebServices() throws Exception { * * @throws Exception If any error occurs. */ + @IgniteIgnore("https://issues.apache.org/jira/browse/IGNITE-1585") public void testGoogleComputeEngine() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-1585"); - testCloudProvider("google-compute-engine"); } diff --git a/modules/cloud/src/test/java/org/apache/ignite/testsuites/IgniteCloudTestSuite.java b/modules/cloud/src/test/java/org/apache/ignite/testsuites/IgniteCloudTestSuite.java index 12a67e74f26c0..632cddc6c7bbc 100644 --- a/modules/cloud/src/test/java/org/apache/ignite/testsuites/IgniteCloudTestSuite.java +++ b/modules/cloud/src/test/java/org/apache/ignite/testsuites/IgniteCloudTestSuite.java @@ -21,6 +21,7 @@ import java.util.LinkedList; import junit.framework.TestSuite; import org.apache.ignite.spi.discovery.tcp.ipfinder.cloud.TcpDiscoveryCloudIpFinderSelfTest; +import org.apache.ignite.testframework.IgniteTestSuite; /** * Ignite Cloud integration test. @@ -31,10 +32,10 @@ public class IgniteCloudTestSuite extends TestSuite { * @throws Exception Thrown in case of the failure. */ public static TestSuite suite() throws Exception { - TestSuite suite = new TestSuite("Cloud Integration Test Suite"); + TestSuite suite = new IgniteTestSuite("Cloud Integration Test Suite"); // Cloud Nodes IP finder. 
- suite.addTest(new TestSuite(TcpDiscoveryCloudIpFinderSelfTest.class)); + suite.addTestSuite(TcpDiscoveryCloudIpFinderSelfTest.class); return suite; } diff --git a/modules/ignored-tests/pom.xml b/modules/ignored-tests/pom.xml index cfeacb2283e4c..9fdff2cf8fd1c 100644 --- a/modules/ignored-tests/pom.xml +++ b/modules/ignored-tests/pom.xml @@ -76,6 +76,20 @@ test + + org.apache.ignite + ignite-cloud + ${project.version} + + + + org.apache.ignite + ignite-cloud + ${project.version} + test-jar + test + + org.apache.ignite ignite-web diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java index fa60721c5d0ad..3ab1d7f4850cc 100644 --- a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java @@ -37,6 +37,9 @@ public static TestSuite suite() throws Exception { /* --- AWS --- */ suite.addTest(IgniteS3TestSuite.suite()); + /* --- CLOUDS --- */ + suite.addTest(IgniteCloudTestSuite.suite()); + /* --- WEB SESSIONS --- */ suite.addTest(IgniteWebSessionSelfTestSuite.suite()); From b9105df77cc70b532f94899c754fba47e3e05e9a Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Wed, 28 Sep 2016 14:53:20 +0300 Subject: [PATCH 43/69] IGNITE-3989: Moved failing JTA tests to ignore module. --- modules/ignored-tests/pom.xml | 75 +++++++++++++++++-- .../testsuites/IgniteIgnoredTestSuite.java | 3 + ...tionedCacheJtaLookupClassNameSelfTest.java | 4 +- .../ignite/testsuites/IgniteJtaTestSuite.java | 3 +- 4 files changed, 74 insertions(+), 11 deletions(-) diff --git a/modules/ignored-tests/pom.xml b/modules/ignored-tests/pom.xml index 9fdff2cf8fd1c..661e237921ccf 100644 --- a/modules/ignored-tests/pom.xml +++ b/modules/ignored-tests/pom.xml @@ -35,15 +35,16 @@ http://ignite.apache.org + org.apache.ignite - ignite-core + ignite-aws ${project.version} org.apache.ignite - ignite-core + ignite-aws ${project.version} test-jar test @@ -51,26 +52,27 @@ org.apache.ignite - ignite-log4j + ignite-cloud ${project.version} org.apache.ignite - ignite-spring + ignite-cloud ${project.version} + test-jar test org.apache.ignite - ignite-aws + ignite-core ${project.version} org.apache.ignite - ignite-aws + ignite-core ${project.version} test-jar test @@ -78,18 +80,31 @@ org.apache.ignite - ignite-cloud + ignite-jta ${project.version} org.apache.ignite - ignite-cloud + ignite-jta ${project.version} test-jar test + + org.apache.ignite + ignite-log4j + ${project.version} + + + + org.apache.ignite + ignite-spring + ${project.version} + test + + org.apache.ignite ignite-web @@ -104,6 +119,21 @@ test + + + javax.resource + connector-api + 1.5 + test + + + + org.apache.tomcat + catalina + 6.0.43 + test + + org.eclipse.jetty jetty-servlets @@ -117,5 +147,34 @@ ${jetty.version} test + + + org.springframework + spring-beans + ${spring.version} + test + + + + org.springframework + spring-core + ${spring.version} + test + + + + org.springframework + spring-context + ${spring.version} + test + + + + org.ow2.jotm + jotm-core + 2.1.9 + test + + diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java index 3ab1d7f4850cc..97f65c5c84b90 100644 --- 
a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java @@ -40,6 +40,9 @@ public static TestSuite suite() throws Exception { /* --- CLOUDS --- */ suite.addTest(IgniteCloudTestSuite.suite()); + /* --- JTA --- */ + suite.addTest(IgniteJtaTestSuite.suite()); + /* --- WEB SESSIONS --- */ suite.addTest(IgniteWebSessionSelfTestSuite.suite()); diff --git a/modules/jta/src/test/java/org/apache/ignite/internal/processors/cache/GridPartitionedCacheJtaLookupClassNameSelfTest.java b/modules/jta/src/test/java/org/apache/ignite/internal/processors/cache/GridPartitionedCacheJtaLookupClassNameSelfTest.java index 2e322f86fb5e9..1efbda66934bc 100644 --- a/modules/jta/src/test/java/org/apache/ignite/internal/processors/cache/GridPartitionedCacheJtaLookupClassNameSelfTest.java +++ b/modules/jta/src/test/java/org/apache/ignite/internal/processors/cache/GridPartitionedCacheJtaLookupClassNameSelfTest.java @@ -26,6 +26,7 @@ import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.testsuites.IgniteIgnore; /** * Lookup class name based JTA integration test using PARTITIONED cache. @@ -39,9 +40,8 @@ public class GridPartitionedCacheJtaLookupClassNameSelfTest extends AbstractCach /** * */ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-1094", forceFailure = true) public void testUncompatibleTmLookup() { - fail("https://issues.apache.org/jira/browse/IGNITE-1094"); - final IgniteEx ignite = grid(0); final CacheConfiguration cacheCfg = new CacheConfiguration(); diff --git a/modules/jta/src/test/java/org/apache/ignite/testsuites/IgniteJtaTestSuite.java b/modules/jta/src/test/java/org/apache/ignite/testsuites/IgniteJtaTestSuite.java index 60c20ddafbaf7..4ae5df06d54ec 100644 --- a/modules/jta/src/test/java/org/apache/ignite/testsuites/IgniteJtaTestSuite.java +++ b/modules/jta/src/test/java/org/apache/ignite/testsuites/IgniteJtaTestSuite.java @@ -28,6 +28,7 @@ import org.apache.ignite.internal.processors.cache.GridReplicatedCacheJtaFactoryUseSyncSelfTest; import org.apache.ignite.internal.processors.cache.GridReplicatedCacheJtaLookupClassNameSelfTest; import org.apache.ignite.internal.processors.cache.GridJtaLifecycleAwareSelfTest; +import org.apache.ignite.testframework.IgniteTestSuite; /** * JTA integration tests. @@ -38,7 +39,7 @@ public class IgniteJtaTestSuite extends TestSuite { * @throws Exception Thrown in case of the failure. */ public static TestSuite suite() throws Exception { - TestSuite suite = new TestSuite("JTA Integration Test Suite"); + TestSuite suite = new IgniteTestSuite("JTA Integration Test Suite"); suite.addTestSuite(GridPartitionedCacheJtaFactorySelfTest.class); suite.addTestSuite(GridReplicatedCacheJtaFactorySelfTest.class); From d595345765db2151ff432beecd478ce056393593 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Wed, 28 Sep 2016 15:08:38 +0300 Subject: [PATCH 44/69] IGNITE-3990: Moved failing Spring tests to "ignore" module. 
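Note on the pattern used by this patch and its neighbours: rather than starting a known-broken test with an unconditional fail("<JIRA link>") call, which turns every regular run red, the test method is annotated with @IgniteIgnore so that the normal suites skip it while IgniteIgnoredTestSuite can still execute it and, with forceFailure = true, report it as a failure. A minimal sketch of the annotation as used in these patches; the class name, method name and ticket number are placeholders only:

    import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
    import org.apache.ignite.testsuites.IgniteIgnore;

    /** Illustrative test class; only the annotation usage matters here. */
    public class ExampleIgnoredSelfTest extends GridCommonAbstractTest {
        /** Skipped by regular suites; the ignored-tests suite runs it and reports it as failed. */
        @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-XXXX", forceFailure = true)
        public void testKnownIssue() throws Exception {
            // The original test body stays unchanged; only the leading fail(...) call is removed.
        }
    }
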
--- .../ignite/testframework/IgniteTestSuite.java | 12 ++++-------- modules/ignored-tests/pom.xml | 7 +++++++ .../ignite/testsuites/IgniteIgnoredTestSuite.java | 3 +++ .../p2p/GridP2PUserVersionChangeSelfTest.java | 7 ++----- .../testsuites/IgniteResourceSelfTestSuite.java | 11 ++++++----- .../ignite/testsuites/IgniteSpringTestSuite.java | 15 ++++++++------- 6 files changed, 30 insertions(+), 25 deletions(-) diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java index 6fc7917733214..6dad032b1c0ba 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java @@ -101,16 +101,12 @@ public IgniteTestSuite(@Nullable Class theClass, @Nullable S setName(name); } - /** - * Adds a test to the suite. - */ - @Override public void addTest(Test test) { - super.addTest(test); - } - /** {@inheritDoc} */ @Override public void addTestSuite(Class testClass) { - addTest(new IgniteTestSuite(testClass, ignoredOnly)); + IgniteTestSuite suite = new IgniteTestSuite(testClass, ignoredOnly); + + if (suite.testCount() > 0) + addTest(suite); } /** diff --git a/modules/ignored-tests/pom.xml b/modules/ignored-tests/pom.xml index 661e237921ccf..336485c30043c 100644 --- a/modules/ignored-tests/pom.xml +++ b/modules/ignored-tests/pom.xml @@ -102,6 +102,13 @@ org.apache.ignite ignite-spring ${project.version} + + + + org.apache.ignite + ignite-spring + ${project.version} + test-jar test diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java index 97f65c5c84b90..2188cd67677bb 100644 --- a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java @@ -43,6 +43,9 @@ public static TestSuite suite() throws Exception { /* --- JTA --- */ suite.addTest(IgniteJtaTestSuite.suite()); + /* --- Spring --- */ + suite.addTest(IgniteSpringTestSuite.suite()); + /* --- WEB SESSIONS --- */ suite.addTest(IgniteWebSessionSelfTestSuite.suite()); diff --git a/modules/spring/src/test/java/org/apache/ignite/p2p/GridP2PUserVersionChangeSelfTest.java b/modules/spring/src/test/java/org/apache/ignite/p2p/GridP2PUserVersionChangeSelfTest.java index 8efa440c013dc..0b32b045f583e 100644 --- a/modules/spring/src/test/java/org/apache/ignite/p2p/GridP2PUserVersionChangeSelfTest.java +++ b/modules/spring/src/test/java/org/apache/ignite/p2p/GridP2PUserVersionChangeSelfTest.java @@ -41,6 +41,7 @@ import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.config.GridTestProperties; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.apache.ignite.testsuites.IgniteIgnore; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static org.apache.ignite.events.EventType.EVT_NODE_LEFT; @@ -282,14 +283,10 @@ public void testRedeployOnNodeRestartSharedMode() throws Exception { } /** - * TODO: IGNITE-604. - * * @throws Exception If failed. */ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-604", forceFailure = true) public void testCacheRedeployVersionChangeContinuousMode() throws Exception { - // Build execution timeout if try to run test on TC. 
- fail("https://issues.apache.org/jira/browse/IGNITE-604"); - depMode = DeploymentMode.CONTINUOUS; try { diff --git a/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteResourceSelfTestSuite.java b/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteResourceSelfTestSuite.java index 742190b82325e..e120aaa41cd8f 100644 --- a/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteResourceSelfTestSuite.java +++ b/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteResourceSelfTestSuite.java @@ -22,6 +22,7 @@ import org.apache.ignite.internal.processors.resource.GridResourceProcessorSelfTest; import org.apache.ignite.internal.processors.resource.GridServiceInjectionSelfTest; import org.apache.ignite.internal.processors.resource.GridSpringResourceInjectionSelfTest; +import org.apache.ignite.testframework.IgniteTestSuite; /** * Ignite resource injection test Suite. @@ -33,12 +34,12 @@ public class IgniteResourceSelfTestSuite extends TestSuite { * @throws Exception If failed. */ public static TestSuite suite() throws Exception { - TestSuite suite = new TestSuite("Ignite Resource Injection Test Suite"); + TestSuite suite = new IgniteTestSuite("Ignite Resource Injection Test Suite"); - suite.addTest(new TestSuite(GridResourceProcessorSelfTest.class)); - suite.addTest(new TestSuite(GridLoggerInjectionSelfTest.class)); - suite.addTest(new TestSuite(GridServiceInjectionSelfTest.class)); - suite.addTest(new TestSuite(GridSpringResourceInjectionSelfTest.class)); + suite.addTestSuite(GridResourceProcessorSelfTest.class); + suite.addTestSuite(GridLoggerInjectionSelfTest.class); + suite.addTestSuite(GridServiceInjectionSelfTest.class); + suite.addTestSuite(GridSpringResourceInjectionSelfTest.class); return suite; } diff --git a/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java b/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java index c4a4b75c5171d..fff81089c2f11 100644 --- a/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java +++ b/modules/spring/src/test/java/org/apache/ignite/testsuites/IgniteSpringTestSuite.java @@ -33,6 +33,7 @@ import org.apache.ignite.spring.IgniteExcludeInConfigurationTest; import org.apache.ignite.spring.IgniteStartFromStreamConfigurationTest; import org.apache.ignite.spring.injection.GridServiceInjectionSpringResourceTest; +import org.apache.ignite.testframework.IgniteTestSuite; import org.apache.ignite.transactions.spring.GridSpringTransactionManagerSelfTest; import org.apache.ignite.transactions.spring.SpringTransactionManagerContextInjectionTest; @@ -45,23 +46,23 @@ public class IgniteSpringTestSuite extends TestSuite { * @throws Exception Thrown in case of the failure. */ public static TestSuite suite() throws Exception { - TestSuite suite = new TestSuite("Spring Test Suite"); + TestSuite suite = new IgniteTestSuite("Spring Test Suite"); suite.addTestSuite(GridSpringBeanSerializationSelfTest.class); suite.addTestSuite(GridFactorySelfTest.class); suite.addTest(IgniteResourceSelfTestSuite.suite()); - suite.addTest(new TestSuite(IgniteExcludeInConfigurationTest.class)); + suite.addTestSuite(IgniteExcludeInConfigurationTest.class); // Tests moved to this suite since they require Spring functionality. 
- suite.addTest(new TestSuite(GridP2PUserVersionChangeSelfTest.class)); + suite.addTestSuite(GridP2PUserVersionChangeSelfTest.class); - suite.addTest(new TestSuite(GridSpringCacheManagerSelfTest.class)); + suite.addTestSuite(GridSpringCacheManagerSelfTest.class); - suite.addTest(new TestSuite(IgniteDynamicCacheConfigTest.class)); + suite.addTestSuite(IgniteDynamicCacheConfigTest.class); - suite.addTest(new TestSuite(IgniteStartFromStreamConfigurationTest.class)); + suite.addTestSuite(IgniteStartFromStreamConfigurationTest.class); suite.addTestSuite(CacheSpringStoreSessionListenerSelfTest.class); @@ -70,7 +71,7 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(CachePojoStoreXmlSelfTest.class); suite.addTestSuite(CachePojoStoreXmlWithSqlEscapeSelfTest.class); - suite.addTest(new TestSuite(GridSpringTransactionManagerSelfTest.class)); + suite.addTestSuite(GridSpringTransactionManagerSelfTest.class); suite.addTestSuite(GridServiceInjectionSpringResourceTest.class); From e3f13455d4273e615727d0410783e3719db98f76 Mon Sep 17 00:00:00 2001 From: sboikov Date: Wed, 28 Sep 2016 12:56:17 +0300 Subject: [PATCH 45/69] ignite-2833 Need call 'touch' for cache entry if it was obtained using 'entryEx'. (cherry picked from commit 17c2fc0) --- .../processors/cache/GridCacheTtlManager.java | 32 ++++++++++--------- .../GridCacheAbstractFullApiSelfTest.java | 2 +- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java index 996544fdb4dd9..0f855fe6dca91 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheTtlManager.java @@ -129,26 +129,28 @@ public boolean expire(int amount) { if (log.isTraceEnabled()) log.trace("Trying to remove expired entry from cache: " + e); - boolean touch = false; + boolean touch = e.ctx.isSwapOrOffheapEnabled(); - GridCacheEntryEx entry = e.ctx.cache().entryEx(e.key); + GridCacheEntryEx entry = touch ? e.ctx.cache().entryEx(e.key) : e.ctx.cache().peekEx(e.key); - while (true) { - try { - if (entry.onTtlExpired(obsoleteVer)) - touch = false; + if (entry != null) { + while (true) { + try { + if (entry.onTtlExpired(obsoleteVer)) + touch = false; - break; - } - catch (GridCacheEntryRemovedException e0) { - entry = entry.context().cache().entryEx(entry.key()); + break; + } + catch (GridCacheEntryRemovedException e0) { + entry = entry.context().cache().entryEx(entry.key()); - touch = true; + touch = true; + } } - } - if (touch) - entry.context().evicts().touch(entry, null); + if (touch) + entry.context().evicts().touch(entry, null); + } } } @@ -216,7 +218,7 @@ private static class EntryWrapper implements Comparable { private final GridCacheContext ctx; /** Cache Object Key */ - private final CacheObject key; + private final KeyCacheObject key; /** * @param entry Cache entry to create wrapper for. 
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java index a31c82ef72bca..e7daf2b6fef2d 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java @@ -4057,7 +4057,7 @@ public void testCompactExpired() throws Exception { // Peek will actually remove entry from cache. assertNull(cache.localPeek(key)); - assert cache.localSize() == 0; + assertEquals(0, cache.localSize()); // Clear readers, if any. cache.remove(key); From b2faa339acb2eea24e6dd5e0c21fc3d3d0592ff6 Mon Sep 17 00:00:00 2001 From: sboikov Date: Wed, 28 Sep 2016 13:47:51 +0300 Subject: [PATCH 46/69] ignite-3621 Fixed 'testEvictExpired'. (cherry picked from commit 4ff19c2) --- .../cache/GridCacheAbstractFullApiSelfTest.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java index e7daf2b6fef2d..52d9a5e78e3f8 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheAbstractFullApiSelfTest.java @@ -3475,9 +3475,16 @@ public void testEvictExpired() throws Exception { grid(0).cache(null).withExpiryPolicy(expiry).put(key, 1); + final Affinity aff = ignite(0).affinity(null); + boolean wait = waitForCondition(new GridAbsPredicate() { @Override public boolean apply() { - return cache.localPeek(key) == null; + for (int i = 0; i < gridCount(); i++) { + if (peek(jcache(i), key) != null) + return false; + } + + return true; } }, ttl + 1000); @@ -3496,8 +3503,6 @@ public void testEvictExpired() throws Exception { load(cache, key, true); - Affinity aff = ignite(0).affinity(null); - for (int i = 0; i < gridCount(); i++) { if (aff.isPrimary(grid(i).cluster().localNode(), key)) assertEquals((Integer)1, peek(jcache(i), key)); From 74d2fc2416b8e6bc0598152552021f984a013061 Mon Sep 17 00:00:00 2001 From: sboikov Date: Wed, 28 Sep 2016 14:31:24 +0300 Subject: [PATCH 47/69] ignite-3621 Fixed 'testEvictExpired'. 
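As in the previous commit, the fix below replaces a fixed Thread.sleep(ttl + 100) with GridTestUtils.waitForCondition(...), polling every node until the entry has actually expired, which removes the timing flakiness. A sketch of the pattern as it appears in these diffs; key, ttl, gridCount(), jcache(int) and peek(...) are members of the surrounding test class:

    boolean expired = GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override public boolean apply() {
            // The entry counts as expired only once no node in the grid still sees it.
            for (int i = 0; i < gridCount(); i++) {
                if (peek(jcache(i), key) != null)
                    return false;
            }

            return true;
        }
    }, ttl + 1000);

    assertTrue("Failed to wait for entry expiration.", expired);
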
(cherry picked from commit bfe4458) --- ...tomicClientOnlyMultiNodeFullApiSelfTest.java | 17 +++++++++++++++-- ...eAtomicNearOnlyMultiNodeFullApiSelfTest.java | 17 +++++++++++++++-- 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicClientOnlyMultiNodeFullApiSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicClientOnlyMultiNodeFullApiSelfTest.java index 927ee6273cf41..65f780b2c3fb8 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicClientOnlyMultiNodeFullApiSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicClientOnlyMultiNodeFullApiSelfTest.java @@ -36,6 +36,7 @@ import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.events.Event; +import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgnitePredicate; @@ -45,6 +46,7 @@ import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC; import static org.apache.ignite.events.EventType.EVT_CACHE_OBJECT_SWAPPED; import static org.apache.ignite.events.EventType.EVT_CACHE_OBJECT_UNSWAPPED; +import static org.apache.ignite.testframework.GridTestUtils.waitForCondition; /** * @@ -193,7 +195,7 @@ public class GridCacheAtomicClientOnlyMultiNodeFullApiSelfTest extends GridCache @Override public void testEvictExpired() throws Exception { IgniteCache cache = jcache(); - String key = primaryKeysForCache(cache, 1).get(0); + final String key = primaryKeysForCache(cache, 1).get(0); cache.put(key, 1); @@ -204,7 +206,18 @@ public class GridCacheAtomicClientOnlyMultiNodeFullApiSelfTest extends GridCache grid(0).cache(null). withExpiryPolicy(new TouchedExpiryPolicy(new Duration(MILLISECONDS, ttl))).put(key, 1); - Thread.sleep(ttl + 100); + boolean wait = waitForCondition(new GridAbsPredicate() { + @Override public boolean apply() { + for (int i = 0; i < gridCount(); i++) { + if (peek(jcache(i), key) != null) + return false; + } + + return true; + } + }, ttl + 1000); + + assertTrue("Failed to wait for entry expiration.", wait); // Expired entry should not be swapped. 
cache.localEvict(Collections.singleton(key)); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicNearOnlyMultiNodeFullApiSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicNearOnlyMultiNodeFullApiSelfTest.java index 3ce38f3a4072f..6542e762779c7 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicNearOnlyMultiNodeFullApiSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheAtomicNearOnlyMultiNodeFullApiSelfTest.java @@ -29,9 +29,11 @@ import org.apache.ignite.cache.affinity.Affinity; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; +import org.apache.ignite.internal.util.lang.GridAbsPredicate; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC; +import static org.apache.ignite.testframework.GridTestUtils.waitForCondition; /** * Tests NEAR_ONLY cache. @@ -124,7 +126,7 @@ public class GridCacheAtomicNearOnlyMultiNodeFullApiSelfTest extends GridCacheNe IgniteCache cache = jcache(); - String key = primaryKeysForCache(cache, 1).get(0); + final String key = primaryKeysForCache(cache, 1).get(0); cache.put(key, 1); @@ -135,7 +137,18 @@ public class GridCacheAtomicNearOnlyMultiNodeFullApiSelfTest extends GridCacheNe grid(0).cache(null). withExpiryPolicy(new TouchedExpiryPolicy(new Duration(MILLISECONDS, ttl))).put(key, 1); - Thread.sleep(ttl + 100); + boolean wait = waitForCondition(new GridAbsPredicate() { + @Override public boolean apply() { + for (int i = 0; i < gridCount(); i++) { + if (peek(jcache(i), key) != null) + return false; + } + + return true; + } + }, ttl + 1000); + + assertTrue("Failed to wait for entry expiration.", wait); // Expired entry should not be swapped. cache.localEvict(Collections.singleton(key)); From d2563dacceea61b19bb6e083e29ebacc28fdd323 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Wed, 28 Sep 2016 15:51:55 +0300 Subject: [PATCH 48/69] IGNITE-3993: Added failing client tests to "ignored" test suite. --- .../client/ClientReconnectionSelfTest.java | 4 +- .../client/router/TcpSslRouterSelfTest.java | 7 +- .../client/suite/IgniteClientTestSuite.java | 71 +++++------ .../ignite/testframework/IgniteTestSuite.java | 116 +++++++++++++++--- .../ignite/testsuites/IgniteIgnore.java | 2 +- modules/ignored-tests/pom.xml | 46 +++++++ .../testsuites/IgniteIgnoredTestSuite.java | 5 +- 7 files changed, 192 insertions(+), 59 deletions(-) diff --git a/modules/clients/src/test/java/org/apache/ignite/internal/client/ClientReconnectionSelfTest.java b/modules/clients/src/test/java/org/apache/ignite/internal/client/ClientReconnectionSelfTest.java index 92c8e76ec02bf..f1085b3ec361a 100644 --- a/modules/clients/src/test/java/org/apache/ignite/internal/client/ClientReconnectionSelfTest.java +++ b/modules/clients/src/test/java/org/apache/ignite/internal/client/ClientReconnectionSelfTest.java @@ -24,6 +24,7 @@ import org.apache.ignite.internal.client.impl.connection.GridClientConnectionResetException; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.apache.ignite.testsuites.IgniteIgnore; /** * @@ -182,9 +183,8 @@ public void testFailedInit() throws Exception { * * @throws Exception If failed. 
*/ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-590", forceFailure = true) public void testIdleConnection() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-590"); - int srvsCnt = 4; // TODO: IGNITE-590 it may be wrong value. Need to investigate after IGNITE-590 will be fixed. for (int i = 0; i < srvsCnt; i++) diff --git a/modules/clients/src/test/java/org/apache/ignite/internal/client/router/TcpSslRouterSelfTest.java b/modules/clients/src/test/java/org/apache/ignite/internal/client/router/TcpSslRouterSelfTest.java index 1e2a2bdf5425e..3b47ae5ca9d87 100644 --- a/modules/clients/src/test/java/org/apache/ignite/internal/client/router/TcpSslRouterSelfTest.java +++ b/modules/clients/src/test/java/org/apache/ignite/internal/client/router/TcpSslRouterSelfTest.java @@ -20,16 +20,13 @@ import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.client.ssl.GridSslContextFactory; import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.testsuites.IgniteIgnore; /** * */ +@IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-433", forceFailure = true) public class TcpSslRouterSelfTest extends TcpRouterAbstractSelfTest { - /** {@inheritDoc} */ - @Override protected void beforeTest() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-433"); - } - /** {@inheritDoc} */ @Override protected boolean useSsl() { return true; diff --git a/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java b/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java index fdb5456113ffb..cc8f9fa976651 100644 --- a/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java +++ b/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java @@ -57,6 +57,7 @@ import org.apache.ignite.internal.processors.rest.RestProcessorStartSelfTest; import org.apache.ignite.internal.processors.rest.TaskCommandHandlerSelfTest; import org.apache.ignite.internal.processors.rest.protocols.tcp.TcpRestParserSelfTest; +import org.apache.ignite.testframework.IgniteTestSuite; /** * Test suite includes all test that concern REST processors. @@ -66,65 +67,65 @@ public class IgniteClientTestSuite extends TestSuite { * @return Suite that contains all tests for REST. */ public static TestSuite suite() { - TestSuite suite = new TestSuite("Ignite Clients Test Suite"); + TestSuite suite = new IgniteTestSuite("Ignite Clients Test Suite"); - suite.addTest(new TestSuite(RouterFactorySelfTest.class)); + suite.addTestSuite(RouterFactorySelfTest.class); // Parser standalone test. - suite.addTest(new TestSuite(TcpRestParserSelfTest.class)); + suite.addTestSuite(TcpRestParserSelfTest.class); // Test memcache protocol with custom test client. - suite.addTest(new TestSuite(RestMemcacheProtocolSelfTest.class)); + suite.addTestSuite(RestMemcacheProtocolSelfTest.class); // Test custom binary protocol with test client. - suite.addTest(new TestSuite(RestBinaryProtocolSelfTest.class)); + suite.addTestSuite(RestBinaryProtocolSelfTest.class); // Test jetty rest processor - suite.addTest(new TestSuite(JettyRestProcessorSignedSelfTest.class)); - suite.addTest(new TestSuite(JettyRestProcessorUnsignedSelfTest.class)); + suite.addTestSuite(JettyRestProcessorSignedSelfTest.class); + suite.addTestSuite(JettyRestProcessorUnsignedSelfTest.class); // Test TCP rest processor with original memcache client. 
- suite.addTest(new TestSuite(ClientMemcachedProtocolSelfTest.class)); + suite.addTestSuite(ClientMemcachedProtocolSelfTest.class); - suite.addTest(new TestSuite(RestProcessorStartSelfTest.class)); + suite.addTestSuite(RestProcessorStartSelfTest.class); // Test cache flag conversion. - suite.addTest(new TestSuite(ClientCacheFlagsCodecTest.class)); + suite.addTestSuite(ClientCacheFlagsCodecTest.class); // Test multi-start. - suite.addTest(new TestSuite(RestProcessorMultiStartSelfTest.class)); + suite.addTestSuite(RestProcessorMultiStartSelfTest.class); // Test clients. - suite.addTest(new TestSuite(ClientDataImplSelfTest.class)); - suite.addTest(new TestSuite(ClientComputeImplSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpDirectSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpSslSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpSslDirectSelfTest.class)); + suite.addTestSuite(ClientDataImplSelfTest.class); + suite.addTestSuite(ClientComputeImplSelfTest.class); + suite.addTestSuite(ClientTcpSelfTest.class); + suite.addTestSuite(ClientTcpDirectSelfTest.class); + suite.addTestSuite(ClientTcpSslSelfTest.class); + suite.addTestSuite(ClientTcpSslDirectSelfTest.class); // Test client with many nodes. - suite.addTest(new TestSuite(ClientTcpMultiNodeSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpDirectMultiNodeSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpSslMultiNodeSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpSslDirectMultiNodeSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpUnreachableMultiNodeSelfTest.class)); - suite.addTest(new TestSuite(ClientPreferDirectSelfTest.class)); + suite.addTestSuite(ClientTcpMultiNodeSelfTest.class); + suite.addTestSuite(ClientTcpDirectMultiNodeSelfTest.class); + suite.addTestSuite(ClientTcpSslMultiNodeSelfTest.class); + suite.addTestSuite(ClientTcpSslDirectMultiNodeSelfTest.class); + suite.addTestSuite(ClientTcpUnreachableMultiNodeSelfTest.class); + suite.addTestSuite(ClientPreferDirectSelfTest.class); // Test client with many nodes and in multithreaded scenarios - suite.addTest(new TestSuite(ClientTcpMultiThreadedSelfTest.class)); - suite.addTest(new TestSuite(ClientTcpSslMultiThreadedSelfTest.class)); + suite.addTestSuite(ClientTcpMultiThreadedSelfTest.class); + suite.addTestSuite(ClientTcpSslMultiThreadedSelfTest.class); // Test client authentication. - suite.addTest(new TestSuite(ClientTcpSslAuthenticationSelfTest.class)); + suite.addTestSuite(ClientTcpSslAuthenticationSelfTest.class); - suite.addTest(new TestSuite(ClientTcpConnectivitySelfTest.class)); - suite.addTest(new TestSuite(ClientReconnectionSelfTest.class)); + suite.addTestSuite(ClientTcpConnectivitySelfTest.class); + suite.addTestSuite(ClientReconnectionSelfTest.class); // Rest task command handler test. - suite.addTest(new TestSuite(TaskCommandHandlerSelfTest.class)); + suite.addTestSuite(TaskCommandHandlerSelfTest.class); // Default cache only test. - suite.addTest(new TestSuite(ClientDefaultCacheSelfTest.class)); + suite.addTestSuite(ClientDefaultCacheSelfTest.class); suite.addTestSuite(ClientFutureAdapterSelfTest.class); suite.addTestSuite(ClientPropertiesConfigurationSelfTest.class); @@ -134,13 +135,13 @@ public static TestSuite suite() { suite.addTestSuite(ClientByteUtilsTest.class); // Router tests. 
- suite.addTest(new TestSuite(TcpRouterSelfTest.class)); - suite.addTest(new TestSuite(TcpSslRouterSelfTest.class)); - suite.addTest(new TestSuite(TcpRouterMultiNodeSelfTest.class)); + suite.addTestSuite(TcpRouterSelfTest.class); + suite.addTestSuite(TcpSslRouterSelfTest.class); + suite.addTestSuite(TcpRouterMultiNodeSelfTest.class); - suite.addTest(new TestSuite(ClientFailedInitSelfTest.class)); + suite.addTestSuite(ClientFailedInitSelfTest.class); - suite.addTest(new TestSuite(ClientTcpTaskExecutionAfterTopologyRestartSelfTest.class)); + suite.addTestSuite(ClientTcpTaskExecutionAfterTopologyRestartSelfTest.class); return suite; } diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java index 6dad032b1c0ba..7db9664a7ec9b 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java @@ -129,6 +129,8 @@ private void addTestsFromTestCase(Class theClass) { if(!Modifier.isPublic(theClass.getModifiers())) addTest(warning("Class " + theClass.getName() + " is not public")); else { + IgnoreDescriptor clsIgnore = IgnoreDescriptor.forClass(theClass); + Class superCls = theClass; int testAdded = 0; @@ -140,7 +142,7 @@ private void addTestsFromTestCase(Class theClass) { Method[] methods = MethodSorter.getDeclaredMethods(superCls); for (Method each : methods) { - if (addTestMethod(each, names, theClass)) + if (addTestMethod(each, names, theClass, clsIgnore)) testAdded++; else testIgnored++; @@ -158,9 +160,11 @@ private void addTestsFromTestCase(Class theClass) { * @param m Test method. * @param names Test name list. * @param theClass Test class. + * @param clsIgnore Class ignore descriptor (if any). * @return Whether test method was added. */ - private boolean addTestMethod(Method m, List names, Class theClass) { + private boolean addTestMethod(Method m, List names, Class theClass, + @Nullable IgnoreDescriptor clsIgnore) { String name = m.getName(); if (names.contains(name)) @@ -175,25 +179,20 @@ private boolean addTestMethod(Method m, List names, Class theClass) { names.add(name); - boolean hasIgnore = m.isAnnotationPresent(IgniteIgnore.class); - - if (ignoredOnly) { - if (hasIgnore) { - IgniteIgnore ignore = m.getAnnotation(IgniteIgnore.class); + IgnoreDescriptor ignore = IgnoreDescriptor.forMethod(theClass, m); - String reason = ignore.value(); - - if (F.isEmpty(reason)) - throw new IllegalArgumentException("Reason is not set for ignored test [class=" + - theClass.getName() + ", method=" + name + ']'); + if (ignore == null) + ignore = clsIgnore; + if (ignoredOnly) { + if (ignore != null) { Test test = createTest(theClass, name); if (ignore.forceFailure()) { if (test instanceof GridAbstractTest) - ((GridAbstractTest)test).forceFailure(ignore.value()); + ((GridAbstractTest)test).forceFailure(ignore.reason()); else - test = new ForcedFailure(name, ignore.value()); + test = new ForcedFailure(name, ignore.reason()); } addTest(test); @@ -202,7 +201,7 @@ private boolean addTestMethod(Method m, List names, Class theClass) { } } else { - if (!hasIgnore) { + if (ignore == null) { addTest(createTest(theClass, name)); return true; @@ -250,6 +249,93 @@ private static boolean ignoreDefault() { return res != null && res; } + /** + * Ignore descriptor. + */ + private static class IgnoreDescriptor { + /** Reason. */ + private final String reason; + + /** Force failure. 
*/ + private final boolean forceFailure; + + /** + * Get descriptor for class (if any). + * + * @param cls Class. + * @return Descriptor or {@code null}. + */ + @Nullable public static IgnoreDescriptor forClass(Class cls) { + Class cls0 = cls; + + while (Test.class.isAssignableFrom(cls0)) { + if (cls0.isAnnotationPresent(IgniteIgnore.class)) { + IgniteIgnore ignore = (IgniteIgnore)cls0.getAnnotation(IgniteIgnore.class); + + String reason = ignore.value(); + + if (F.isEmpty(reason)) + throw new IllegalArgumentException("Reason is not set for ignored test [class=" + + cls0.getName() + ']'); + + return new IgnoreDescriptor(reason, ignore.forceFailure()); + } + + cls0 = cls0.getSuperclass(); + } + + return null; + } + + /** + * Get descriptor for method (if any). + * + * @param cls Class. + * @param mthd Method. + * @return Descriptor or {@code null}. + */ + @Nullable public static IgnoreDescriptor forMethod(Class cls, Method mthd) { + if (mthd.isAnnotationPresent(IgniteIgnore.class)) { + IgniteIgnore ignore = mthd.getAnnotation(IgniteIgnore.class); + + String reason = ignore.value(); + + if (F.isEmpty(reason)) + throw new IllegalArgumentException("Reason is not set for ignored test [class=" + + cls.getName() + ", method=" + mthd.getName() + ']'); + + return new IgnoreDescriptor(reason, ignore.forceFailure()); + } + else + return null; + } + + /** + * Constructor. + * + * @param reason Reason. + * @param forceFailure Force failure. + */ + private IgnoreDescriptor(String reason, boolean forceFailure) { + this.reason = reason; + this.forceFailure = forceFailure; + } + + /** + * @return Reason. + */ + public String reason() { + return reason; + } + + /** + * @return Force failure. + */ + public boolean forceFailure() { + return forceFailure; + } + } + /** * Test case simulating failure. */ diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java index dbb1d7a01326a..05af2a27e6a91 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteIgnore.java @@ -26,7 +26,7 @@ * Annotation which indicates that the test is ignored. */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ElementType.METHOD, ElementType.TYPE}) public @interface IgniteIgnore { /** * Reason for ignore (usually link to JIRA ticket). 
diff --git a/modules/ignored-tests/pom.xml b/modules/ignored-tests/pom.xml index 336485c30043c..142754e10fa8f 100644 --- a/modules/ignored-tests/pom.xml +++ b/modules/ignored-tests/pom.xml @@ -50,6 +50,20 @@ test + + org.apache.ignite + ignite-clients + ${project.version} + + + + org.apache.ignite + ignite-clients + ${project.version} + test-jar + test + + org.apache.ignite ignite-cloud @@ -78,6 +92,12 @@ test + + org.apache.ignite + ignite-indexing + ${project.version} + + org.apache.ignite ignite-jta @@ -98,6 +118,12 @@ ${project.version} + + org.apache.ignite + ignite-rest-http + ${project.version} + + org.apache.ignite ignite-spring @@ -127,6 +153,26 @@ + + com.google.code.simple-spring-memcached + spymemcached + 2.7.3 + test + + + commons-codec + commons-codec + + + + + + commons-io + commons-io + 2.4 + test + + javax.resource connector-api diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java index 2188cd67677bb..fd92a35aeae0b 100644 --- a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java @@ -37,13 +37,16 @@ public static TestSuite suite() throws Exception { /* --- AWS --- */ suite.addTest(IgniteS3TestSuite.suite()); + /* --- CLIENTS --- */ + suite.addTest(org.apache.ignite.internal.client.suite.IgniteClientTestSuite.suite()); + /* --- CLOUDS --- */ suite.addTest(IgniteCloudTestSuite.suite()); /* --- JTA --- */ suite.addTest(IgniteJtaTestSuite.suite()); - /* --- Spring --- */ + /* --- SPRING --- */ suite.addTest(IgniteSpringTestSuite.suite()); /* --- WEB SESSIONS --- */ From 78144c4c9d6200ceef8b666a186039685f053381 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Wed, 28 Sep 2016 16:52:13 +0300 Subject: [PATCH 49/69] Fixed incorrect test count calculation leading to afterTestsStopped() not being called. 
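Background for the fix below: GridAbstractTest triggers afterTestsStopped() once the number of finished tests reaches getNumberOfTests(), which used to be computed by counting every public test* method on the class. Now that a suite may contain only a subset of a class's methods (the rest being ignored), that count is too high and the per-class teardown never fires. The patch therefore lets IgniteTestSuite push the real number of added tests into each test instance, roughly as follows:

    // In IgniteTestSuite, after all test methods of a class have been processed:
    for (Test test : addedTests) {
        if (test instanceof GridAbstractTest)
            ((GridAbstractTest)test).forceTestCount(addedTests.size());
    }
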
--- .../ignite/testframework/IgniteTestSuite.java | 79 ++++++++++++++++--- .../junits/GridAbstractTest.java | 31 +++++++- 2 files changed, 94 insertions(+), 16 deletions(-) diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java index 7db9664a7ec9b..1cf69ae16e502 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java @@ -29,6 +29,7 @@ import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; +import java.util.LinkedList; import java.util.List; /** @@ -134,7 +135,9 @@ private void addTestsFromTestCase(Class theClass) { Class superCls = theClass; int testAdded = 0; - int testIgnored = 0; + int testSkipped = 0; + + LinkedList addedTests = new LinkedList<>(); for(List names = new ArrayList<>(); Test.class.isAssignableFrom(superCls); superCls = superCls.getSuperclass()) { @@ -142,15 +145,29 @@ private void addTestsFromTestCase(Class theClass) { Method[] methods = MethodSorter.getDeclaredMethods(superCls); for (Method each : methods) { - if (addTestMethod(each, names, theClass, clsIgnore)) + AddResult res = addTestMethod(each, names, theClass, clsIgnore); + + if (res.added()) { testAdded++; + + addedTests.add(res.test()); + } else - testIgnored++; + testSkipped++; } } - if(testAdded == 0 && testIgnored == 0) + if(testAdded == 0 && testSkipped == 0) addTest(warning("No tests found in " + theClass.getName())); + + // Populate tests count. + for (Test test : addedTests) { + if (test instanceof GridAbstractTest) { + GridAbstractTest test0 = (GridAbstractTest)test; + + test0.forceTestCount(addedTests.size()); + } + } } } @@ -161,20 +178,20 @@ private void addTestsFromTestCase(Class theClass) { * @param names Test name list. * @param theClass Test class. * @param clsIgnore Class ignore descriptor (if any). - * @return Whether test method was added. + * @return Result. */ - private boolean addTestMethod(Method m, List names, Class theClass, + private AddResult addTestMethod(Method m, List names, Class theClass, @Nullable IgnoreDescriptor clsIgnore) { String name = m.getName(); if (names.contains(name)) - return false; + return new AddResult(false, null); if (!isPublicTestMethod(m)) { if (isTestMethod(m)) addTest(warning("Test method isn't public: " + m.getName() + "(" + theClass.getCanonicalName() + ")")); - return false; + return new AddResult(false, null); } names.add(name); @@ -197,18 +214,20 @@ private boolean addTestMethod(Method m, List names, Class theClass, addTest(test); - return true; + return new AddResult(true, test); } } else { if (ignore == null) { - addTest(createTest(theClass, name)); + Test test = createTest(theClass, name); + + addTest(test); - return true; + return new AddResult(true, test); } } - return false; + return new AddResult(false, null); } /** @@ -336,6 +355,42 @@ public boolean forceFailure() { } } + /** + * Test add result. + */ + private static class AddResult { + /** Result. */ + private final boolean added; + + /** Test */ + private final Test test; + + /** + * Constructor. + * + * @param added Result. + * @param test Test. + */ + public AddResult(boolean added, Test test) { + this.added = added; + this.test = test; + } + + /** + * @return Result. + */ + public boolean added() { + return added; + } + + /** + * @return Test. 
+ */ + public Test test() { + return test; + } + } + /** * Test case simulating failure. */ diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java b/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java index 8d6fd07d90e62..aa90af0b54026 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java @@ -161,6 +161,12 @@ public abstract class GridAbstractTest extends TestCase { /** Force failure message. */ private String forceFailureMsg; + /** Whether test count is known is advance. */ + private boolean forceTestCnt; + + /** Number of tests. */ + private int testCnt; + /** * */ @@ -1769,6 +1775,15 @@ public void forceFailure(@Nullable String msg) { forceFailureMsg = msg; } + /** + * Set test count. + */ + public void forceTestCount(int cnt) { + testCnt = cnt; + + forceTestCnt = true; + } + /** * @throws Throwable If failed. */ @@ -2076,11 +2091,19 @@ public void incrementStopped() { */ public int getNumberOfTests() { if (numOfTests == -1) { - int cnt = 0; + GridAbstractTest this0 = GridAbstractTest.this; + + int cnt; - for (Method m : GridAbstractTest.this.getClass().getMethods()) - if (m.getName().startsWith("test") && Modifier.isPublic(m.getModifiers())) - cnt++; + if (this0.forceTestCnt) + cnt = this0.testCnt; + else { + cnt = 0; + + for (Method m : this0.getClass().getMethods()) + if (m.getName().startsWith("test") && Modifier.isPublic(m.getModifiers())) + cnt++; + } numOfTests = cnt; } From e3dfdecc3607b5f3183bfcb1ce36c57543a8965f Mon Sep 17 00:00:00 2001 From: Alexander Paschenko Date: Wed, 28 Sep 2016 16:46:46 +0300 Subject: [PATCH 50/69] Validate hash code presence in BinaryObject. 
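The practical consequence of the change below is that a BinaryObject used as a cache key must either come from a type that overrides equals() and hashCode() or carry an explicitly assigned hash code; otherwise put() now fails fast with IllegalArgumentException instead of silently storing a key whose hash code defaults to zero. A minimal usage sketch; the Ignite instance, cache name, type name and field values are illustrative only:

    BinaryObjectBuilder builder = ignite.binary().builder("custom_type");

    builder.setField("id", 1);

    // Without this call the key is now rejected:
    // "Cache key created with BinaryBuilder is missing hash code ..."
    builder.hashCode(1);

    BinaryObject key = builder.build();

    IgniteCache<BinaryObject, Integer> cache = ignite.cache("myCache").withKeepBinary();

    cache.put(key, 42);
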
Fixes #928 --- .../binary/BinaryClassDescriptor.java | 16 ++++- .../internal/binary/BinaryEnumObjectImpl.java | 5 ++ .../internal/binary/BinaryObjectEx.java | 8 +++ .../internal/binary/BinaryObjectImpl.java | 7 ++ .../binary/BinaryObjectOffheapImpl.java | 7 ++ .../ignite/internal/binary/BinaryUtils.java | 5 +- .../internal/binary/BinaryWriterExImpl.java | 6 +- .../builder/BinaryObjectBuilderImpl.java | 11 +++- .../processors/cache/GridCacheUtils.java | 5 ++ .../ignite/internal/util/IgniteUtils.java | 23 ++++++- ...dCacheStoreManagerDeserializationTest.java | 1 + .../cache/GridCacheUtilsSelfTest.java | 64 ++++++++++++++++++- ...lCacheStoreManagerDeserializationTest.java | 2 +- ...ridCacheBinaryObjectsAbstractSelfTest.java | 31 +++++++++ 14 files changed, 182 insertions(+), 9 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java index 083057ddd5eea..4c824d4409fb1 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java @@ -41,6 +41,7 @@ import org.apache.ignite.binary.Binarylizable; import org.apache.ignite.internal.processors.cache.CacheObjectImpl; import org.apache.ignite.internal.util.GridUnsafe; +import org.apache.ignite.internal.util.IgniteUtils; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; @@ -113,6 +114,9 @@ public class BinaryClassDescriptor { /** */ private final boolean excluded; + /** */ + private final boolean overridesHashCode; + /** */ private final Class[] intfs; @@ -164,6 +168,8 @@ public class BinaryClassDescriptor { this.mapper = mapper; this.registered = registered; + overridesHashCode = IgniteUtils.overridesEqualsAndHashCode(cls); + schemaReg = ctx.schemaRegistry(typeId); excluded = MarshallerExclusions.isExcluded(cls); @@ -845,7 +851,15 @@ private boolean preWrite(BinaryWriterExImpl writer, Object obj) { * @param obj Object. */ private void postWrite(BinaryWriterExImpl writer, Object obj) { - writer.postWrite(userType, registered, obj instanceof CacheObjectImpl ? 
0 : obj.hashCode()); + if (obj instanceof CacheObjectImpl) + writer.postWrite(userType, registered, 0, false); + else if (obj instanceof BinaryObjectEx) { + boolean flagSet = ((BinaryObjectEx)obj).isFlagSet(BinaryUtils.FLAG_EMPTY_HASH_CODE); + + writer.postWrite(userType, registered, obj.hashCode(), !flagSet); + } + else + writer.postWrite(userType, registered, obj.hashCode(), overridesHashCode); } /** diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryEnumObjectImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryEnumObjectImpl.java index c9874ed1cbc7c..dcfcc9d39f40f 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryEnumObjectImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryEnumObjectImpl.java @@ -138,6 +138,11 @@ public BinaryEnumObjectImpl(BinaryContext ctx, byte[] arr) { return BinaryUtils.type(ctx, this); } + /** {@inheritDoc} */ + @Override public boolean isFlagSet(short flag) { + return false; + } + /** {@inheritDoc} */ @Override public F field(String fieldName) throws BinaryObjectException { return null; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectEx.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectEx.java index e3566bc2e7fba..4e137b7c816ce 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectEx.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectEx.java @@ -38,4 +38,12 @@ public interface BinaryObjectEx extends BinaryObject { * @throws BinaryObjectException If failed. */ @Nullable public BinaryType rawType() throws BinaryObjectException; + + /** + * Check if flag set. + * + * @param flag flag to check. + * @return {@code true} if flag is set, {@code false} otherwise. 
+ */ + public boolean isFlagSet(short flag); } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java index 7b42c03c7d94a..f37d7c2051e36 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java @@ -245,6 +245,13 @@ public void context(BinaryContext ctx) { return true; } + /** {@inheritDoc} */ + @Override public boolean isFlagSet(short flag) { + short flags = BinaryPrimitives.readShort(arr, start + GridBinaryMarshaller.FLAGS_POS); + + return BinaryUtils.isFlagSet(flags, flag); + } + /** {@inheritDoc} */ @Override public int typeId() { int off = start + GridBinaryMarshaller.TYPE_ID_POS; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java index 2225b7a09ae9a..9cbbaa2767092 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java @@ -144,6 +144,13 @@ public BinaryObject heapCopy() { return false; } + /** {@inheritDoc} */ + @Override public boolean isFlagSet(short flag) { + short flags = BinaryPrimitives.readShort(ptr, start + GridBinaryMarshaller.FLAGS_POS); + + return BinaryUtils.isFlagSet(flags, flag); + } + /** {@inheritDoc} */ @Nullable @Override public BinaryType type() throws BinaryObjectException { return BinaryUtils.typeProxy(ctx, this); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java index b5834a5254425..25d87ffe802be 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java @@ -109,6 +109,9 @@ public class BinaryUtils { /** Flag: compact footer, no field IDs. */ public static final short FLAG_COMPACT_FOOTER = 0x0020; + /** Flag: no hash code has been set. */ + public static final short FLAG_EMPTY_HASH_CODE = 0x0040; + /** Offset which fits into 1 byte. */ public static final int OFFSET_1 = 1; @@ -305,7 +308,7 @@ public static boolean isCompactFooter(short flags) { * @param flag Flag. * @return {@code True} if flag is set in flags. */ - private static boolean isFlagSet(short flags, short flag) { + static boolean isFlagSet(short flags, short flag) { return (flags & flag) == flag; } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java index 1a818199b9944..22b4d1f481f5d 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java @@ -245,8 +245,9 @@ public void preWrite(@Nullable String clsName) { * @param userType User type flag. * @param registered Whether type is registered. * @param hashCode Hash code. + * @param isHashCodeSet Hash code presence flag. 
*/ - public void postWrite(boolean userType, boolean registered, int hashCode) { + public void postWrite(boolean userType, boolean registered, int hashCode, boolean isHashCodeSet) { short flags; boolean useCompactFooter; @@ -303,6 +304,9 @@ else if (offsetByteCnt == BinaryUtils.OFFSET_2) } } + if (!isHashCodeSet) + flags |= BinaryUtils.FLAG_EMPTY_HASH_CODE; + // Actual write. int retPos = out.position(); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java index 086da5c689057..2c761925e68af 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java @@ -86,6 +86,9 @@ public class BinaryObjectBuilderImpl implements BinaryObjectBuilder { /** */ private int hashCode; + /** */ + private boolean isHashCodeSet; + /** * @param clsName Class name. * @param ctx Binary context. @@ -117,7 +120,7 @@ public BinaryObjectBuilderImpl(BinaryContext ctx, int typeId, String typeName) { */ public BinaryObjectBuilderImpl(BinaryObjectImpl obj) { this(new BinaryBuilderReader(obj), obj.start()); - + isHashCodeSet = !obj.isFlagSet(BinaryUtils.FLAG_EMPTY_HASH_CODE); reader.registerObject(this); } @@ -329,7 +332,8 @@ else if (readCache == null) { reader.position(start + BinaryUtils.length(reader, start)); } - writer.postWrite(true, registeredType, hashCode); + //noinspection NumberEquality + writer.postWrite(true, registeredType, hashCode, isHashCodeSet); // Update metadata if needed. int schemaId = writer.schemaId(); @@ -408,9 +412,12 @@ else if (!nullFieldVal) { } /** {@inheritDoc} */ + @SuppressWarnings("UnnecessaryBoxing") @Override public BinaryObjectBuilderImpl hashCode(int hashCode) { this.hashCode = hashCode; + isHashCodeSet = true; + return this; } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java index 1a4ffd502178c..0f4e89bc89963 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java @@ -1170,6 +1170,7 @@ private static String capitalize(String str) { /** * Validates that cache key object has overridden equals and hashCode methods. + * Will also check that a BinaryObject has a hash code set. * * @param key Key. * @throws IllegalArgumentException If equals or hashCode is not implemented. 
@@ -1181,6 +1182,10 @@ public static void validateCacheKey(@Nullable Object key) { if (!U.overridesEqualsAndHashCode(key)) throw new IllegalArgumentException("Cache key must override hashCode() and equals() methods: " + key.getClass().getName()); + + if (U.isHashCodeEmpty(key)) + throw new IllegalArgumentException("Cache key created with BinaryBuilder is missing hash code - " + + "please set it explicitly during building by using BinaryBuilder.hashCode(int)"); } /** diff --git a/modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java index cdaeab1abe552..501cdb2eaba49 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java @@ -149,6 +149,7 @@ import org.apache.ignite.IgniteInterruptedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.IgniteSystemProperties; +import org.apache.ignite.binary.BinaryObject; import org.apache.ignite.binary.BinaryRawReader; import org.apache.ignite.binary.BinaryRawWriter; import org.apache.ignite.cluster.ClusterGroupEmptyException; @@ -170,6 +171,8 @@ import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.IgniteNodeAttributes; +import org.apache.ignite.internal.binary.BinaryObjectEx; +import org.apache.ignite.internal.binary.BinaryUtils; import org.apache.ignite.internal.cluster.ClusterGroupEmptyCheckedException; import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException; import org.apache.ignite.internal.compute.ComputeTaskCancelledCheckedException; @@ -8479,9 +8482,15 @@ public static String consistentId(Collection addrs, int port) { * @return {@code True} if given object has overridden equals and hashCode method. */ public static boolean overridesEqualsAndHashCode(Object obj) { - try { - Class cls = obj.getClass(); + return overridesEqualsAndHashCode(obj.getClass()); + } + /** + * @param cls Class. + * @return {@code True} if given class has overridden equals and hashCode method. + */ + public static boolean overridesEqualsAndHashCode(Class cls) { + try { return !Object.class.equals(cls.getMethod("equals", Object.class).getDeclaringClass()) && !Object.class.equals(cls.getMethod("hashCode").getDeclaringClass()); } @@ -8490,6 +8499,16 @@ public static boolean overridesEqualsAndHashCode(Object obj) { } } + /** + * @param obj Object. + * @return {@code True} if given object is a {@link BinaryObjectEx} and + * has {@link BinaryUtils#FLAG_EMPTY_HASH_CODE} set + */ + public static boolean isHashCodeEmpty(Object obj) { + return obj != null && obj instanceof BinaryObjectEx && + ((BinaryObjectEx)obj).isFlagSet(BinaryUtils.FLAG_EMPTY_HASH_CODE); + } + /** * Checks if error is MAC invalid argument error which ususally requires special handling. 
* diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheStoreManagerDeserializationTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheStoreManagerDeserializationTest.java index 4a069a95b9122..39ce33d4bb22a 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheStoreManagerDeserializationTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheStoreManagerDeserializationTest.java @@ -279,6 +279,7 @@ private BinaryObject streamBinaryData(final Ignite grid) { for (int i = 0; i < 1; i++) { builder.setField("id", i); + builder.hashCode(i); entity = builder.build(); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheUtilsSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheUtilsSelfTest.java index d5888e725c8e0..5f2c0046daae9 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheUtilsSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheUtilsSelfTest.java @@ -18,7 +18,21 @@ package org.apache.ignite.internal.processors.cache; import java.util.concurrent.Callable; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.binary.BinaryObject; +import org.apache.ignite.configuration.BinaryConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.binary.BinaryCachingMetadataHandler; +import org.apache.ignite.internal.binary.BinaryContext; +import org.apache.ignite.internal.binary.BinaryMarshaller; +import org.apache.ignite.internal.binary.BinaryObjectImpl; +import org.apache.ignite.internal.binary.GridBinaryMarshaller; +import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl; +import org.apache.ignite.internal.util.IgniteUtils; import org.apache.ignite.internal.util.typedef.internal.CU; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.apache.ignite.logger.NullLogger; +import org.apache.ignite.marshaller.MarshallerContextTestImpl; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; @@ -104,7 +118,8 @@ private static class ExtendsClassWithEqualsAndHashCode2 extends EqualsAndHashCod /** */ - public void testCacheKeyValidation() { + @SuppressWarnings("ResultOfMethodCallIgnored") + public void testCacheKeyValidation() throws IgniteCheckedException { CU.validateCacheKey("key"); CU.validateCacheKey(1); @@ -124,6 +139,53 @@ public void testCacheKeyValidation() { assertThrowsForInvalidKey(new NoHashCode()); assertThrowsForInvalidKey(new WrongEquals()); + + BinaryObjectBuilderImpl binBuilder = new BinaryObjectBuilderImpl(binaryContext(), + EqualsAndHashCode.class.getName()); + + assertThrowsForInvalidKey(binBuilder.build()); + + binBuilder.hashCode(0xFE12); + + BinaryObject binObj = binBuilder.build(); + + CU.validateCacheKey(binObj); + + BinaryObjectBuilderImpl binBuilder2 = new BinaryObjectBuilderImpl((BinaryObjectImpl) binObj); + + CU.validateCacheKey(binBuilder2.build()); + } + + /** + * @return Binary marshaller. + * @throws IgniteCheckedException if failed. 
+ */ + private BinaryMarshaller binaryMarshaller() throws IgniteCheckedException { + IgniteConfiguration iCfg = new IgniteConfiguration(); + + BinaryConfiguration bCfg = new BinaryConfiguration(); + + iCfg.setBinaryConfiguration(bCfg); + + BinaryContext ctx = new BinaryContext(BinaryCachingMetadataHandler.create(), iCfg, new NullLogger()); + + BinaryMarshaller marsh = new BinaryMarshaller(); + + marsh.setContext(new MarshallerContextTestImpl(null)); + + IgniteUtils.invoke(BinaryMarshaller.class, marsh, "setBinaryContext", ctx, iCfg); + + return marsh; + } + + /** + * @return Binary context. + * @throws IgniteCheckedException if failed. + */ + private BinaryContext binaryContext() throws IgniteCheckedException { + GridBinaryMarshaller impl = U.field(binaryMarshaller(), "impl"); + + return impl.context(); } /** diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridLocalCacheStoreManagerDeserializationTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridLocalCacheStoreManagerDeserializationTest.java index 827b3cf3a1e54..b86fe53102e9e 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridLocalCacheStoreManagerDeserializationTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridLocalCacheStoreManagerDeserializationTest.java @@ -86,7 +86,7 @@ public void testBinaryUpdate() throws Exception { final BinaryObjectBuilder builder = grid.binary().builder("custom_type"); - final BinaryObject entity = builder.setField("id", 0).build(); + final BinaryObject entity = builder.setField("id", 0).hashCode(0).build(); cache.put(entity, entity); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java index 3a510c354d02e..7936ea49b7303 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java @@ -28,6 +28,7 @@ import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.concurrent.Callable; import javax.cache.Cache; import javax.cache.processor.EntryProcessor; import javax.cache.processor.EntryProcessorException; @@ -886,6 +887,36 @@ private void checkGetAllAsyncTx(TransactionConcurrency concurrency, TransactionI } } + /** + * @throws Exception If failed. 
+ */ + @SuppressWarnings({ "ThrowableResultOfMethodCallIgnored", "unchecked" }) + public void testPutWithoutHashCode() throws Exception { + final IgniteCache c = jcache(0); + + GridTestUtils.assertThrows(log, new Callable() { + /** {@inheritDoc} */ + @Override public Object call() throws Exception { + c.put(new TestObject(5), 5); + return null; + } + }, IllegalArgumentException.class, "Cache key must override hashCode() and equals() methods: "); + + BinaryObjectBuilder bldr = grid(0).binary().builder(TestObject.class.getName()); + bldr.setField("val", 5); + + final BinaryObject binKey = bldr.build(); + + GridTestUtils.assertThrows(log, new Callable() { + /** {@inheritDoc} */ + @Override public Object call() throws Exception { + c.put(binKey, 5); + return null; + } + }, IllegalArgumentException.class, "Cache key created with BinaryBuilder is missing hash code - " + + "please set it explicitly during building by using BinaryBuilder.hashCode(int)"); + } + /** * @throws Exception if failed. */ From d1e3a78ae569fa5d5692816db44f2c677e1b8283 Mon Sep 17 00:00:00 2001 From: Valentin Kulichenko Date: Wed, 28 Sep 2016 17:53:42 -0700 Subject: [PATCH 51/69] IGNITE-3191 - Fixed ordering of fields in binary objects --- .../binary/BinaryClassDescriptor.java | 16 +-- .../builder/BinaryObjectBuilderImpl.java | 24 ++--- .../binary/BinaryFieldOrderSelfTest.java | 98 +++++++++++++++++++ .../IgniteBinaryObjectsTestSuite.java | 2 + 4 files changed, 121 insertions(+), 19 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java index 4c824d4409fb1..276dfe5134252 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java @@ -25,7 +25,6 @@ import java.lang.reflect.Proxy; import java.math.BigDecimal; import java.sql.Timestamp; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; @@ -33,6 +32,7 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.TreeMap; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.binary.BinaryObjectException; @@ -269,10 +269,9 @@ else if (useOptMarshaller) case OBJECT: // Must not use constructor to honor transient fields semantics. ctor = null; - ArrayList fields0 = new ArrayList<>(); stableFieldsMeta = metaDataEnabled ? 
new HashMap() : null; - BinarySchema.Builder schemaBuilder = BinarySchema.Builder.newBuilder(); + Map fields0 = new TreeMap<>(); Set duplicates = duplicateFields(cls); @@ -300,9 +299,7 @@ else if (useOptMarshaller) BinaryFieldAccessor fieldInfo = BinaryFieldAccessor.create(f, fieldId); - fields0.add(fieldInfo); - - schemaBuilder.addField(fieldId); + fields0.put(name, fieldInfo); if (metaDataEnabled) stableFieldsMeta.put(name, fieldInfo.mode().typeId()); @@ -310,7 +307,12 @@ else if (useOptMarshaller) } } - fields = fields0.toArray(new BinaryFieldAccessor[fields0.size()]); + fields = fields0.values().toArray(new BinaryFieldAccessor[fields0.size()]); + + BinarySchema.Builder schemaBuilder = BinarySchema.Builder.newBuilder(); + + for (BinaryFieldAccessor field : fields) + schemaBuilder.addField(field.id); stableSchema = schemaBuilder.build(); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java index 2c761925e68af..d166051d18f35 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java @@ -17,32 +17,32 @@ package org.apache.ignite.internal.binary.builder; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; import org.apache.ignite.binary.BinaryInvalidTypeException; import org.apache.ignite.binary.BinaryObject; import org.apache.ignite.binary.BinaryObjectBuilder; import org.apache.ignite.binary.BinaryObjectException; import org.apache.ignite.binary.BinaryType; +import org.apache.ignite.internal.binary.BinaryContext; import org.apache.ignite.internal.binary.BinaryMetadata; import org.apache.ignite.internal.binary.BinaryObjectImpl; -import org.apache.ignite.internal.binary.BinaryWriterExImpl; -import org.apache.ignite.internal.binary.GridBinaryMarshaller; -import org.apache.ignite.internal.binary.BinaryContext; +import org.apache.ignite.internal.binary.BinaryObjectOffheapImpl; import org.apache.ignite.internal.binary.BinarySchema; import org.apache.ignite.internal.binary.BinarySchemaRegistry; -import org.apache.ignite.internal.binary.BinaryObjectOffheapImpl; import org.apache.ignite.internal.binary.BinaryUtils; +import org.apache.ignite.internal.binary.BinaryWriterExImpl; +import org.apache.ignite.internal.binary.GridBinaryMarshaller; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import org.jetbrains.annotations.Nullable; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Set; - /** * */ @@ -522,7 +522,7 @@ private void ensureReadCacheInit() { Object val = val0 == null ? 
new BinaryValueWithType(BinaryUtils.typeByClass(Object.class), null) : val0; if (assignedVals == null) - assignedVals = new LinkedHashMap<>(); + assignedVals = new TreeMap<>(); Object oldVal = assignedVals.put(name, val); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java new file mode 100644 index 0000000000000..6bb1e1380da8a --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java @@ -0,0 +1,98 @@ +package org.apache.ignite.internal.binary; + +import org.apache.ignite.binary.BinaryObject; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.IgniteEx; +import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; +import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; + +/** + * Test that field ordering doesn't change the schema. + */ +public class BinaryFieldOrderSelfTest extends GridCommonAbstractTest { + /** {@inheritDoc} */ + @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { + IgniteConfiguration cfg = super.getConfiguration(gridName); + + cfg.setMarshaller(new BinaryMarshaller()); + cfg.setDiscoverySpi(new TcpDiscoverySpi().setIpFinder(new TcpDiscoveryVmIpFinder(true))); + + return cfg; + } + + /** {@inheritDoc} */ + @Override protected void beforeTest() throws Exception { + startGrid(); + } + + /** {@inheritDoc} */ + @Override protected void afterTest() throws Exception { + stopAllGrids(); + } + + /** + * @throws Exception If failed. + */ + public void testEquals() throws Exception { + IgniteEx ignite = grid(); + + BinaryObject bo0 = ignite.binary().toBinary(new MyType(222, 333, 111)); + + BinaryObject bo1 = ignite.binary().builder(bo0.type().typeName()). + setField("b", 222). + setField("c", 333). + setField("a", 111). + hashCode(12345). + build(); + + BinaryObject bo2 = ignite.binary().builder(bo0.type().typeName()). + setField("a", 111). + setField("b", 222). + setField("c", 333). + hashCode(12345). + build(); + + assertEquals(12345, bo0.hashCode()); + assertEquals(12345, bo1.hashCode()); + assertEquals(12345, bo2.hashCode()); + + assertTrue(bo0.equals(bo1)); + assertTrue(bo0.equals(bo2)); + assertTrue(bo1.equals(bo2)); + } + + /** + */ + private static class MyType { + /** B. */ + private int b; + + /** C. */ + private int c; + + /** A. */ + private int a; + + /** + * @param b B. + * @param c C. + * @param a A. 
+ */ + MyType(int b, int c, int a) { + this.b = b; + this.c = c; + this.a = a; + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { + return super.equals(obj); + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + return 12345; + } + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java index c1d9974837fb3..50c6f0b95a95d 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java @@ -22,6 +22,7 @@ import org.apache.ignite.internal.binary.BinaryBasicNameMapperSelfTest; import org.apache.ignite.internal.binary.BinaryConfigurationConsistencySelfTest; import org.apache.ignite.internal.binary.BinaryEnumsSelfTest; +import org.apache.ignite.internal.binary.BinaryFieldOrderSelfTest; import org.apache.ignite.internal.binary.BinaryFieldsHeapSelfTest; import org.apache.ignite.internal.binary.BinaryFieldsOffheapSelfTest; import org.apache.ignite.internal.binary.BinaryFooterOffsetsHeapSelfTest; @@ -104,6 +105,7 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(GridBinaryAffinityKeySelfTest.class); suite.addTestSuite(GridBinaryWildcardsSelfTest.class); suite.addTestSuite(BinaryObjectToStringSelfTest.class); + suite.addTestSuite(BinaryFieldOrderSelfTest.class); // Tests for objects with non-compact footers. suite.addTestSuite(BinaryMarshallerNonCompactSelfTest.class); From b280c3efa1eb84c6bc8abbe31ba669b0c24323d8 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Thu, 29 Sep 2016 10:23:54 +0300 Subject: [PATCH 52/69] Fixed missing Apache header. --- .../binary/BinaryFieldOrderSelfTest.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java index 6bb1e1380da8a..e4709480f49a4 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.ignite.internal.binary; import org.apache.ignite.binary.BinaryObject; From 3e8a1c6b045c231dbb3c972463000f824386aee9 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Thu, 29 Sep 2016 11:08:23 +0300 Subject: [PATCH 53/69] Added node stop to IgniteCacheExpireAndUpdateConsistencyTest. 
--- .../cache/IgniteCacheExpireAndUpdateConsistencyTest.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheExpireAndUpdateConsistencyTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheExpireAndUpdateConsistencyTest.java index 7f54a832becf0..882ed2200b15e 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheExpireAndUpdateConsistencyTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheExpireAndUpdateConsistencyTest.java @@ -97,6 +97,13 @@ public class IgniteCacheExpireAndUpdateConsistencyTest extends GridCommonAbstrac assertTrue(client.configuration().isClientMode()); } + /** {@inheritDoc} */ + @Override protected void afterTestsStopped() throws Exception { + stopAllGrids(); + + super.afterTestsStopped(); + } + /** * @throws Exception If failed. */ From 062b9b69aa851d5253dfb7f44066b7d749f1ca12 Mon Sep 17 00:00:00 2001 From: Alexey Goncharuk Date: Thu, 29 Sep 2016 11:26:50 +0300 Subject: [PATCH 54/69] IGNITE-3633 - Enforce key validation for tests. --- .../ignite/internal/processors/cache/GridCacheAdapter.java | 7 +++++++ .../binary/GridCacheBinaryObjectsAbstractSelfTest.java | 6 ++++++ 2 files changed, 13 insertions(+) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAdapter.java index fe6bb1be27c89..55400abb72021 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAdapter.java @@ -4890,6 +4890,13 @@ public void onReconnected() { // No-op. } + /** + * For tests only. + */ + public void forceKeyCheck() { + keyCheck = true; + } + /** * Validates that given cache key has overridden equals and hashCode methods and * implements {@link Externalizable}. diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java index 7936ea49b7303..150c2454762b5 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectsAbstractSelfTest.java @@ -894,6 +894,10 @@ private void checkGetAllAsyncTx(TransactionConcurrency concurrency, TransactionI public void testPutWithoutHashCode() throws Exception { final IgniteCache c = jcache(0); + GridCacheAdapter cache0 = grid(0).context().cache().internalCache(null); + + cache0.forceKeyCheck(); + GridTestUtils.assertThrows(log, new Callable() { /** {@inheritDoc} */ @Override public Object call() throws Exception { @@ -907,6 +911,8 @@ public void testPutWithoutHashCode() throws Exception { final BinaryObject binKey = bldr.build(); + cache0.forceKeyCheck(); + GridTestUtils.assertThrows(log, new Callable() { /** {@inheritDoc} */ @Override public Object call() throws Exception { From ad613afd7165ed2d8bf0a62805fc2506dfe6c59a Mon Sep 17 00:00:00 2001 From: sboikov Date: Thu, 29 Sep 2016 12:04:44 +0300 Subject: [PATCH 55/69] ignite-3621 Fixed 'testEvictExpired'. 
(cherry picked from commit a4d7aa3) --- .../cache/IgniteCacheConfigVariationsFullApiTest.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheConfigVariationsFullApiTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheConfigVariationsFullApiTest.java index 2ca09c836ce7d..6b0e1932e1f2c 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheConfigVariationsFullApiTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheConfigVariationsFullApiTest.java @@ -3336,7 +3336,12 @@ public void testEvictExpired() throws Exception { boolean wait = waitForCondition(new GridAbsPredicate() { @Override public boolean apply() { - return cache.localPeek(key) == null; + for (int i = 0; i < gridCount(); i++) { + if (peek(jcache(i), key) != null) + return false; + } + + return true; } }, ttl + 1000); From 22dc2c9e05e5183f838865023c9cb7a8291ac67f Mon Sep 17 00:00:00 2001 From: Alexander Paschenko Date: Thu, 29 Sep 2016 15:43:20 +0300 Subject: [PATCH 56/69] GridNearCacheAdapter, GridDhtCacheAdapter - test fix --- .../cache/distributed/dht/GridDhtCacheAdapter.java | 12 ++++++++++++ .../cache/distributed/near/GridNearCacheAdapter.java | 12 ++++++++++++ 2 files changed, 24 insertions(+) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java index 35e62670ea2a9..8ced02f27fd9e 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java @@ -261,6 +261,18 @@ protected GridCacheMapEntryFactory entryFactory() { */ public abstract GridNearCacheAdapter near(); + /** {@inheritDoc} */ + @Override public void forceKeyCheck() { + if (!keyCheck) { + super.forceKeyCheck(); + + GridNearCacheAdapter near = near(); + + if (near != null) + near.forceKeyCheck(); + } + } + /** * @return Partition topology. 
*/ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java index dd66a334c8ed2..6acf48ea0fa55 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java @@ -114,6 +114,18 @@ protected GridNearCacheAdapter(GridCacheContext ctx) { */ public abstract GridDhtCacheAdapter dht(); + /** {@inheritDoc} */ + @Override public void forceKeyCheck() { + if (!keyCheck) { + super.forceKeyCheck(); + + GridDhtCacheAdapter dht = dht(); + + if (dht != null) + dht.forceKeyCheck(); + } + } + /** {@inheritDoc} */ @Override public void onReconnected() { map = new GridCacheConcurrentMapImpl( From f447559ecba55149452e4e48e2bf44ef1fa1b4d7 Mon Sep 17 00:00:00 2001 From: Alexey Goncharuk Date: Thu, 29 Sep 2016 17:10:14 +0300 Subject: [PATCH 57/69] ignite-1.6.9 - Fixing tests --- .../cache/distributed/dht/GridDhtCacheAdapter.java | 12 ------------ .../cache/distributed/near/GridNearCacheAdapter.java | 9 ++------- 2 files changed, 2 insertions(+), 19 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java index 8ced02f27fd9e..35e62670ea2a9 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtCacheAdapter.java @@ -261,18 +261,6 @@ protected GridCacheMapEntryFactory entryFactory() { */ public abstract GridNearCacheAdapter near(); - /** {@inheritDoc} */ - @Override public void forceKeyCheck() { - if (!keyCheck) { - super.forceKeyCheck(); - - GridNearCacheAdapter near = near(); - - if (near != null) - near.forceKeyCheck(); - } - } - /** * @return Partition topology. */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java index 6acf48ea0fa55..4ddad74157987 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheAdapter.java @@ -116,14 +116,9 @@ protected GridNearCacheAdapter(GridCacheContext ctx) { /** {@inheritDoc} */ @Override public void forceKeyCheck() { - if (!keyCheck) { - super.forceKeyCheck(); + super.forceKeyCheck(); - GridDhtCacheAdapter dht = dht(); - - if (dht != null) - dht.forceKeyCheck(); - } + dht().forceKeyCheck(); } /** {@inheritDoc} */ From a4c63ae113931615e940ada2411ebce32dba5db8 Mon Sep 17 00:00:00 2001 From: iveselovskiy Date: Fri, 30 Sep 2016 10:45:13 +0300 Subject: [PATCH 58/69] IGNITE-3998: IGFS: Enabled IgfsAbstractSelfTest.testCreateConsistencyMultithreaded. This closes #1129. 
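
The re-enabled test has many worker threads repeatedly re-create the same IGFS file with overwrite enabled; an IgniteException is tolerated as a benign loss of the create race, while any IOException is recorded and fails the test. A minimal sketch of the core worker loop, where igfs, path, chunk, stop, err and createCtr stand in for the test's surrounding declarations:

    // Each worker keeps overwriting the same file until the coordinator flips the stop flag
    // or another worker has already recorded a failure.
    while (!stop.get() && err.get() == null) {
        try (IgfsOutputStream os = igfs.create(path, true)) {
            os.write(chunk);

            createCtr.incrementAndGet();
        }
        catch (IgniteException ignored) {
            // Lost the race to a concurrent creator; expected under contention.
        }
        catch (IOException e) {
            // An I/O failure here means create/overwrite consistency was violated.
            err.compareAndSet(null, e);
        }
    }
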
--- .../processors/igfs/IgfsAbstractSelfTest.java | 141 +++++++++--------- 1 file changed, 70 insertions(+), 71 deletions(-) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java index e8d65f18e06ab..7058954f01770 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java @@ -1310,77 +1310,76 @@ public void testCreateConsistency() throws Exception { * @throws Exception If failed. */ public void testCreateConsistencyMultithreaded() throws Exception { - // TODO: Enable -// final AtomicBoolean stop = new AtomicBoolean(); -// -// final AtomicInteger createCtr = new AtomicInteger(); // How many times the file was re-created. -// final AtomicReference err = new AtomicReference<>(); -// -// igfs.create(FILE, false).close(); -// -// int threadCnt = 50; -// -// IgniteInternalFuture fut = multithreadedAsync(new Runnable() { -// @SuppressWarnings("ThrowFromFinallyBlock") -// @Override public void run() { -// while (!stop.get() && err.get() == null) { -// IgfsOutputStream os = null; -// -// try { -// os = igfs.create(FILE, true); -// -// os.write(chunk); -// -// os.close(); -// -// createCtr.incrementAndGet(); -// } -// catch (IgniteException e) { -// // No-op. -// } -// catch (IOException e) { -// err.compareAndSet(null, e); -// -// Throwable[] chain = X.getThrowables(e); -// -// Throwable cause = chain[chain.length - 1]; -// -// System.out.println("Failed due to IOException exception. Cause:"); -// cause.printStackTrace(System.out); -// } -// finally { -// if (os != null) -// try { -// os.close(); -// } -// catch (IOException ioe) { -// throw new IgniteException(ioe); -// } -// } -// } -// } -// }, threadCnt); -// -// long startTime = U.currentTimeMillis(); -// -// while (err.get() == null -// && createCtr.get() < 500 -// && U.currentTimeMillis() - startTime < 60 * 1000) -// U.sleep(100); -// -// stop.set(true); -// -// fut.get(); -// -// awaitFileClose(igfs.asSecondary(), FILE); -// -// if (err.get() != null) { -// X.println("Test failed: rethrowing first error: " + err.get()); -// -// throw err.get(); -// } -// -// checkFileContent(igfs, FILE, chunk); + final AtomicBoolean stop = new AtomicBoolean(); + + final AtomicInteger createCtr = new AtomicInteger(); // How many times the file was re-created. + final AtomicReference err = new AtomicReference<>(); + + igfs.create(FILE, false).close(); + + int threadCnt = 50; + + IgniteInternalFuture fut = multithreadedAsync(new Runnable() { + @SuppressWarnings("ThrowFromFinallyBlock") + @Override public void run() { + while (!stop.get() && err.get() == null) { + IgfsOutputStream os = null; + + try { + os = igfs.create(FILE, true); + + os.write(chunk); + + os.close(); + + createCtr.incrementAndGet(); + } + catch (IgniteException e) { + // No-op. + } + catch (IOException e) { + err.compareAndSet(null, e); + + Throwable[] chain = X.getThrowables(e); + + Throwable cause = chain[chain.length - 1]; + + System.out.println("Failed due to IOException exception. 
Cause:"); + cause.printStackTrace(System.out); + } + finally { + if (os != null) + try { + os.close(); + } + catch (IOException ioe) { + throw new IgniteException(ioe); + } + } + } + } + }, threadCnt); + + long startTime = U.currentTimeMillis(); + + while (err.get() == null + && createCtr.get() < 500 + && U.currentTimeMillis() - startTime < 60 * 1000) + U.sleep(100); + + stop.set(true); + + fut.get(); + + awaitFileClose(igfs.asSecondary(), FILE); + + if (err.get() != null) { + X.println("Test failed: rethrowing first error: " + err.get()); + + throw err.get(); + } + + checkFileContent(igfs, FILE, chunk); } /** From a12298c0baf19407f3110ba3c4f0aab88b67946d Mon Sep 17 00:00:00 2001 From: Igor Sapego Date: Fri, 30 Sep 2016 11:00:51 +0300 Subject: [PATCH 59/69] IGNITE-3868: ODBC: Fixed connection string parsing. This closes #1051. --- modules/platforms/cpp/odbc/src/odbc.cpp | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/modules/platforms/cpp/odbc/src/odbc.cpp b/modules/platforms/cpp/odbc/src/odbc.cpp index 9df64d3b14133..7416ad2b5b663 100644 --- a/modules/platforms/cpp/odbc/src/odbc.cpp +++ b/modules/platforms/cpp/odbc/src/odbc.cpp @@ -289,9 +289,7 @@ namespace ignite if (!diag.IsSuccessful()) return diag.GetReturnCode(); - std::string outConnectStr = connection->GetConfiguration().ToConnectString(); - - size_t reslen = CopyStringToBuffer(outConnectStr, + size_t reslen = CopyStringToBuffer(connectStr, reinterpret_cast(outConnectionString), static_cast(outConnectionStringBufferLen)); @@ -753,7 +751,7 @@ namespace ignite LOG_MSG("columnSizeRes: %lld\n", columnSizeRes); LOG_MSG("decimalDigitsRes: %lld\n", decimalDigitsRes); LOG_MSG("nullableRes: %lld\n", nullableRes); - LOG_MSG("columnNameBuf: %s\n", columnNameBuf ? columnNameBuf : ""); + LOG_MSG("columnNameBuf: %s\n", columnNameBuf ? (const char*)columnNameBuf : ""); LOG_MSG("columnNameLen: %d\n", columnNameLen ? *columnNameLen : -1); if (dataType) From c32082fe8b1e02758179c1b7bb61a75be53534fe Mon Sep 17 00:00:00 2001 From: Alexey Kuznetsov Date: Fri, 30 Sep 2016 15:20:11 +0700 Subject: [PATCH 60/69] IGNITE-4007 Fixed update of QueryMetrics.minimumTime() metric. Tests added. Added Visor reset metrics task. --- .../query/GridCacheQueryMetricsAdapter.java | 12 ++-- .../VisorCacheResetQueryMetricsTask.java | 69 +++++++++++++++++++ .../CacheAbstractQueryMetricsSelfTest.java | 6 +- 3 files changed, 80 insertions(+), 7 deletions(-) create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheResetQueryMetricsTask.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryMetricsAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryMetricsAdapter.java index 1928ea56d6e0d..e70ea9ff46222 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryMetricsAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryMetricsAdapter.java @@ -34,7 +34,7 @@ public class GridCacheQueryMetricsAdapter implements QueryMetrics, Externalizabl private static final long serialVersionUID = 0L; /** Minimum time of execution. */ - private final GridAtomicLong minTime = new GridAtomicLong(); + private final GridAtomicLong minTime = new GridAtomicLong(Long.MAX_VALUE); /** Maximum time of execution. 
*/ private final GridAtomicLong maxTime = new GridAtomicLong(); @@ -58,7 +58,9 @@ public class GridCacheQueryMetricsAdapter implements QueryMetrics, Externalizabl /** {@inheritDoc} */ @Override public long minimumTime() { - return minTime.get(); + long min = minTime.get(); + + return min == Long.MAX_VALUE ? 0 : min; } /** {@inheritDoc} */ @@ -71,9 +73,9 @@ public class GridCacheQueryMetricsAdapter implements QueryMetrics, Externalizabl if (avgTime > 0) return avgTime; else { - long val = completed.sum(); + double val = completed.sum(); - return val > 0 ? sumTime.sum() / val : 0; + return val > 0 ? sumTime.sum() / val : 0.0; } } @@ -170,4 +172,4 @@ public GridCacheQueryMetricsAdapter copy() { @Override public String toString() { return S.toString(GridCacheQueryMetricsAdapter.class, this); } -} \ No newline at end of file +} diff --git a/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheResetQueryMetricsTask.java b/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheResetQueryMetricsTask.java new file mode 100644 index 0000000000000..96d98573e9f8d --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheResetQueryMetricsTask.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.visor.cache; + +import org.apache.ignite.internal.processors.cache.IgniteInternalCache; +import org.apache.ignite.internal.processors.task.GridInternal; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.internal.visor.VisorJob; +import org.apache.ignite.internal.visor.VisorOneNodeTask; + +/** + * Reset compute grid query metrics. + */ +@GridInternal +public class VisorCacheResetQueryMetricsTask extends VisorOneNodeTask { + /** */ + private static final long serialVersionUID = 0L; + + /** {@inheritDoc} */ + @Override protected VisorCacheResetQueryMetricsJob job(String arg) { + return new VisorCacheResetQueryMetricsJob(arg, debug); + } + + /** + * Job that reset cache query metrics. + */ + private static class VisorCacheResetQueryMetricsJob extends VisorJob { + /** */ + private static final long serialVersionUID = 0L; + + /** + * @param arg Cache name to reset query metrics for. + * @param debug Debug flag. 
+ */ + private VisorCacheResetQueryMetricsJob(String arg, boolean debug) { + super(arg, debug); + } + + /** {@inheritDoc} */ + @Override protected Void run(String cacheName) { + IgniteInternalCache cache = ignite.cachex(cacheName); + + if (cache != null) + cache.context().queries().resetMetrics(); + + return null; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(VisorCacheResetQueryMetricsJob.class, this); + } + } +} \ No newline at end of file diff --git a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/CacheAbstractQueryMetricsSelfTest.java b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/CacheAbstractQueryMetricsSelfTest.java index d2d8c4d75ee0c..10f761237cdb1 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/CacheAbstractQueryMetricsSelfTest.java +++ b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/CacheAbstractQueryMetricsSelfTest.java @@ -236,6 +236,7 @@ private void testQueryMetrics(IgniteCache cache, Query qry) { assertTrue(m.averageTime() >= 0); assertTrue(m.maximumTime() >= 0); assertTrue(m.minimumTime() >= 0); + assertTrue("On first execution minTime == maxTime", m.minimumTime() == m.maximumTime()); // Execute again with the same parameters. cache.query(qry).getAll(); @@ -274,6 +275,7 @@ private void testQueryNotFullyFetchedMetrics(IgniteCache cache, assertTrue(m.averageTime() >= 0); assertTrue(m.maximumTime() >= 0); assertTrue(m.minimumTime() >= 0); + assertTrue("On first execution minTime == maxTime", m.minimumTime() == m.maximumTime()); // Execute again with the same parameters. cache.query(qry).iterator().next(); @@ -301,7 +303,7 @@ private void testQueryFailedMetrics(IgniteCache cache, Query qr try { cache.query(qry).getAll(); } - catch (Exception e) { + catch (Exception ignored) { // No-op. } @@ -320,7 +322,7 @@ private void testQueryFailedMetrics(IgniteCache cache, Query qr try { cache.query(qry).getAll(); } - catch (Exception e) { + catch (Exception ignored) { // No-op. } From f745371af76baa676385c22257b4577d2e697d16 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Mon, 3 Oct 2016 10:07:26 +0300 Subject: [PATCH 61/69] Revert "Fixed missing Apache header." This reverts commit b280c3efa1eb84c6bc8abbe31ba669b0c24323d8. --- .../binary/BinaryFieldOrderSelfTest.java | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java index e4709480f49a4..6bb1e1380da8a 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java @@ -1,20 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - package org.apache.ignite.internal.binary; import org.apache.ignite.binary.BinaryObject; From de50287d493acc73186926d34431733bc76c549b Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Mon, 3 Oct 2016 10:07:33 +0300 Subject: [PATCH 62/69] Revert "IGNITE-3191 - Fixed ordering of fields in binary objects" This reverts commit d1e3a78ae569fa5d5692816db44f2c677e1b8283. --- .../binary/BinaryClassDescriptor.java | 16 ++- .../builder/BinaryObjectBuilderImpl.java | 24 ++--- .../binary/BinaryFieldOrderSelfTest.java | 98 ------------------- .../IgniteBinaryObjectsTestSuite.java | 2 - 4 files changed, 19 insertions(+), 121 deletions(-) delete mode 100644 modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java index 276dfe5134252..4c824d4409fb1 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryClassDescriptor.java @@ -25,6 +25,7 @@ import java.lang.reflect.Proxy; import java.math.BigDecimal; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; @@ -32,7 +33,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.TreeMap; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.binary.BinaryObjectException; @@ -269,9 +269,10 @@ else if (useOptMarshaller) case OBJECT: // Must not use constructor to honor transient fields semantics. ctor = null; + ArrayList fields0 = new ArrayList<>(); stableFieldsMeta = metaDataEnabled ? 
new HashMap() : null; - Map fields0 = new TreeMap<>(); + BinarySchema.Builder schemaBuilder = BinarySchema.Builder.newBuilder(); Set duplicates = duplicateFields(cls); @@ -299,7 +300,9 @@ else if (useOptMarshaller) BinaryFieldAccessor fieldInfo = BinaryFieldAccessor.create(f, fieldId); - fields0.put(name, fieldInfo); + fields0.add(fieldInfo); + + schemaBuilder.addField(fieldId); if (metaDataEnabled) stableFieldsMeta.put(name, fieldInfo.mode().typeId()); @@ -307,12 +310,7 @@ else if (useOptMarshaller) } } - fields = fields0.values().toArray(new BinaryFieldAccessor[fields0.size()]); - - BinarySchema.Builder schemaBuilder = BinarySchema.Builder.newBuilder(); - - for (BinaryFieldAccessor field : fields) - schemaBuilder.addField(field.id); + fields = fields0.toArray(new BinaryFieldAccessor[fields0.size()]); stableSchema = schemaBuilder.build(); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java index d166051d18f35..2c761925e68af 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryObjectBuilderImpl.java @@ -17,32 +17,32 @@ package org.apache.ignite.internal.binary.builder; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; import org.apache.ignite.binary.BinaryInvalidTypeException; import org.apache.ignite.binary.BinaryObject; import org.apache.ignite.binary.BinaryObjectBuilder; import org.apache.ignite.binary.BinaryObjectException; import org.apache.ignite.binary.BinaryType; -import org.apache.ignite.internal.binary.BinaryContext; import org.apache.ignite.internal.binary.BinaryMetadata; import org.apache.ignite.internal.binary.BinaryObjectImpl; -import org.apache.ignite.internal.binary.BinaryObjectOffheapImpl; +import org.apache.ignite.internal.binary.BinaryWriterExImpl; +import org.apache.ignite.internal.binary.GridBinaryMarshaller; +import org.apache.ignite.internal.binary.BinaryContext; import org.apache.ignite.internal.binary.BinarySchema; import org.apache.ignite.internal.binary.BinarySchemaRegistry; +import org.apache.ignite.internal.binary.BinaryObjectOffheapImpl; import org.apache.ignite.internal.binary.BinaryUtils; -import org.apache.ignite.internal.binary.BinaryWriterExImpl; -import org.apache.ignite.internal.binary.GridBinaryMarshaller; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import org.jetbrains.annotations.Nullable; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; + /** * */ @@ -522,7 +522,7 @@ private void ensureReadCacheInit() { Object val = val0 == null ? 
new BinaryValueWithType(BinaryUtils.typeByClass(Object.class), null) : val0; if (assignedVals == null) - assignedVals = new TreeMap<>(); + assignedVals = new LinkedHashMap<>(); Object oldVal = assignedVals.put(name, val); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java deleted file mode 100644 index 6bb1e1380da8a..0000000000000 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldOrderSelfTest.java +++ /dev/null @@ -1,98 +0,0 @@ -package org.apache.ignite.internal.binary; - -import org.apache.ignite.binary.BinaryObject; -import org.apache.ignite.configuration.IgniteConfiguration; -import org.apache.ignite.internal.IgniteEx; -import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; -import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; -import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; - -/** - * Test that field ordering doesn't change the schema. - */ -public class BinaryFieldOrderSelfTest extends GridCommonAbstractTest { - /** {@inheritDoc} */ - @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { - IgniteConfiguration cfg = super.getConfiguration(gridName); - - cfg.setMarshaller(new BinaryMarshaller()); - cfg.setDiscoverySpi(new TcpDiscoverySpi().setIpFinder(new TcpDiscoveryVmIpFinder(true))); - - return cfg; - } - - /** {@inheritDoc} */ - @Override protected void beforeTest() throws Exception { - startGrid(); - } - - /** {@inheritDoc} */ - @Override protected void afterTest() throws Exception { - stopAllGrids(); - } - - /** - * @throws Exception If failed. - */ - public void testEquals() throws Exception { - IgniteEx ignite = grid(); - - BinaryObject bo0 = ignite.binary().toBinary(new MyType(222, 333, 111)); - - BinaryObject bo1 = ignite.binary().builder(bo0.type().typeName()). - setField("b", 222). - setField("c", 333). - setField("a", 111). - hashCode(12345). - build(); - - BinaryObject bo2 = ignite.binary().builder(bo0.type().typeName()). - setField("a", 111). - setField("b", 222). - setField("c", 333). - hashCode(12345). - build(); - - assertEquals(12345, bo0.hashCode()); - assertEquals(12345, bo1.hashCode()); - assertEquals(12345, bo2.hashCode()); - - assertTrue(bo0.equals(bo1)); - assertTrue(bo0.equals(bo2)); - assertTrue(bo1.equals(bo2)); - } - - /** - */ - private static class MyType { - /** B. */ - private int b; - - /** C. */ - private int c; - - /** A. */ - private int a; - - /** - * @param b B. - * @param c C. - * @param a A. 
- */ - MyType(int b, int c, int a) { - this.b = b; - this.c = c; - this.a = a; - } - - /** {@inheritDoc} */ - @Override public boolean equals(Object obj) { - return super.equals(obj); - } - - /** {@inheritDoc} */ - @Override public int hashCode() { - return 12345; - } - } -} diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java index 50c6f0b95a95d..c1d9974837fb3 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java @@ -22,7 +22,6 @@ import org.apache.ignite.internal.binary.BinaryBasicNameMapperSelfTest; import org.apache.ignite.internal.binary.BinaryConfigurationConsistencySelfTest; import org.apache.ignite.internal.binary.BinaryEnumsSelfTest; -import org.apache.ignite.internal.binary.BinaryFieldOrderSelfTest; import org.apache.ignite.internal.binary.BinaryFieldsHeapSelfTest; import org.apache.ignite.internal.binary.BinaryFieldsOffheapSelfTest; import org.apache.ignite.internal.binary.BinaryFooterOffsetsHeapSelfTest; @@ -105,7 +104,6 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(GridBinaryAffinityKeySelfTest.class); suite.addTestSuite(GridBinaryWildcardsSelfTest.class); suite.addTestSuite(BinaryObjectToStringSelfTest.class); - suite.addTestSuite(BinaryFieldOrderSelfTest.class); // Tests for objects with non-compact footers. suite.addTestSuite(BinaryMarshallerNonCompactSelfTest.class); From 59b46d3e6ea073d054ca2262d676b055a74bbb1f Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Mon, 3 Oct 2016 10:40:04 +0300 Subject: [PATCH 63/69] IGNITE-2881: Fixed SPI queries. --- .../apache/ignite/cache/query/SpiQuery.java | 8 +- .../processors/cache/IgniteCacheProxy.java | 34 ++- .../cache/query/GridCacheQueryManager.java | 2 +- .../cache/query/IndexingSpiQuerySelfTest.java | 218 ++++++++++++++++++ .../query/IndexingSpiQueryTxSelfTest.java | 162 +++++++++++++ .../IgniteBinaryCacheQueryTestSuite.java | 1 - .../IgniteCacheQuerySelfTestSuite.java | 5 + 7 files changed, 416 insertions(+), 14 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQuerySelfTest.java create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQueryTxSelfTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/cache/query/SpiQuery.java b/modules/core/src/main/java/org/apache/ignite/cache/query/SpiQuery.java index 0c1143715897b..c3aa472080f24 100644 --- a/modules/core/src/main/java/org/apache/ignite/cache/query/SpiQuery.java +++ b/modules/core/src/main/java/org/apache/ignite/cache/query/SpiQuery.java @@ -58,13 +58,13 @@ public SpiQuery setArgs(Object... 
args) { } /** {@inheritDoc} */ - @Override public SqlQuery setPageSize(int pageSize) { - return (SqlQuery)super.setPageSize(pageSize); + @Override public SpiQuery setPageSize(int pageSize) { + return (SpiQuery)super.setPageSize(pageSize); } /** {@inheritDoc} */ - @Override public SqlQuery setLocal(boolean loc) { - return (SqlQuery)super.setLocal(loc); + @Override public SpiQuery setLocal(boolean loc) { + return (SpiQuery)super.setLocal(loc); } /** {@inheritDoc} */ diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheProxy.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheProxy.java index 81d4b49f882e6..58c7c9ce83817 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheProxy.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheProxy.java @@ -533,11 +533,11 @@ private QueryCursor query( @SuppressWarnings("unchecked") private QueryCursor> query(final Query filter, @Nullable ClusterGroup grp) throws IgniteCheckedException { - final CacheQuery> qry; + final CacheQuery qry; boolean isKeepBinary = opCtx != null && opCtx.isKeepBinary(); - final CacheQueryFuture> fut; + final CacheQueryFuture fut; if (filter instanceof TextQuery) { TextQuery p = (TextQuery)filter; @@ -561,8 +561,8 @@ else if (filter instanceof SpiQuery) { qry.projection(grp); fut = ctx.kernalContext().query().executeQuery(ctx, - new IgniteOutClosureX>>() { - @Override public CacheQueryFuture> applyx() throws IgniteCheckedException { + new IgniteOutClosureX>>() { + @Override public CacheQueryFuture> applyx() throws IgniteCheckedException { return qry.execute(((SpiQuery)filter).getArgs()); } }, false); @@ -577,21 +577,39 @@ else if (filter instanceof SpiQuery) { return new QueryCursorImpl<>(new GridCloseableIteratorAdapter>() { /** */ - private Map.Entry cur; + private Cache.Entry cur; @Override protected Entry onNext() throws IgniteCheckedException { if (!onHasNext()) throw new NoSuchElementException(); - Map.Entry e = cur; + Cache.Entry e = cur; cur = null; - return new CacheEntryImpl<>(e.getKey(), e.getValue()); + return e; } @Override protected boolean onHasNext() throws IgniteCheckedException { - return cur != null || (cur = fut.next()) != null; + if (cur != null) + return true; + + Object next = fut.next(); + + // Workaround a bug: if IndexingSpi is configured future represents Iterator + // instead of Iterator due to IndexingSpi interface. 
+ if (next == null) + return false; + + if (next instanceof Cache.Entry) + cur = (Cache.Entry)next; + else { + Map.Entry e = (Map.Entry)next; + + cur = new CacheEntryImpl(e.getKey(), e.getValue()); + } + + return true; } @Override protected void onClose() throws IgniteCheckedException { diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryManager.java index 9699f09eacfec..7bd1a51b314d3 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/GridCacheQueryManager.java @@ -690,7 +690,7 @@ private FieldsResult executeFieldsQuery(GridCacheQueryAdapter qry, @Nullable T2> resKey = null; - if (qry.clause() == null) { + if (qry.clause() == null && qry.type() != SPI) { assert !loc; throw new IgniteCheckedException("Received next page request after iterator was removed. " + diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQuerySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQuerySelfTest.java new file mode 100644 index 0000000000000..94b0c8aedbb2b --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQuerySelfTest.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.cache.query; + +import junit.framework.TestCase; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.IgniteTransactions; +import org.apache.ignite.Ignition; +import org.apache.ignite.cache.CacheAtomicityMode; +import org.apache.ignite.cache.query.QueryCursor; +import org.apache.ignite.cache.query.SpiQuery; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.processors.cache.CacheEntryImpl; +import org.apache.ignite.internal.transactions.IgniteTxHeuristicCheckedException; +import org.apache.ignite.spi.IgniteSpiAdapter; +import org.apache.ignite.spi.IgniteSpiException; +import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; +import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; +import org.apache.ignite.spi.indexing.IndexingQueryFilter; +import org.apache.ignite.spi.indexing.IndexingSpi; +import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.transactions.Transaction; +import org.apache.ignite.transactions.TransactionConcurrency; +import org.apache.ignite.transactions.TransactionIsolation; +import org.apache.ignite.transactions.TransactionState; +import org.jetbrains.annotations.Nullable; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.concurrent.Callable; +import javax.cache.Cache; + +/** + * Indexing Spi query test + */ +public class IndexingSpiQuerySelfTest extends TestCase { + /** {@inheritDoc} */ + @Override public void tearDown() throws Exception { + Ignition.stopAll(true); + } + + /** + * @throws Exception If failed. + */ + public void testSimpleIndexingSpi() throws Exception { + IgniteConfiguration cfg = configuration(); + + cfg.setIndexingSpi(new MyIndexingSpi()); + + Ignite ignite = Ignition.start(cfg); + + CacheConfiguration ccfg = new CacheConfiguration<>("test-cache"); + + ccfg.setIndexedTypes(Integer.class, Integer.class); + + IgniteCache cache = ignite.createCache(ccfg); + + for (int i = 0; i < 10; i++) + cache.put(i, i); + + QueryCursor> cursor = cache.query(new SpiQuery().setArgs(2, 5)); + + for (Cache.Entry entry : cursor) + System.out.println(entry); + } + + /** + * @throws Exception If failed. 
+ */ + @SuppressWarnings("ThrowableResultOfMethodCallIgnored") + public void testIndexingSpiFailure() throws Exception { + IgniteConfiguration cfg = configuration(); + + cfg.setIndexingSpi(new MyBrokenIndexingSpi()); + + Ignite ignite = Ignition.start(cfg); + + CacheConfiguration ccfg = new CacheConfiguration<>("test-cache"); + + ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL); + ccfg.setIndexedTypes(Integer.class, Integer.class); + + final IgniteCache cache = ignite.createCache(ccfg); + + final IgniteTransactions txs = ignite.transactions(); + + for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) { + for (final TransactionIsolation isolation : TransactionIsolation.values()) { + System.out.println("Run in transaction: " + concurrency + " " + isolation); + + GridTestUtils.assertThrowsWithCause(new Callable() { + @Override public Void call() throws Exception { + Transaction tx; + + try (Transaction tx0 = tx = txs.txStart(concurrency, isolation)) { + cache.put(1, 1); + + tx0.commit(); + } + + assertEquals(TransactionState.ROLLED_BACK, tx.state()); + return null; + } + }, IgniteTxHeuristicCheckedException.class); + } + } + } + + /** + * @return Configuration. + */ + private IgniteConfiguration configuration() { + IgniteConfiguration cfg = new IgniteConfiguration(); + + TcpDiscoveryVmIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true); + TcpDiscoverySpi disco = new TcpDiscoverySpi(); + + disco.setMaxMissedHeartbeats(Integer.MAX_VALUE); + + disco.setIpFinder(ipFinder); + + cfg.setDiscoverySpi(disco); + + return cfg; + } + + /** + * Indexing Spi implementation for test + */ + private static class MyIndexingSpi extends IgniteSpiAdapter implements IndexingSpi { + /** Index. */ + private final SortedMap idx = new TreeMap<>(); + + /** {@inheritDoc} */ + @Override public void spiStart(@Nullable String gridName) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void spiStop() throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public Iterator> query(@Nullable String spaceName, Collection params, + @Nullable IndexingQueryFilter filters) throws IgniteSpiException { + if (params.size() < 2) + throw new IgniteSpiException("Range parameters required."); + + Iterator paramsIt = params.iterator(); + + Object from = paramsIt.next(); + Object to = paramsIt.next(); + + SortedMap map = idx.subMap(from, to); + + Collection> res = new ArrayList<>(map.size()); + + for (Map.Entry entry : map.entrySet()) + res.add(new CacheEntryImpl<>(entry.getKey(), entry.getValue())); + + return res.iterator(); + } + + /** {@inheritDoc} */ + @Override public void store(@Nullable String spaceName, Object key, Object val, long expirationTime) + throws IgniteSpiException { + idx.put(key, val); + } + + /** {@inheritDoc} */ + @Override public void remove(@Nullable String spaceName, Object key) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void onSwap(@Nullable String spaceName, Object key) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void onUnswap(@Nullable String spaceName, Object key, Object val) throws IgniteSpiException { + // No-op. 
+ } + } + + /** + * Broken Indexing Spi implementation for test + */ + private class MyBrokenIndexingSpi extends MyIndexingSpi { + /** {@inheritDoc} */ + @Override public void store(@Nullable String spaceName, Object key, Object val, + long expirationTime) throws IgniteSpiException { + throw new IgniteSpiException("Test exception"); + } + } +} \ No newline at end of file diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQueryTxSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQueryTxSelfTest.java new file mode 100644 index 0000000000000..78ed1fda1dbc0 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/IndexingSpiQueryTxSelfTest.java @@ -0,0 +1,162 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.cache.query; + +import org.apache.ignite.IgniteCache; +import org.apache.ignite.IgniteTransactions; +import org.apache.ignite.cache.CacheAtomicityMode; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.internal.IgniteEx; +import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest; +import org.apache.ignite.internal.transactions.IgniteTxHeuristicCheckedException; +import org.apache.ignite.spi.IgniteSpiAdapter; +import org.apache.ignite.spi.IgniteSpiException; +import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; +import org.apache.ignite.spi.indexing.IndexingQueryFilter; +import org.apache.ignite.spi.indexing.IndexingSpi; +import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.transactions.Transaction; +import org.apache.ignite.transactions.TransactionConcurrency; +import org.apache.ignite.transactions.TransactionIsolation; +import org.apache.ignite.transactions.TransactionState; +import org.jetbrains.annotations.Nullable; + +import java.util.Collection; +import java.util.Iterator; +import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicInteger; +import javax.cache.Cache; + +/** + * Indexing Spi transactional query test + */ +public class IndexingSpiQueryTxSelfTest extends GridCacheAbstractSelfTest { + /** */ + private static AtomicInteger cnt; + + /** {@inheritDoc} */ + @Override protected int gridCount() { + return 4; + } + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + cnt = new AtomicInteger(); + + super.beforeTestsStarted(); + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { + IgniteConfiguration cfg = 
super.getConfiguration(gridName); + ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setForceServerMode(true); + + if (cnt.getAndIncrement() == 0) + cfg.setClientMode(true); + else { + cfg.setIndexingSpi(new MyBrokenIndexingSpi()); + + CacheConfiguration ccfg = cacheConfiguration(gridName); + ccfg.setName("test-cache"); + ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL); + + ccfg.setIndexedTypes(Integer.class, Integer.class); + + cfg.setCacheConfiguration(ccfg); + } + return cfg; + } + + /** + * @throws Exception If failed. + */ + @SuppressWarnings("ThrowableResultOfMethodCallIgnored") + public void testIndexingSpiWithTx() throws Exception { + IgniteEx ignite = grid(0); + + final IgniteCache cache = ignite.cache("test-cache"); + + final IgniteTransactions txs = ignite.transactions(); + + for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) { + for (final TransactionIsolation isolation : TransactionIsolation.values()) { + System.out.println("Run in transaction: " + concurrency + " " + isolation); + + GridTestUtils.assertThrowsWithCause(new Callable() { + @Override public Void call() throws Exception { + Transaction tx; + + try (Transaction tx0 = tx = txs.txStart(concurrency, isolation)) { + cache.put(1, 1); + + tx0.commit(); + } + + assertEquals(TransactionState.ROLLED_BACK, tx.state()); + + return null; + } + }, IgniteTxHeuristicCheckedException.class); + } + } + } + + /** + * Indexing SPI implementation for test + */ + private static class MyBrokenIndexingSpi extends IgniteSpiAdapter implements IndexingSpi { + /** {@inheritDoc} */ + @Override public void spiStart(@Nullable String gridName) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void spiStop() throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public Iterator> query(@Nullable String spaceName, Collection params, + @Nullable IndexingQueryFilter filters) throws IgniteSpiException { + return null; + } + + /** {@inheritDoc} */ + @Override public void store(@Nullable String spaceName, Object key, Object val, long expirationTime) + throws IgniteSpiException { + throw new IgniteSpiException("Test exception"); + } + + /** {@inheritDoc} */ + @Override public void remove(@Nullable String spaceName, Object key) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void onSwap(@Nullable String spaceName, Object key) throws IgniteSpiException { + // No-op. + } + + /** {@inheritDoc} */ + @Override public void onUnswap(@Nullable String spaceName, Object key, Object val) throws IgniteSpiException { + // No-op. 
+ } + } +} diff --git a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinaryCacheQueryTestSuite.java b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinaryCacheQueryTestSuite.java index 761d4bd4cc7dc..3cb603c6d0ae7 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinaryCacheQueryTestSuite.java +++ b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinaryCacheQueryTestSuite.java @@ -24,7 +24,6 @@ import org.apache.ignite.internal.processors.cache.IgniteCacheBinaryObjectsScanSelfTest; import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheBinaryDuplicateIndexObjectPartitionedAtomicSelfTest; import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheBinaryDuplicateIndexObjectPartitionedTransactionalSelfTest; -import org.apache.ignite.internal.processors.cache.query.continuous.CacheContinuousQueryLostPartitionTest; import org.apache.ignite.testframework.config.GridTestProperties; /** diff --git a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java index e7f55a1faae2c..21741478f945e 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java +++ b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java @@ -60,6 +60,8 @@ import org.apache.ignite.internal.processors.cache.local.IgniteCacheLocalAtomicQuerySelfTest; import org.apache.ignite.internal.processors.cache.local.IgniteCacheLocalQuerySelfTest; import org.apache.ignite.internal.processors.cache.query.GridCacheQueryTransformerSelfTest; +import org.apache.ignite.internal.processors.cache.query.IndexingSpiQuerySelfTest; +import org.apache.ignite.internal.processors.cache.query.IndexingSpiQueryTxSelfTest; import org.apache.ignite.internal.processors.query.IgniteSqlSchemaIndexingTest; import org.apache.ignite.internal.processors.query.IgniteSqlSplitterSelfTest; import org.apache.ignite.internal.processors.query.h2.sql.GridQueryParsingTest; @@ -127,6 +129,9 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(IgniteBinaryObjectQueryArgumentsOffheapLocalTest.class); suite.addTestSuite(IgniteBinaryObjectLocalQueryArgumentsTest.class); + suite.addTestSuite(IndexingSpiQuerySelfTest.class); + suite.addTestSuite(IndexingSpiQueryTxSelfTest.class); + return suite; } } From 0b66d2d7b695cf370a4b8a717844ad67742c6090 Mon Sep 17 00:00:00 2001 From: sboikov Date: Mon, 3 Oct 2016 12:04:03 +0300 Subject: [PATCH 64/69] ignite-3601 Do not check version on commit for read-only serializable transactions. 
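
With this change a transaction started with OPTIMISTIC concurrency that performs only reads no
longer enlists the read entries for a commit-time version check, so concurrent updates of the read
keys do not fail the commit with TransactionOptimisticException, and such transactions can take the
'fast finish' path. Below is a minimal usage sketch, added here only as illustration and not part of
the patch; it assumes a node is already started with a TRANSACTIONAL cache named "cache":

    import org.apache.ignite.Ignite;
    import org.apache.ignite.IgniteCache;
    import org.apache.ignite.Ignition;
    import org.apache.ignite.transactions.Transaction;

    import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
    import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE;

    public class ReadOnlySerializableTxExample {
        public static void main(String[] args) {
            // Assumes a node is already running with a TRANSACTIONAL cache named "cache".
            Ignite ignite = Ignition.ignite();

            IgniteCache<Integer, Integer> cache = ignite.cache("cache");

            cache.put(1, 1);

            try (Transaction tx = ignite.transactions().txStart(OPTIMISTIC, SERIALIZABLE)) {
                Integer val = cache.get(1); // Only reads, no writes are enlisted.

                // Before this change a concurrent update of key 1 could fail the commit
                // with TransactionOptimisticException; a read-only serializable
                // transaction now commits without the version check.
                tx.commit();
            }
        }
    }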
--- .../colocated/GridDhtDetachedCacheEntry.java | 5 + .../distributed/near/GridNearTxLocal.java | 2 +- .../transactions/IgniteTxLocalAdapter.java | 62 ++++--- .../cache/CacheGetEntryAbstractTest.java | 38 +++++ .../cache/CachePutIfAbsentTest.java | 161 ++++++++++++++++++ .../CacheSerializableTransactionsTest.java | 70 +++----- .../cache/CacheTxFastFinishTest.java | 2 +- .../testsuites/IgniteCacheTestSuite4.java | 2 + 8 files changed, 276 insertions(+), 66 deletions(-) create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CachePutIfAbsentTest.java diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/colocated/GridDhtDetachedCacheEntry.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/colocated/GridDhtDetachedCacheEntry.java index db9113446fb8f..2e055604b2687 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/colocated/GridDhtDetachedCacheEntry.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/colocated/GridDhtDetachedCacheEntry.java @@ -97,4 +97,9 @@ public void resetFromPrimary(CacheObject val, GridCacheVersion ver) { // No-op for detached cache entry. return true; } + + /** {@inheritDoc} */ + @Override public int partition() { + return cctx.affinity().partition(key); + } } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearTxLocal.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearTxLocal.java index 410baf837f34c..ed37059b20cb8 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearTxLocal.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearTxLocal.java @@ -953,7 +953,7 @@ void readyNearLock(IgniteTxEntry txEntry, * @return {@code True} if 'fast finish' path can be used for transaction completion. */ private boolean fastFinish() { - return writeMap().isEmpty() && ((optimistic() && !serializable()) || readMap().isEmpty()); + return writeMap().isEmpty() && (optimistic() || readMap().isEmpty()); } /** diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxLocalAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxLocalAdapter.java index fe695368117c3..6d21dcfb716fd 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxLocalAdapter.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxLocalAdapter.java @@ -2269,7 +2269,16 @@ private IgniteInternalFuture loadMissing( addInvokeResult(e, cacheVal, ret, ver); } else { - boolean success = !hasFilters || isAll(e.context(), key, cacheVal, filter); + boolean success; + + if (hasFilters) { + success = isAll(e.context(), key, cacheVal, filter); + + if (!success) + e.value(cacheVal, false, false); + } + else + success = true; ret.set(cacheCtx, cacheVal, success, keepBinary); } @@ -2411,25 +2420,43 @@ private boolean enlistWriteEntry(GridCacheContext cacheCtx, else old = retval ? entry.rawGetOrUnmarshal(false) : entry.rawGet(); + final GridCacheOperation op = lockOnly ? NOOP : rmv ? DELETE : + entryProcessor != null ? TRANSFORM : old != null ? 
UPDATE : CREATE; + if (old != null && hasFilters && !filter(entry.context(), cacheKey, old, filter)) { ret.set(cacheCtx, old, false, keepBinary); if (!readCommitted()) { - // Enlist failed filters as reads for non-read-committed mode, - // so future ops will get the same values. - txEntry = addEntry(READ, - old, - null, - null, - entry, - null, - CU.empty0(), - false, - -1L, - -1L, - null, - skipStore, - keepBinary); + if (optimistic() && serializable()) { + txEntry = addEntry(op, + old, + entryProcessor, + invokeArgs, + entry, + expiryPlc, + filter, + true, + drTtl, + drExpireTime, + drVer, + skipStore, + keepBinary); + } + else { + txEntry = addEntry(READ, + old, + null, + null, + entry, + null, + CU.empty0(), + false, + -1L, + -1L, + null, + skipStore, + keepBinary); + } txEntry.markValid(); @@ -2446,9 +2473,6 @@ private boolean enlistWriteEntry(GridCacheContext cacheCtx, break; // While. } - final GridCacheOperation op = lockOnly ? NOOP : rmv ? DELETE : - entryProcessor != null ? TRANSFORM : old != null ? UPDATE : CREATE; - txEntry = addEntry(op, cacheCtx.toCacheObject(val), entryProcessor, diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheGetEntryAbstractTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheGetEntryAbstractTest.java index 34480a24e5b30..2eab6d9f0393e 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheGetEntryAbstractTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheGetEntryAbstractTest.java @@ -40,6 +40,7 @@ import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; +import org.apache.ignite.transactions.TransactionOptimisticException; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC; @@ -52,6 +53,7 @@ import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; +import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE; /** * Test getEntry and getEntries methods. @@ -247,6 +249,10 @@ private void test(CacheConfiguration cfg, final boolean oneEntry) throws Excepti testConcurrentTx(cache, PESSIMISTIC, REPEATABLE_READ, oneEntry); testConcurrentTx(cache, PESSIMISTIC, READ_COMMITTED, oneEntry); + + testConcurrentOptimisticTxGet(cache, REPEATABLE_READ); + testConcurrentOptimisticTxGet(cache, READ_COMMITTED); + testConcurrentOptimisticTxGet(cache, SERIALIZABLE); } } finally { @@ -254,6 +260,38 @@ private void test(CacheConfiguration cfg, final boolean oneEntry) throws Excepti } } + /** + * @param cache Cache. + * @param txIsolation Transaction isolation. + * @throws Exception If failed. 
+ */ + private void testConcurrentOptimisticTxGet(final IgniteCache cache, + final TransactionIsolation txIsolation) throws Exception { + GridTestUtils.runMultiThreaded(new Runnable() { + @Override public void run() { + final int key = 42; + + IgniteTransactions txs = grid(0).transactions(); + + cache.put(key, new TestValue(key)); + + long stopTime = System.currentTimeMillis() + 3000; + + while (System.currentTimeMillis() < stopTime) { + try (Transaction tx = txs.txStart(OPTIMISTIC, txIsolation)) { + cache.get(key); + + tx.commit(); + } + catch (TransactionOptimisticException e) { + assertTrue("Should not throw optimistic exception in only read TX. Tx isolation: " + + txIsolation, false); + } + } + } + }, 10, "tx-thread"); + } + /** * @param cache Cache. * @param txConcurrency Transaction concurrency. diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CachePutIfAbsentTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CachePutIfAbsentTest.java new file mode 100644 index 0000000000000..6ed90494d822f --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CachePutIfAbsentTest.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.cache; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ThreadLocalRandom; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteCache; +import org.apache.ignite.IgniteTransactions; +import org.apache.ignite.cache.CacheMode; +import org.apache.ignite.cache.CacheWriteSynchronizationMode; +import org.apache.ignite.configuration.CacheConfiguration; +import org.apache.ignite.configuration.IgniteConfiguration; +import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; +import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder; +import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; +import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.apache.ignite.transactions.Transaction; +import org.apache.ignite.transactions.TransactionConcurrency; +import org.apache.ignite.transactions.TransactionIsolation; + +import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; +import static org.apache.ignite.cache.CacheMode.PARTITIONED; +import static org.apache.ignite.cache.CacheMode.REPLICATED; +import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; + +/** + * + */ +public class CachePutIfAbsentTest extends GridCommonAbstractTest { + /** */ + private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); + + /** */ + private static final int SRVS = 4; + + /** {@inheritDoc} */ + @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { + IgniteConfiguration cfg = super.getConfiguration(gridName); + + ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(IP_FINDER); + + return cfg; + } + + /** {@inheritDoc} */ + @Override protected void beforeTestsStarted() throws Exception { + super.beforeTestsStarted(); + + startGridsMultiThreaded(SRVS); + } + + /** {@inheritDoc} */ + @Override protected void afterTestsStopped() throws Exception { + super.afterTestsStopped(); + + stopAllGrids(); + } + + /** {@inheritDoc} */ + @Override protected long getTestTimeout() { + return 5 * 60_000; + } + + /** + * @return Cache configurations. + */ + private List> cacheConfigurations() { + List> ccfgs = new ArrayList<>(); + + ccfgs.add(cacheConfiguration(REPLICATED, FULL_SYNC, 0)); + + ccfgs.add(cacheConfiguration(PARTITIONED, FULL_SYNC, 0)); + ccfgs.add(cacheConfiguration(PARTITIONED, FULL_SYNC, 1)); + ccfgs.add(cacheConfiguration(PARTITIONED, FULL_SYNC, 2)); + + return ccfgs; + } + + /** + * @param cacheMode Cache mode. + * @param syncMode Write synchronization mode. + * @param backups Number of backups. + * @return Cache configuration. + */ + private CacheConfiguration cacheConfiguration( + CacheMode cacheMode, + CacheWriteSynchronizationMode syncMode, + int backups) { + CacheConfiguration ccfg = new CacheConfiguration<>(); + + ccfg.setCacheMode(cacheMode); + ccfg.setAtomicityMode(TRANSACTIONAL); + ccfg.setWriteSynchronizationMode(syncMode); + + if (cacheMode == PARTITIONED) + ccfg.setBackups(backups); + + return ccfg; + } + + /** + * @throws Exception If failed. 
+ */ + public void testTxConflictGetAndPutIfAbsent() throws Exception { + Ignite ignite0 = ignite(0); + + final IgniteTransactions txs = ignite0.transactions(); + + for (CacheConfiguration ccfg : cacheConfigurations()) { + try { + IgniteCache cache = ignite0.createCache(ccfg); + + ThreadLocalRandom rnd = ThreadLocalRandom.current(); + + for (int i = 0; i < 10; i++) { + Integer key = rnd.nextInt(10_000); + + cache.put(key, 2); + + for (TransactionConcurrency concurrency : TransactionConcurrency.values()) { + for (TransactionIsolation isolation : TransactionIsolation.values()) { + try (Transaction tx = txs.txStart(concurrency, isolation)) { + Object old = cache.getAndPutIfAbsent(key, 3); + + assertEquals(2, old); + + Object val = cache.get(key); + + assertEquals(2, val); + + tx.commit(); + } + + assertEquals((Integer)2, cache.get(key)); + } + } + } + } + finally { + ignite0.destroyCache(ccfg.getName()); + } + } + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSerializableTransactionsTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSerializableTransactionsTest.java index 3d4f8502d20f1..6a73f79dfc303 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSerializableTransactionsTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheSerializableTransactionsTest.java @@ -709,6 +709,7 @@ public void testTxConflictReadEntry2() throws Exception { /** * @param noVal If {@code true} there is no cache value when read in tx. + * @param needVer If {@code true} then gets entry, otherwise just value. * @throws Exception If failed. */ private void txConflictRead(boolean noVal, boolean needVer) throws Exception { @@ -735,28 +736,21 @@ private void txConflictRead(boolean noVal, boolean needVer) throws Exception { cache.put(key, expVal); } - try { - try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { - if (needVer) { - CacheEntry val = cache.getEntry(key); - - assertEquals(expVal, val == null ? null : val.getValue()); - } - else { - Integer val = cache.get(key); - - assertEquals(expVal, val); - } + try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { + if (needVer) { + CacheEntry val = cache.getEntry(key); - updateKey(cache, key, 1); + assertEquals(expVal, val == null ? null : val.getValue()); + } + else { + Integer val = cache.get(key); - tx.commit(); + assertEquals(expVal, val); } - fail(); - } - catch (TransactionOptimisticException e) { - log.info("Expected exception: " + e); + updateKey(cache, key, 1); + + tx.commit(); } checkValue(key, 1, cache.getName()); @@ -2625,21 +2619,14 @@ private void rollbackNearCacheRead(boolean near) throws Exception { cache0.put(key2, -1); cache0.put(key3, -1); - try { - try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { - cache.get(key1); - cache.get(key2); - cache.get(key3); - - updateKey(near ? cache : cache0, key2, -2); + try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { + cache.get(key1); + cache.get(key2); + cache.get(key3); - tx.commit(); - } + updateKey(near ? 
cache : cache0, key2, -2); - fail(); - } - catch (TransactionOptimisticException e) { - log.info("Expected exception: " + e); + tx.commit(); } checkValue(key1, -1, cacheName); @@ -2890,23 +2877,16 @@ public void testCrossCacheTx() throws Exception { checkValue(key1, newVal, CACHE1); checkValue(key2, newVal, CACHE2); - try { - try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { - Object val1 = cache1.get(key1); - Object val2 = cache2.get(key2); - - assertEquals(newVal, val1); - assertEquals(newVal, val2); + try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { + Object val1 = cache1.get(key1); + Object val2 = cache2.get(key2); - updateKey(cache2, key2, newVal); + assertEquals(newVal, val1); + assertEquals(newVal, val2); - tx.commit(); - } + updateKey(cache2, key2, newVal); - fail(); - } - catch (TransactionOptimisticException e) { - log.info("Expected exception: " + e); + tx.commit(); } try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheTxFastFinishTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheTxFastFinishTest.java index 35b14055066d4..f9c66834d088e 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheTxFastFinishTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/CacheTxFastFinishTest.java @@ -173,7 +173,7 @@ private void fastFinishTx(Ignite ignite) { try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { cache.get(i); - checkNormalTxFinish(tx, commit); + checkFastTxFinish(tx, commit); } try (Transaction tx = txs.txStart(PESSIMISTIC, REPEATABLE_READ)) { diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java index c494e732b4f66..2b446bbea8e6d 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java @@ -28,6 +28,7 @@ import org.apache.ignite.internal.processors.cache.CacheGetEntryPessimisticRepeatableReadSeltTest; import org.apache.ignite.internal.processors.cache.CacheGetEntryPessimisticSerializableSeltTest; import org.apache.ignite.internal.processors.cache.CacheOffheapMapEntrySelfTest; +import org.apache.ignite.internal.processors.cache.CachePutIfAbsentTest; import org.apache.ignite.internal.processors.cache.CacheReadThroughAtomicRestartSelfTest; import org.apache.ignite.internal.processors.cache.CacheReadThroughLocalAtomicRestartSelfTest; import org.apache.ignite.internal.processors.cache.CacheReadThroughLocalRestartSelfTest; @@ -326,6 +327,7 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(IgniteCachePrimarySyncTest.class); suite.addTestSuite(IgniteTxCachePrimarySyncTest.class); suite.addTestSuite(IgniteTxCacheWriteSynchronizationModesMultithreadedTest.class); + suite.addTestSuite(CachePutIfAbsentTest.class); suite.addTestSuite(MarshallerCacheJobRunNodeRestartTest.class); From 9f211e413332931f0fb1190744ddd7c7f38fd213 Mon Sep 17 00:00:00 2001 From: tledkov-gridgain Date: Mon, 3 Oct 2016 12:26:12 +0300 Subject: [PATCH 65/69] IGNITE-3645: IGFS: Local secondary: Implemented update() operation. This closes #1003. 
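
The local secondary file system now applies IGFS properties (POSIX group and permission) to the
underlying files, so update(), mkdirs(), create() and append() propagate them through
PosixFileAttributeView instead of update() throwing UnsupportedOperationException. Below is a
minimal usage sketch, added here only as illustration and not part of the patch; it assumes a node
is already started with an IGFS instance named "igfs" backed by LocalIgfsSecondaryFileSystem, an
existing path /dir/file, and a hypothetical group "igfs_grp_0"; IgfsUtils is an internal class
referenced here only for the property name constants:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.ignite.IgniteFileSystem;
    import org.apache.ignite.Ignition;
    import org.apache.ignite.igfs.IgfsFile;
    import org.apache.ignite.igfs.IgfsPath;
    import org.apache.ignite.internal.processors.igfs.IgfsUtils;

    public class IgfsLocalSecondaryUpdateExample {
        public static void main(String[] args) {
            // Assumes a node is already running with an IGFS instance named "igfs"
            // that uses LocalIgfsSecondaryFileSystem as its secondary file system.
            IgniteFileSystem fs = Ignition.ignite().fileSystem("igfs");

            Map<String, String> props = new HashMap<>();

            props.put(IgfsUtils.PROP_GROUP_NAME, "igfs_grp_0"); // Hypothetical group name.
            props.put(IgfsUtils.PROP_PERMISSION, "0555");

            // With this patch the properties are applied to the local file's POSIX
            // attributes; previously update() threw UnsupportedOperationException.
            IgfsFile updated = fs.update(new IgfsPath("/dir/file"), props);

            assert updated != null;
        }
    }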
--- .../local/LocalIgfsSecondaryFileSystem.java | 90 ++- .../internal/processors/igfs/IgfsImpl.java | 11 +- .../processors/igfs/IgfsMetaManager.java | 1 - .../local/LocalFileSystemIgfsFile.java | 9 +- .../secondary/local/LocalFileSystemUtils.java | 142 +++++ .../igfs/IgfsAbstractBaseSelfTest.java | 19 + .../processors/igfs/IgfsAbstractSelfTest.java | 5 +- .../igfs/IgfsDualAbstractSelfTest.java | 42 +- ...condaryFileSystemDualAbstractSelfTest.java | 26 +- ...LocalSecondaryFileSystemProxySelfTest.java | 5 - ...fsLocalSecondaryFileSystemTestAdapter.java | 27 +- .../igfs/benchmark/IgfsBenchmark.java | 561 ++++++++++++++++++ ...opIgfsSecondaryFileSystemDelegateImpl.java | 3 +- .../impl/igfs/HadoopIgfsProperties.java | 3 + 14 files changed, 900 insertions(+), 44 deletions(-) create mode 100644 modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemUtils.java create mode 100644 modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/benchmark/IgfsBenchmark.java diff --git a/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java b/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java index 1775db6e430d9..ef00beada796c 100644 --- a/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java +++ b/modules/core/src/main/java/org/apache/ignite/igfs/secondary/local/LocalIgfsSecondaryFileSystem.java @@ -26,8 +26,10 @@ import org.apache.ignite.igfs.IgfsPathNotFoundException; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem; import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystemPositionedReadable; +import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.processors.igfs.secondary.local.LocalFileSystemIgfsFile; import org.apache.ignite.internal.processors.igfs.secondary.local.LocalFileSystemSizeVisitor; +import org.apache.ignite.internal.processors.igfs.secondary.local.LocalFileSystemUtils; import org.apache.ignite.internal.processors.igfs.secondary.local.LocalIgfsSecondaryFileSystemPositionedReadable; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; @@ -44,6 +46,7 @@ import java.nio.file.LinkOption; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.PosixFileAttributes; import java.util.Collection; import java.util.Collections; import java.util.Map; @@ -76,7 +79,14 @@ private IgfsException handleSecondaryFsError(IOException e, String msg) { /** {@inheritDoc} */ @Nullable @Override public IgfsFile update(IgfsPath path, Map props) { - throw new UnsupportedOperationException("Update operation is not yet supported."); + File f = fileForPath(path); + + if (!f.exists()) + return null; + + updatePropertiesIfNeeded(path, props); + + return info(path); } /** {@inheritDoc} */ @@ -157,6 +167,8 @@ private boolean deleteRecursive(File f, boolean deleteIfExists) { /** {@inheritDoc} */ @Override public void mkdirs(IgfsPath path, @Nullable Map props) { mkdirs(path); + + updatePropertiesIfNeeded(path, props); } /** @@ -258,7 +270,23 @@ private boolean mkdirs0(@Nullable File dir) { /** {@inheritDoc} */ @Override public OutputStream create(IgfsPath path, int bufSize, boolean overwrite, int replication, long blockSize, @Nullable Map props) { - return create0(path, overwrite); + OutputStream os = create0(path, overwrite); + + try { + 
updatePropertiesIfNeeded(path, props); + + return os; + } + catch (Exception err) { + try { + os.close(); + } + catch (IOException closeErr) { + err.addSuppressed(closeErr); + } + + throw err; + } } /** {@inheritDoc} */ @@ -269,11 +297,30 @@ private boolean mkdirs0(@Nullable File dir) { boolean exists = file.exists(); - if (exists) - return new FileOutputStream(file, true); + if (exists) { + OutputStream os = new FileOutputStream(file, true); + + try { + updatePropertiesIfNeeded(path, props); + + return os; + } + catch (Exception err) { + try { + os.close(); + + throw err; + } + catch (IOException closeErr) { + err.addSuppressed(closeErr); + + throw err; + } + } + } else { if (create) - return create0(path, false); + return create(path, bufSize, false, 0, 0, props); else throw new IgfsPathNotFoundException("Failed to append to file because it doesn't exist: " + path); } @@ -285,17 +332,21 @@ private boolean mkdirs0(@Nullable File dir) { /** {@inheritDoc} */ @Override public IgfsFile info(final IgfsPath path) { - File f = fileForPath(path); + File file = fileForPath(path); - if (!f.exists()) + if (!file.exists()) return null; - boolean isDir = f.isDirectory(); + boolean isDir = file.isDirectory(); + + PosixFileAttributes attrs = LocalFileSystemUtils.posixAttributes(file); + + Map props = LocalFileSystemUtils.posixAttributesToMap(attrs); if (isDir) - return new LocalFileSystemIgfsFile(path, false, true, 0, f.lastModified(), 0, null); + return new LocalFileSystemIgfsFile(path, false, true, 0, file.lastModified(), 0, props); else - return new LocalFileSystemIgfsFile(path, f.isFile(), false, 0, f.lastModified(), f.length(), null); + return new LocalFileSystemIgfsFile(path, file.isFile(), false, 0, file.lastModified(), file.length(), props); } /** {@inheritDoc} */ @@ -412,4 +463,23 @@ private OutputStream create0(IgfsPath path, boolean overwrite) { throw handleSecondaryFsError(e, "Failed to create file [path=" + path + ", overwrite=" + overwrite + ']'); } } + + /** + * Update path properties if needed. + * + * @param path IGFS path + * @param props Properties map. + */ + private void updatePropertiesIfNeeded(IgfsPath path, Map props) { + if (props == null || props.isEmpty()) + return; + + File file = fileForPath(path); + + if (!file.exists()) + throw new IgfsPathNotFoundException("Failed to update properties for path: " + path); + + LocalFileSystemUtils.updateProperties(file, props.get(IgfsUtils.PROP_GROUP_NAME), + props.get(IgfsUtils.PROP_PERMISSION)); + } } \ No newline at end of file diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java index 1dd12d913a590..1c985c0d6ed5c 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java @@ -152,9 +152,6 @@ public final class IgfsImpl implements IgfsEx { /** Writers map. */ private final ConcurrentHashMap8 workerMap = new ConcurrentHashMap8<>(); - /** Local metrics holder. */ - private final IgfsLocalMetrics metrics = new IgfsLocalMetrics(); - /** Client log directory. */ private volatile String logDir; @@ -765,13 +762,11 @@ else if (val) if (log.isDebugEnabled()) log.debug("Make directories: " + path); - final Map props0 = props == null ? 
DFLT_DIR_META : new HashMap<>(props); - IgfsMode mode = resolveMode(path); switch (mode) { case PRIMARY: - meta.mkdirs(path, props0); + meta.mkdirs(path, props == null ? DFLT_DIR_META : new HashMap<>(props)); break; @@ -779,12 +774,12 @@ else if (val) case DUAL_SYNC: await(path); - meta.mkdirsDual(secondaryFs, path, props0); + meta.mkdirsDual(secondaryFs, path, props); break; case PROXY: - secondaryFs.mkdirs(path, props0); + secondaryFs.mkdirs(path, props); break; } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsMetaManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsMetaManager.java index 89cadced7ed08..ffa502b0b75ea 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsMetaManager.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsMetaManager.java @@ -2226,7 +2226,6 @@ public boolean mkdirsDual(final IgfsSecondaryFileSystem fs, final IgfsPath path, try { assert fs != null; assert path != null; - assert props != null; if (path.parent() == null) return true; // No additional handling for root directory is needed. diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemIgfsFile.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemIgfsFile.java index 5abe4eb49b154..400ac34e393ae 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemIgfsFile.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemIgfsFile.java @@ -17,14 +17,13 @@ package org.apache.ignite.internal.processors.igfs.secondary.local; +import java.util.Collections; +import java.util.Map; import org.apache.ignite.igfs.IgfsFile; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.jetbrains.annotations.Nullable; -import java.util.Collections; -import java.util.Map; - /** * Implementation of the IgfsFile interface for the local filesystem. */ @@ -45,7 +44,7 @@ public class LocalFileSystemIgfsFile implements IgfsFile { private final long len; /** Properties. */ - private final Map props; + private Map props; /** * @param path IGFS path. @@ -63,7 +62,7 @@ public LocalFileSystemIgfsFile(IgfsPath path, boolean isFile, boolean isDir, int assert !isDir || len == 0 : "length must be 0 for dirs. [length=" + len + ']'; this.path = path; - this.flags = IgfsUtils.flags(isDir, isFile); + flags = IgfsUtils.flags(isDir, isFile); this.blockSize = blockSize; this.modTime = modTime; this.len = len; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemUtils.java new file mode 100644 index 0000000000000..59383c5a42576 --- /dev/null +++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemUtils.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.igfs.secondary.local; + +import org.apache.ignite.igfs.IgfsException; +import org.apache.ignite.internal.processors.igfs.IgfsUtils; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.jetbrains.annotations.Nullable; + +import java.io.File; +import java.io.IOException; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.attribute.GroupPrincipal; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.UserPrincipalLookupService; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * Various utility methods for local file system. + */ +public class LocalFileSystemUtils { + /** Posix file permissions. */ + public static final PosixFilePermission[] POSIX_PERMISSIONS = PosixFilePermission.values(); + + /** + * Update file properties. + * + * @param file File. + * @param grp Group. + * @param perm Permissions. + */ + public static void updateProperties(File file, String grp, String perm) { + PosixFileAttributeView attrs = Files.getFileAttributeView(file.toPath(), PosixFileAttributeView.class); + + if (attrs == null) + throw new UnsupportedOperationException("Posix file attributes not available"); + + if (grp != null) { + try { + UserPrincipalLookupService lookupService = FileSystems.getDefault().getUserPrincipalLookupService(); + + GroupPrincipal grp0 = lookupService.lookupPrincipalByGroupName(grp); + + attrs.setGroup(grp0); + } + catch (IOException e) { + throw new IgfsException("Update the '" + IgfsUtils.PROP_GROUP_NAME + "' property is failed.", e); + } + } + + if (perm != null) { + int perm0 = Integer.parseInt(perm, 8); + + Set permSet = new HashSet<>(9); + + for (int i = 0; i < LocalFileSystemUtils.POSIX_PERMISSIONS.length; ++i) { + if ((perm0 & (1 << i)) != 0) + permSet.add(LocalFileSystemUtils.POSIX_PERMISSIONS[i]); + } + + try { + attrs.setPermissions(permSet); + } + catch (IOException e) { + throw new IgfsException("Update the '" + IgfsUtils.PROP_PERMISSION + "' property is failed.", e); + } + } + } + + /** + * Get POSIX attributes for file. + * + * @param file File. + */ + @Nullable public static PosixFileAttributes posixAttributes(File file) { + PosixFileAttributes attrs = null; + + try { + PosixFileAttributeView view = Files.getFileAttributeView(file.toPath(), PosixFileAttributeView.class); + + if (view != null) + attrs = view.readAttributes(); + } + catch (IOException e) { + throw new IgfsException("Failed to read POSIX attributes: " + file.getAbsolutePath(), e); + } + + return attrs; + } + + /** + * Convert POSIX attributes to property map. + * + * @param attrs Attributes view. + * @return IGFS properties map. 
+ */ + public static Map posixAttributesToMap(PosixFileAttributes attrs) { + if (attrs == null) + return null; + + Map props = U.newHashMap(3); + + props.put(IgfsUtils.PROP_USER_NAME, attrs.owner().getName()); + props.put(IgfsUtils.PROP_GROUP_NAME, attrs.group().getName()); + + int perm = 0; + + for(PosixFilePermission p : attrs.permissions()) + perm |= (1 << 8 - p.ordinal()); + + props.put(IgfsUtils.PROP_PERMISSION, '0' + Integer.toOctalString(perm)); + + return props; + } + + /** + * Private constructor. + */ + private LocalFileSystemUtils() { + // No-op. + } +} diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java index 79dc57b630ee9..374d3d39f2f98 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractBaseSelfTest.java @@ -868,6 +868,25 @@ protected void checkFileContent(IgfsSecondaryFileSystemTestAdapter uni, String p } } + /** + * Create map with properties. + * + * @param grpName Group name. + * @param perm Permission. + * @return Map with properties. + */ + protected Map properties(@Nullable String grpName, @Nullable String perm) { + Map props = new HashMap<>(); + + if (grpName != null) + props.put(IgfsUtils.PROP_GROUP_NAME, grpName); + + if (perm != null) + props.put(IgfsUtils.PROP_PERMISSION, perm); + + return props; + } + /** * Create map with properties. * diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java index 7058954f01770..128239dacc793 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsAbstractSelfTest.java @@ -532,7 +532,10 @@ public void testMkdirs() throws Exception { */ @SuppressWarnings("ConstantConditions") public void testMkdirsParentRoot() throws Exception { - Map props = properties(null, null, "0555"); // mkdirs command doesn't propagate user info. + Map props = null; + + if (permissionsSupported()) + props = properties(null, null, "0555"); // mkdirs command doesn't propagate user info. igfs.mkdirs(DIR, props); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsDualAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsDualAbstractSelfTest.java index 742d20c76ec7d..1d6010dfd83d1 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsDualAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsDualAbstractSelfTest.java @@ -970,7 +970,10 @@ public void testMoveRenameDirectorySourceParentRootDestinationMissing() throws E * @throws Exception If failed. */ public void testMkdirsParentPathMissingPartially() throws Exception { - Map props = properties(null, null, "0555"); // mkdirs command doesn't propagate user info. + Map props = null; + + if (permissionsSupported()) + props = properties(null, null, "0555"); // mkdirs command doesn't propagate user info. 
create(igfsSecondary, paths(DIR, SUBDIR), null); create(igfs, paths(DIR), null); @@ -997,7 +1000,10 @@ public void testMkdirsParentPathMissingPartially() throws Exception { * @throws Exception If failed. */ public void testMkdrisParentPathMissing() throws Exception { - Map props = properties(null, null, "0555"); // mkdirs command doesn't propagate user info. + Map props = null; + + if (permissionsSupported()) + props = properties(null, null, "0555"); // mkdirs command doesn't propagate user info. create(igfsSecondary, paths(DIR, SUBDIR), null); create(igfs, null, null); @@ -1131,11 +1137,19 @@ public void testUpdatePathMissing() throws Exception { * @throws Exception If failed. */ public void testUpdateParentRootPathMissing() throws Exception { + doUpdateParentRootPathMissing(properties("owner", "group", "0555")); + } + + /** + * Test update when parent is the root and the path being updated is missing locally. + * + * @param props Properties. + * @throws Exception If failed. + */ + protected void doUpdateParentRootPathMissing(Map props) throws Exception { if (!propertiesSupported()) return; - Map props = properties("owner", "group", "0555"); - create(igfsSecondary, paths(DIR), null); create(igfs, null, null); @@ -1143,8 +1157,8 @@ public void testUpdateParentRootPathMissing() throws Exception { checkExist(igfs, DIR); - assertEquals(props, igfsSecondary.properties(DIR.toString())); - assertEquals(props, igfs.info(DIR).properties()); + assertTrue(propertiesContains(igfsSecondary.properties(DIR.toString()), props)); + assertTrue(propertiesContains(igfs.info(DIR).properties(), props)); } /** @@ -1613,4 +1627,20 @@ public void testSecondarySize() throws Exception { assertEquals(chunk.length, igfs.size(FILE)); assertEquals(chunk.length * 2, igfs.size(SUBDIR)); } + + /** + * @param allProps All properties. + * @param checkedProps Checked properies + * @return {@code true} If allchecked properties are contained in the #propsAll. 
+ */ + public static boolean propertiesContains(Map allProps, Map checkedProps) { + for (String name : checkedProps.keySet()) + if (!checkedProps.get(name).equals(allProps.get(name))) { + System.err.println("All properties: " + allProps); + System.err.println("Checked properties: " + checkedProps); + return false; + } + + return true; + } } \ No newline at end of file diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java index 0e6fc48c4a8f3..8a23954859a90 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemDualAbstractSelfTest.java @@ -17,6 +17,7 @@ package org.apache.ignite.internal.processors.igfs; +import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.igfs.IgfsFile; import org.apache.ignite.igfs.IgfsMode; import org.apache.ignite.igfs.IgfsPath; @@ -60,6 +61,13 @@ public abstract class IgfsLocalSecondaryFileSystemDualAbstractSelfTest extends I /** */ private final File fileLinkSrc = new File(FS_WORK_DIR + File.separatorChar + "file"); + /** */ + private final String TEST_GROUP = System.getProperty("IGFS_LOCAL_FS_TEST_GROUP", "igfs_grp_0"); + + /** */ + private final Boolean PROPERTIES_SUPPORT = + IgniteSystemProperties.getBoolean("IGFS_LOCAL_FS_PROPERTIES_SUPPORT", false); + /** * Constructor. @@ -103,13 +111,13 @@ protected IgfsLocalSecondaryFileSystemDualAbstractSelfTest(IgfsMode mode) { } /** {@inheritDoc} */ - @Override protected boolean permissionsSupported() { - return false; + @Override protected boolean propertiesSupported() { + return !U.isWindows() && PROPERTIES_SUPPORT; } /** {@inheritDoc} */ - @Override protected boolean propertiesSupported() { - return false; + @Override protected boolean permissionsSupported() { + return !U.isWindows(); } /** {@inheritDoc} */ @@ -169,6 +177,16 @@ public void testSymlinkToFile() throws Exception { checkFileContent(igfs, new IgfsPath("/file"), chunk); } + /** + * Test update when parent is the root and the path being updated is missing locally. + * + * @throws Exception If failed. + */ + public void testUpdateParentRootPathMissing() throws Exception { + doUpdateParentRootPathMissing(properties(TEST_GROUP, "0555")); + } + + /** * * @throws Exception If failed. diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java index 848abe2b825ae..e7f9bbb99a3a1 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemProxySelfTest.java @@ -105,11 +105,6 @@ public class IgfsLocalSecondaryFileSystemProxySelfTest extends IgfsProxySelfTest return false; } - /** {@inheritDoc} */ - @Override public void testUpdatePathDoesNotExist() throws Exception { - fail("IGNITE-3645"); - } - /** * * @throws Exception If failed. 
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemTestAdapter.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemTestAdapter.java index 12714c461b892..8f6af832fb811 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemTestAdapter.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsLocalSecondaryFileSystemTestAdapter.java @@ -17,6 +17,10 @@ package org.apache.ignite.internal.processors.igfs; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; +import java.nio.file.attribute.PosixFilePermission; +import java.util.HashMap; import org.apache.ignite.internal.util.typedef.T2; import java.io.File; @@ -77,12 +81,30 @@ public IgfsLocalSecondaryFileSystemTestAdapter(final File workDir) { /** {@inheritDoc} */ @Override public Map properties(final String path) throws IOException { - throw new UnsupportedOperationException("properties"); + Path p = path(path); + PosixFileAttributes attrs = Files.getFileAttributeView(p, PosixFileAttributeView.class).readAttributes(); + + Map props = new HashMap<>(); + props.put(IgfsUtils.PROP_USER_NAME, attrs.owner().getName()); + props.put(IgfsUtils.PROP_GROUP_NAME, attrs.group().getName()); + props.put(IgfsUtils.PROP_PERMISSION, permissions(path)); + + return props; } /** {@inheritDoc} */ @Override public String permissions(String path) throws IOException { - throw new UnsupportedOperationException("permissions"); + Path p = path(path); + PosixFileAttributeView attrView = Files.getFileAttributeView(p, PosixFileAttributeView.class); + + if (attrView == null) + throw new UnsupportedOperationException("Posix file attributes not available"); + + int perm = 0; + for(PosixFilePermission pfp : attrView.readAttributes().permissions()) + perm |= (1 << 8 - pfp.ordinal()); + + return '0' + Integer.toOctalString(perm); } /** {@inheritDoc} */ @@ -123,6 +145,7 @@ private Path path(String path) { * * @param path Path. * @throws IOException If failed. + * @return {@code true} if the file is deleted successfully. {@code false} otherwise. */ private boolean deleteRecursively(Path path) throws IOException { if (Files.isDirectory(path)) { diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/benchmark/IgfsBenchmark.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/benchmark/IgfsBenchmark.java new file mode 100644 index 0000000000000..9cf6e8faf4982 --- /dev/null +++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/benchmark/IgfsBenchmark.java @@ -0,0 +1,561 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.igfs.benchmark; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import org.apache.ignite.Ignite; +import org.apache.ignite.IgniteException; +import org.apache.ignite.IgniteFileSystem; +import org.apache.ignite.Ignition; +import org.apache.ignite.igfs.IgfsFile; +import org.apache.ignite.igfs.IgfsInputStream; +import org.apache.ignite.igfs.IgfsOutputStream; +import org.apache.ignite.igfs.IgfsPath; +import org.apache.ignite.igfs.IgfsPathNotFoundException; + +/** + * + */ +class FileOperation { + /** Buff size. */ + public static final int BUFF_SIZE = 8192; + + /** Data bufer. */ + ByteBuffer dataBufer = ByteBuffer.allocate(BUFF_SIZE); + + /** Filesystem. */ + protected final IgniteFileSystem fs; + + /** + * @param fs Ignite filesystem to benchmark. + */ + public FileOperation(IgniteFileSystem fs) { + this.fs = fs; + } + + /** + * @param path Path to do operation. + * @throws Exception If failed. + */ + public void handleFile(String path) throws Exception { + // No-op. + } + + /** + * @param path Path to do operation. + * @throws Exception If failed. + */ + public void preHandleDir(String path) throws Exception { + // No-op. + } + + /** + * @param path Path to do operation. + * @throws Exception If failed. + */ + public void postHandleDir(String path) throws Exception { + // No-op. + } +} + +/** + * + */ +class WriteFileOperation extends FileOperation { + /** Size. */ + private int size; + + /** + * @param fs Filesystem/ + * @param size Size to write. + */ + public WriteFileOperation(IgniteFileSystem fs, int size) { + super(fs); + this.size = size; + } + + /** {@inheritDoc} */ + @Override public void handleFile(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + IgfsOutputStream out = null; + + try { + out = fs.create(path, false); + } + catch (IgniteException ex) { + System.out.println("create file " + path.toString() + " failed: " + ex); + throw ex; + } + + try { + for (int i = 0; i < size / dataBufer.capacity(); i++) + out.write(dataBufer.array()); + } + catch (IOException ex) { + System.out.println("write file " + path.toString() + " failed: " + ex); + throw ex; + } + finally { + out.close(); + } + } + + /** {@inheritDoc} */ + @Override public void preHandleDir(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + + if (fs.exists(path)) + throw new IgniteException("path " + path.toString() + " already exists"); + + try { + fs.mkdirs(path); + } + catch (IgniteException ex) { + throw ex; + } + } +} + +/** + * + */ +class ReadFileOperation extends FileOperation { + /** Size. */ + private int size; + + /** + * @param fs Filesystem + * @param size Size to read. 
+ */ + public ReadFileOperation(IgniteFileSystem fs, int size) { + super(fs); + this.size = size; + } + + /** {@inheritDoc} */ + @Override public void handleFile(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + IgfsInputStream in = null; + + try { + in = fs.open(path); + } + catch (IgfsPathNotFoundException ex) { + System.out.println("file " + path.toString() + " not exist: " + ex); + throw ex; + } + catch (IgniteException ex) { + System.out.println("open file " + path.toString() + " failed: " + ex); + throw ex; + } + + try { + for (int i = 0; i < size / dataBufer.capacity(); i++) + in.read(dataBufer.array()); + } + catch (IOException ex) { + System.out.println("read file " + path.toString() + " failed: " + ex); + throw ex; + } + finally { + in.close(); + } + } + + /** {@inheritDoc} */ + @Override public void preHandleDir(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + + if (!fs.exists(path)) { + System.out.println("path " + path.toString() + " not exist"); + throw new IgniteException("path " + path.toString() + " not exist"); + } + } +} + +/** + * + */ +class DeleteFileOperation extends FileOperation { + /** Size. */ + private int size; + + /** + * @param fs Filesystem. + * @param size Size. + */ + public DeleteFileOperation(IgniteFileSystem fs, int size) { + super(fs); + this.size = size; + } + + /** {@inheritDoc} */ + @Override public void handleFile(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + fs.delete(path, false); + } + + /** {@inheritDoc} */ + @Override public void postHandleDir(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + fs.delete(path, false); + } +} + +/** + * + */ +class InfoFileOperation extends FileOperation { + /** + * @param fs Filesystem. + */ + public InfoFileOperation(IgniteFileSystem fs) { + super(fs); + } + + /** {@inheritDoc} */ + @Override public void handleFile(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + IgfsFile info = fs.info(path); + + assert info != null : "Info must be not null for exists file. All files must be exists for benchmark"; + } + + /** {@inheritDoc} */ + @Override public void postHandleDir(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + IgfsFile info = fs.info(path); + + assert info != null : "Info must be not null for exists dir. All dirs must be exists for benchmark"; + } +} + +/** + * + */ +class ListPathFileOperation extends FileOperation { + /** + * @param fs Filesystem. + */ + public ListPathFileOperation(IgniteFileSystem fs) { + super(fs); + } + + /** {@inheritDoc} */ + @Override public void postHandleDir(String strPath) throws Exception { + IgfsPath path = new IgfsPath(strPath); + + Collection lst = fs.listPaths(path); + + assert lst != null : "List of paths must not be null"; + } +} + +/** + * + */ +public class IgfsBenchmark { + /** Path. */ + private final String path; + + /** Depth. */ + private final int depth; + + /** Width. */ + private final int subDirsCount; + + /** Count. */ + private final int filesCount; + + /** Size. */ + private final int size; + + /** + * @param path Root test path. + * @param depth Directory depth. + * @param subDirsCount Count of subdirectories. + * @param filesCount Count of files. + * @param size Size of file. 
+     */
+    public IgfsBenchmark(String path,
+        int depth,
+        int subDirsCount,
+        int filesCount,
+        int size) {
+        this.path = path;
+        this.depth = depth;
+        this.subDirsCount = subDirsCount;
+        this.filesCount = filesCount;
+        this.size = (size > FileOperation.BUFF_SIZE) ? size : FileOperation.BUFF_SIZE;
+    }
+
+    /**
+     * @param lst List of measurement results.
+     * @return Average value.
+     */
+    public static long avg(List lst) {
+        if (lst.isEmpty())
+            throw new IllegalArgumentException("List must not be empty");
+
+        long sum = 0;
+        for (long l : lst)
+            sum += l;
+
+        return sum / lst.size();
+    }
+
+    /**
+     * @param lst List of measurement results.
+     * @param avg Average value.
+     * @return The value of the standard deviation.
+     */
+    public static long stdDev(List lst, long avg) {
+        if (lst.isEmpty())
+            throw new IllegalArgumentException("List must not be empty");
+
+        long sum = 0;
+        for (long l : lst)
+            sum += (l - avg) * (l - avg);
+
+        return (long)Math.sqrt((double)sum / (double)lst.size());
+    }
+
+    /**
+     * @param args Command line arguments.
+     */
+    public static void main(String[] args) {
+        Ignition.setClientMode(Boolean.getBoolean("clientMode"));
+
+        Ignite ignite = Ignition.start(System.getProperty("cfg", "default-config.xml"));
+
+        int warmUpCount = Integer.getInteger("warmup", 2);
+        int cycles = Integer.getInteger("cycles", 10);
+
+        final IgfsBenchmark fsTest = new IgfsBenchmark(
+            System.getProperty("testDir", "/test"),
+            Integer.getInteger("depth", 3),
+            Integer.getInteger("subDirs", 10),
+            Integer.getInteger("files", 10),
+            Integer.getInteger("fileSize", 8) * 1024);
+
+        final IgniteFileSystem fs = ignite.fileSystem("igfs");
+
+        try {
+            for (int i = 0; i < warmUpCount; ++i) {
+                System.out.println("Warmup #" + i + " / " + warmUpCount);
+                fsTest.testWriteFile(fs);
+                fsTest.testReadFile(fs);
+                fsTest.testDeleteFile(fs);
+            }
+        }
+        catch (Exception ex) {
+            System.err.println("Warmup error");
+            ex.printStackTrace(System.err);
+            Ignition.stop(false);
+            return;
+        }
+
+        List writeRes = new ArrayList<>(cycles);
+        List readRes = new ArrayList<>(cycles);
+        List infoRes = new ArrayList<>(cycles);
+        List listRes = new ArrayList<>(cycles);
+        List delRes = new ArrayList<>(cycles);
+
+        try {
+            for (int i = 0; i < cycles; ++i) {
+                System.out.println("Benchmark cycle #" + i + " / " + cycles);
+
+                writeRes.add(bench(new Runnable() {
+                    @Override public void run() {
+                        fsTest.testWriteFile(fs);
+                    }
+                }));
+
+                readRes.add(bench(new Runnable() {
+                    @Override public void run() {
+                        fsTest.testReadFile(fs);
+                    }
+                }));
+
+                infoRes.add(bench(new Runnable() {
+                    @Override public void run() {
+                        fsTest.testInfoFile(fs);
+                    }
+                }));
+
+                listRes.add(bench(new Runnable() {
+                    @Override public void run() {
+                        fsTest.testListPathFile(fs);
+                    }
+                }));
+
+                delRes.add(bench(new Runnable() {
+                    @Override public void run() {
+                        fsTest.testDeleteFile(fs);
+                    }
+                }));
+            }
+
+            System.out.println("\n");
+            System.out.println("Write " + avg(writeRes) + " +/- " + stdDev(writeRes, avg(writeRes)));
+            System.out.println("Read " + avg(readRes) + " +/- " + stdDev(readRes, avg(readRes)));
+            System.out.println("Info " + avg(infoRes) + " +/- " + stdDev(infoRes, avg(infoRes)));
+            System.out.println("List " + avg(listRes) + " +/- " + stdDev(listRes, avg(listRes)));
+            System.out.println("Delete " + avg(delRes) + " +/- " + stdDev(delRes, avg(delRes)));
+        }
+        catch (Exception ex) {
+            System.err.println("Benchmark error");
+            ex.printStackTrace(System.err);
+        }
+        finally {
+            Ignition.stop(false);
+        }
+    }
+
+    /**
+     * @param parentPath Begin path.
+     * @param depth Current depth.
+     * @return List of subdirs.
+     */
+    private String[] buildPath(String parentPath, int depth) {
+        String[] curPath = new String[subDirsCount];
+
+        for (int i = 1; i <= curPath.length; i++)
+            curPath[i - 1] = parentPath + "/vdb." + depth + "_" + i + ".dir";
+
+        return curPath;
+    }
+
+    /**
+     * @param parentPath Begin path.
+     * @param operation Test operation to do.
+     * @throws Exception If failed.
+     */
+    private void recurseFile(String parentPath, FileOperation operation) throws Exception {
+        for (int i = 1; i <= filesCount; i++) {
+            String filePath = parentPath + "/vdb_f" + String.format("%0" + String.valueOf(this.filesCount).length() + "d", i) + ".file";
+            operation.handleFile(filePath);
+        }
+    }
+
+    /**
+     * @param parentPath Begin path.
+     * @param depth Recursion depth.
+     * @param operation Test operation to do.
+     * @throws Exception If failed.
+     */
+    private void recursePath(String parentPath, int depth, FileOperation operation) throws Exception {
+        if (depth == this.depth + 1)
+            recurseFile(parentPath, operation);
+        else {
+            String[] curPath = buildPath(parentPath, depth);
+
+            for (String path : curPath) {
+                operation.preHandleDir(path);
+                recursePath(path, depth + 1, operation);
+                operation.postHandleDir(path);
+            }
+        }
+    }
+
+    /**
+     * Do read file operations. Files must exist.
+     *
+     * @param fs Filesystem.
+     */
+    public void testReadFile(IgniteFileSystem fs) {
+        try {
+            recursePath(path, 1, new ReadFileOperation(fs, size));
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Do write file operations.
+     *
+     * @param fs Filesystem.
+     */
+    public void testWriteFile(IgniteFileSystem fs) {
+        try {
+            recursePath(path, 1, new WriteFileOperation(fs, size));
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Do delete file operations. Files must exist.
+     *
+     * @param fs Filesystem.
+     */
+    public void testDeleteFile(IgniteFileSystem fs) {
+        try {
+            recursePath(path, 1, new DeleteFileOperation(fs, 0));
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Do info file operations. Files must exist.
+     *
+     * @param fs Filesystem.
+     */
+    public void testInfoFile(IgniteFileSystem fs) {
+        try {
+            recursePath(path, 1, new InfoFileOperation(fs));
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Do list path operations. Directories must exist.
+     *
+     * @param fs Filesystem.
+     */
+    public void testListPathFile(IgniteFileSystem fs) {
+        try {
+            recursePath(path, 1, new ListPathFileOperation(fs));
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * @param r Runnable.
+     * @return Time of execution in millis.
+ */ + public static long bench(Runnable r) { + long t0 = System.currentTimeMillis(); + + r.run(); + + return System.currentTimeMillis() - t0; + } +} diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java index fcad674deb116..203965cebc137 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopIgfsSecondaryFileSystemDelegateImpl.java @@ -250,8 +250,7 @@ public HadoopIgfsSecondaryFileSystemDelegateImpl(IgniteHadoopIgfsSecondaryFileSy /** {@inheritDoc} */ @Override public OutputStream create(IgfsPath path, int bufSize, boolean overwrite, int replication, long blockSize, @Nullable Map props) { - HadoopIgfsProperties props0 = - new HadoopIgfsProperties(props != null ? props : Collections.emptyMap()); + HadoopIgfsProperties props0 = new HadoopIgfsProperties(props); try { return fileSystemForUser().create(convert(path), props0.permission(), overwrite, bufSize, diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProperties.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProperties.java index 5427bf15b6033..a322a9939fd25 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProperties.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/igfs/HadoopIgfsProperties.java @@ -43,6 +43,9 @@ public class HadoopIgfsProperties { * @throws IgniteException In case of error. */ public HadoopIgfsProperties(Map props) throws IgniteException { + if (props == null) + return; + usrName = props.get(IgfsUtils.PROP_USER_NAME); grpName = props.get(IgfsUtils.PROP_GROUP_NAME); From fa698d6fcfde0006423ef009337dbef58ac62515 Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Mon, 3 Oct 2016 13:33:12 +0300 Subject: [PATCH 66/69] Fixed TcpDiscoverySnapshotHistoryTest intermittent failures caused by multicast IP finder. --- .../tcp/TcpDiscoverySnapshotHistoryTest.java | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySnapshotHistoryTest.java b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySnapshotHistoryTest.java index 9667fb8cc5d7d..47e911204edb2 100644 --- a/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySnapshotHistoryTest.java +++ b/modules/core/src/test/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySnapshotHistoryTest.java @@ -22,24 +22,26 @@ import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.spi.discovery.DiscoverySpi; import org.apache.ignite.spi.discovery.DiscoverySpiHistorySupport; +import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import java.util.Collections; + import static org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi.DFLT_TOP_HISTORY_SIZE; /** * Tests for topology snapshots history. 
*/ public class TcpDiscoverySnapshotHistoryTest extends GridCommonAbstractTest { - /** */ - public TcpDiscoverySnapshotHistoryTest() { - super(false); - } - /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(gridName); - cfg.setDiscoverySpi(new TcpDiscoverySpi()); + TcpDiscoveryVmIpFinder ipFinder = new TcpDiscoveryVmIpFinder(); + + ipFinder.setAddresses(Collections.singleton("127.0.0.1:47500")); + + cfg.setDiscoverySpi(new TcpDiscoverySpi().setIpFinder(ipFinder)); cfg.setCacheConfiguration(); cfg.setLocalHost("127.0.0.1"); cfg.setConnectorConfiguration(null); From bba019fd5076412ca43c10a32fd300b6031ccd0b Mon Sep 17 00:00:00 2001 From: vozerov-gridgain Date: Mon, 3 Oct 2016 17:25:20 +0300 Subject: [PATCH 67/69] IGNITE-3980: Processed failing tests in query suites. This closes #1137. --- .../ignite/testframework/IgniteTestSuite.java | 18 ++++++-- modules/ignored-tests/pom.xml | 8 ++++ ...iteIgnoredBinarySimpleMapperTestSuite.java | 41 ++++++++++++++++++ .../IgniteIgnoredBinaryTestSuite.java | 42 +++++++++++++++++++ .../testsuites/IgniteIgnoredTestSuite.java | 4 ++ .../IgniteCacheReplicatedQuerySelfTest.java | 4 +- .../query/IgniteSqlSplitterSelfTest.java | 4 +- .../query/h2/sql/BaseH2CompareQueryTest.java | 3 +- ...rySimpleNameMapperCacheQueryTestSuite.java | 2 - .../IgniteCacheQuerySelfTestSuite.java | 3 +- .../IgniteCacheQuerySelfTestSuite2.java | 3 +- 11 files changed, 119 insertions(+), 13 deletions(-) create mode 100644 modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinarySimpleMapperTestSuite.java create mode 100644 modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinaryTestSuite.java diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java index 1cf69ae16e502..157e1f5af4277 100644 --- a/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testframework/IgniteTestSuite.java @@ -103,11 +103,21 @@ public IgniteTestSuite(@Nullable Class theClass, @Nullable S } /** {@inheritDoc} */ - @Override public void addTestSuite(Class testClass) { - IgniteTestSuite suite = new IgniteTestSuite(testClass, ignoredOnly); + @Override public void addTest(Test test) { + // Ignore empty test suites. 
+ if (test instanceof IgniteTestSuite) { + IgniteTestSuite suite = (IgniteTestSuite)test; + + if (suite.testCount() == 0) + return; + } - if (suite.testCount() > 0) - addTest(suite); + super.addTest(test); + } + + /** {@inheritDoc} */ + @Override public void addTestSuite(Class testClass) { + addTest(new IgniteTestSuite(testClass, ignoredOnly)); } /** diff --git a/modules/ignored-tests/pom.xml b/modules/ignored-tests/pom.xml index 142754e10fa8f..00c7d554d7650 100644 --- a/modules/ignored-tests/pom.xml +++ b/modules/ignored-tests/pom.xml @@ -98,6 +98,14 @@ ${project.version} + + org.apache.ignite + ignite-indexing + ${project.version} + test-jar + test + + org.apache.ignite ignite-jta diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinarySimpleMapperTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinarySimpleMapperTestSuite.java new file mode 100644 index 0000000000000..aa2e219e27e67 --- /dev/null +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinarySimpleMapperTestSuite.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.testsuites; + +import junit.framework.TestSuite; +import org.apache.ignite.testframework.IgniteTestSuite; + +/** + * Special test suite with ignored tests for Binary mode. + */ +public class IgniteIgnoredBinarySimpleMapperTestSuite extends TestSuite { + /** + * @return IgniteCache test suite. + * @throws Exception Thrown in case of the failure. + */ + public static TestSuite suite() throws Exception { + IgniteTestSuite.ignoreDefault(true); + + IgniteTestSuite suite = new IgniteTestSuite(null, "Ignite Ignored Binary Simple Mapper Test Suite"); + + /* --- QUERY --- */ + suite.addTest(IgniteBinarySimpleNameMapperCacheQueryTestSuite.suite()); + + return suite; + } +} diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinaryTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinaryTestSuite.java new file mode 100644 index 0000000000000..5116bd74d3346 --- /dev/null +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredBinaryTestSuite.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.testsuites; + +import junit.framework.TestSuite; +import org.apache.ignite.testframework.IgniteTestSuite; + +/** + * Special test suite with ignored tests for Binary mode. + */ +public class IgniteIgnoredBinaryTestSuite extends TestSuite { + /** + * @return IgniteCache test suite. + * @throws Exception Thrown in case of the failure. + */ + public static TestSuite suite() throws Exception { + IgniteTestSuite.ignoreDefault(true); + + IgniteTestSuite suite = new IgniteTestSuite(null, "Ignite Ignored Binary Test Suite"); + + /* --- QUERY --- */ + suite.addTest(IgniteBinaryCacheQueryTestSuite.suite()); + suite.addTest(IgniteBinaryCacheQueryTestSuite2.suite()); + + return suite; + } +} diff --git a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java index fd92a35aeae0b..414d166062c1d 100644 --- a/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java +++ b/modules/ignored-tests/src/test/java/org/apache/ignite/testsuites/IgniteIgnoredTestSuite.java @@ -46,6 +46,10 @@ public static TestSuite suite() throws Exception { /* --- JTA --- */ suite.addTest(IgniteJtaTestSuite.suite()); + /* --- QUERIES --- */ + suite.addTest(IgniteCacheQuerySelfTestSuite.suite()); + suite.addTest(IgniteCacheQuerySelfTestSuite2.suite()); + /* --- SPRING --- */ suite.addTest(IgniteSpringTestSuite.suite()); diff --git a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/IgniteCacheReplicatedQuerySelfTest.java b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/IgniteCacheReplicatedQuerySelfTest.java index 06adb688637d9..a673a73be77e8 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/IgniteCacheReplicatedQuerySelfTest.java +++ b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/IgniteCacheReplicatedQuerySelfTest.java @@ -53,6 +53,7 @@ import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.testsuites.IgniteIgnore; import org.apache.ignite.transactions.Transaction; import org.springframework.util.ReflectionUtils; @@ -354,9 +355,8 @@ public void testLostIterator() throws Exception { /** * @throws Exception If failed. 
*/ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-613", forceFailure = true) public void testNodeLeft() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-613"); - Ignite g = startGrid("client"); try { diff --git a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/IgniteSqlSplitterSelfTest.java b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/IgniteSqlSplitterSelfTest.java index fd52469ac8fc4..64be936559d15 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/IgniteSqlSplitterSelfTest.java +++ b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/IgniteSqlSplitterSelfTest.java @@ -39,6 +39,7 @@ import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; +import org.apache.ignite.testsuites.IgniteIgnore; /** * Tests for correct distributed partitioned queries. @@ -281,9 +282,8 @@ private static List column(int idx, List> rows) { /** * */ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-1886", forceFailure = true) public void testFunctionNpe() { - assert false : "https://issues.apache.org/jira/browse/IGNITE-1886"; - IgniteCache userCache = ignite(0).createCache( cacheConfig("UserCache", true, Integer.class, User.class)); IgniteCache userOrderCache = ignite(0).createCache( diff --git a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/h2/sql/BaseH2CompareQueryTest.java b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/h2/sql/BaseH2CompareQueryTest.java index 46076a9c6d685..7cae5854a37f7 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/h2/sql/BaseH2CompareQueryTest.java +++ b/modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/h2/sql/BaseH2CompareQueryTest.java @@ -37,6 +37,7 @@ import org.apache.ignite.cache.query.annotations.QuerySqlFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.testframework.GridTestUtils; +import org.apache.ignite.testsuites.IgniteIgnore; /** * Base set of queries to compare query results from h2 database instance and mixed ignite caches (replicated and partitioned) @@ -204,8 +205,8 @@ public void testInvalidQuery() throws Exception { /** * @throws Exception */ + @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-705", forceFailure = true) public void testAllExamples() throws Exception { - fail("https://issues.apache.org/jira/browse/IGNITE-705"); // compareQueryRes0("select ? limit ? 
offset ?"); // compareQueryRes0("select cool1()"); diff --git a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinarySimpleNameMapperCacheQueryTestSuite.java b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinarySimpleNameMapperCacheQueryTestSuite.java index 3eed9800ff6ae..109e244e0b5b7 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinarySimpleNameMapperCacheQueryTestSuite.java +++ b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteBinarySimpleNameMapperCacheQueryTestSuite.java @@ -18,7 +18,6 @@ package org.apache.ignite.testsuites; import junit.framework.TestSuite; -import org.apache.ignite.internal.binary.BinaryMarshaller; import org.apache.ignite.testframework.config.GridTestProperties; /** @@ -30,7 +29,6 @@ public class IgniteBinarySimpleNameMapperCacheQueryTestSuite extends TestSuite { * @throws Exception In case of error. */ public static TestSuite suite() throws Exception { - GridTestProperties.setProperty(GridTestProperties.MARSH_CLASS_NAME, BinaryMarshaller.class.getName()); GridTestProperties.setProperty(GridTestProperties.BINARY_MARSHALLER_USE_SIMPLE_NAME_MAPPER, "true"); return IgniteBinaryCacheQueryTestSuite.suite(); diff --git a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java index 21741478f945e..d1f4c4ac6ebd3 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java +++ b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java @@ -65,6 +65,7 @@ import org.apache.ignite.internal.processors.query.IgniteSqlSchemaIndexingTest; import org.apache.ignite.internal.processors.query.IgniteSqlSplitterSelfTest; import org.apache.ignite.internal.processors.query.h2.sql.GridQueryParsingTest; +import org.apache.ignite.testframework.IgniteTestSuite; /** * Test suite for cache queries. @@ -75,7 +76,7 @@ public class IgniteCacheQuerySelfTestSuite extends TestSuite { * @throws Exception If failed. */ public static TestSuite suite() throws Exception { - TestSuite suite = new TestSuite("Ignite Cache Queries Test Suite"); + IgniteTestSuite suite = new IgniteTestSuite("Ignite Cache Queries Test Suite"); // Parsing suite.addTestSuite(GridQueryParsingTest.class); diff --git a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite2.java b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite2.java index e5f4027a274ec..40fc15759d3f3 100644 --- a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite2.java +++ b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite2.java @@ -47,6 +47,7 @@ import org.apache.ignite.internal.processors.query.h2.sql.BaseH2CompareQueryTest; import org.apache.ignite.internal.processors.query.h2.sql.H2CompareBigQueryTest; import org.apache.ignite.spi.communication.tcp.GridOrderedMessageCancelSelfTest; +import org.apache.ignite.testframework.IgniteTestSuite; /** * Test suite for cache queries. @@ -57,7 +58,7 @@ public class IgniteCacheQuerySelfTestSuite2 extends TestSuite { * @throws Exception If failed. */ public static TestSuite suite() throws Exception { - TestSuite suite = new TestSuite("Ignite Cache Queries Test Suite 2"); + TestSuite suite = new IgniteTestSuite("Ignite Cache Queries Test Suite 2"); // Scan queries. 
suite.addTestSuite(CacheScanPartitionQueryFallbackSelfTest.class); From 9d4243277822eb75b96f123db2bc9759245fe4df Mon Sep 17 00:00:00 2001 From: "Andrey V. Mashenkov" Date: Tue, 4 Oct 2016 13:39:53 +0300 Subject: [PATCH 68/69] Minors --- .../apache/ignite/IgniteSystemProperties.java | 4 +- .../internal/binary/BinaryObjectImpl.java | 8 +- .../binary/BinaryObjectOffheapImpl.java | 8 +- .../internal/binary/BinaryReaderExImpl.java | 72 +++++++++-------- .../ignite/internal/binary/BinaryUtils.java | 25 +++--- .../internal/binary/BinaryWriterExImpl.java | 34 ++++---- .../binary/builder/BinaryBuilderReader.java | 18 ++--- .../binary/BinaryFieldsAbstractSelfTest.java | 79 ++++++------------- .../binary/BinaryMarshallerSelfTest.java | 5 +- .../IgniteBinaryObjectsTestSuite.java | 2 - .../IgniteBinaryObjectsTestSuite2.java} | 32 +++----- 11 files changed, 127 insertions(+), 160 deletions(-) rename modules/core/src/test/java/org/apache/ignite/{internal/binary/BinaryMarshallerCompactZeroesSelfTest.java => testsuites/IgniteBinaryObjectsTestSuite2.java} (55%) diff --git a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java index 6a2bb68b7faed..6eb66269029da 100644 --- a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java +++ b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java @@ -478,10 +478,10 @@ public final class IgniteSystemProperties { * with no value included, that saves 8-bytes per field. * Otherwise Integer\Long zeroes will be encoded in old manner, preserving compatibility. *

- * @deprecated Should be removed in Apache Ignite 2.0. + * @deprecated This mode should be default mode in Apache Ignite 2.0 and option should be removed. */ @Deprecated - public static final String IGNITE_BINARY_COMPACT_INT_ZEROES = "IGNITE_BINARY_COMPACT_INT_ZEROES"; + public static final String IGNITE_BINARY_COMPACT_ZEROES = "IGNITE_BINARY_COMPACT_ZEROES"; /** * Enforces singleton. diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java index c8bf1e443b09a..f853e359dc3e4 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectImpl.java @@ -328,13 +328,13 @@ else if (fieldOffLen == BinaryUtils.OFFSET_2) break; - case GridBinaryMarshaller.ZERO_INT: - val = 0; + case GridBinaryMarshaller.LONG: + val = BinaryPrimitives.readLong(arr, fieldPos + 1); break; - case GridBinaryMarshaller.LONG: - val = BinaryPrimitives.readLong(arr, fieldPos + 1); + case GridBinaryMarshaller.ZERO_INT: + val = 0; break; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java index 9a7e12de661ad..5ba259eadfd73 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectOffheapImpl.java @@ -206,13 +206,13 @@ else if (fieldOffLen == BinaryUtils.OFFSET_2) break; - case GridBinaryMarshaller.ZERO_INT: - val = 0; + case GridBinaryMarshaller.LONG: + val = BinaryPrimitives.readLong(ptr, fieldPos + 1); break; - case GridBinaryMarshaller.LONG: - val = BinaryPrimitives.readLong(ptr, fieldPos + 1); + case GridBinaryMarshaller.ZERO_INT: + val = 0; break; diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java index bbacff59244fb..94ba08c573f75 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryReaderExImpl.java @@ -22,12 +22,10 @@ import java.io.ObjectInput; import java.math.BigDecimal; import java.sql.Timestamp; -import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.Map; import java.util.UUID; - import org.apache.ignite.binary.BinaryCollectionFactory; import org.apache.ignite.binary.BinaryInvalidTypeException; import org.apache.ignite.binary.BinaryMapFactory; @@ -652,7 +650,7 @@ char readChar(int fieldId) throws BinaryObjectException { /** {@inheritDoc} */ @Override public int readInt(String fieldName) throws BinaryObjectException { - return findFieldByName(fieldName) && checkFlagNoHandles(INT,ZERO_INT) == INT ? in.readInt() : 0; + return findFieldByName(fieldName) && checkFlagsNoHandles(INT, ZERO_INT) == Flag.NORMAL ? in.readInt() : 0; } /** @@ -661,7 +659,7 @@ char readChar(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException If failed. */ int readInt(int fieldId) throws BinaryObjectException { - return findFieldById(fieldId) && checkFlagNoHandles(INT,ZERO_INT) == INT ? in.readInt() : 0; + return findFieldById(fieldId) && checkFlagsNoHandles(INT, ZERO_INT) == Flag.NORMAL ? 
in.readInt() : 0; } /** @@ -670,15 +668,19 @@ int readInt(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException In case of error. */ @Nullable Integer readIntNullable(int fieldId) throws BinaryObjectException { - if(findFieldById(fieldId)) { - switch (checkFlagNoHandles(INT, ZERO_INT)) { - case INT: + if (findFieldById(fieldId)) { + switch (checkFlagsNoHandles(INT, ZERO_INT)) { + case NORMAL: return in.readInt(); - case ZERO_INT: + case ZERO: return 0; + + default: + return null; } } + return null; } @@ -717,7 +719,8 @@ int readInt(int fieldId) throws BinaryObjectException { /** {@inheritDoc} */ @Override public long readLong(String fieldName) throws BinaryObjectException { - return (findFieldByName(fieldName) && checkFlagNoHandles(LONG, ZERO_LONG) == LONG) ? in.readLong() : 0L; + return (findFieldByName(fieldName) && checkFlagsNoHandles(LONG, ZERO_LONG) == Flag.NORMAL) ? in.readLong() : 0L; + } /** @@ -726,7 +729,7 @@ int readInt(int fieldId) throws BinaryObjectException { * @throws BinaryObjectException If failed. */ long readLong(int fieldId) throws BinaryObjectException { - return (findFieldById(fieldId) && checkFlagNoHandles(LONG, ZERO_LONG) == LONG) ? in.readLong() : 0L; + return (findFieldById(fieldId) && checkFlagsNoHandles(LONG, ZERO_LONG) == Flag.NORMAL) ? in.readLong() : 0L; } /** @@ -736,12 +739,15 @@ long readLong(int fieldId) throws BinaryObjectException { */ @Nullable Long readLongNullable(int fieldId) throws BinaryObjectException { if (findFieldById(fieldId)) { - switch (checkFlagNoHandles(LONG, ZERO_LONG)) { - case LONG: + switch (checkFlagsNoHandles(LONG, ZERO_LONG)) { + case NORMAL: return in.readLong(); - case ZERO_LONG: + case ZERO: return 0L; + + default: + return null; } } @@ -1450,26 +1456,27 @@ else if (flag == NULL) } /** - * Ensure that type flag is either null or contained by allowed values. + * Ensure that type flag is either null, compressed zero value or contains expected value. * - * @param expFlags Expected values. - * @return type flag. + * @param expFlag Expected value. + * @param zeroFlag Allowed zero value. + * @return Flag mode. * @throws BinaryObjectException If flag is neither null, nor expected. */ - private byte checkFlagNoHandles(byte... expFlags) { + private Flag checkFlagsNoHandles(byte expFlag, byte zeroFlag) { byte flag = in.readByte(); - if (flag == NULL) - return NULL; - - for (byte f : expFlags) - if (f == flag) - return f; + if (flag == expFlag) + return Flag.NORMAL; + else if (flag == zeroFlag) + return Flag.ZERO; + else if (flag == NULL) + return Flag.NULL; int pos = BinaryUtils.positionForHandle(in); - throw new BinaryObjectException("Unexpected flag value [pos=" + pos + ", expected=" + - Arrays.toString(expFlags) + ", actual=" + flag + ']'); + throw new BinaryObjectException("Unexpected flag value [pos=" + pos + ", expected=" + expFlag + + ", actual=" + flag + ']'); } /** {@inheritDoc} */ @@ -1563,13 +1570,13 @@ private byte checkFlagNoHandles(byte... expFlags) { break; - case ZERO_INT: - obj = 0; + case LONG: + obj = in.readLong(); break; - case LONG: - obj = in.readLong(); + case ZERO_INT: + obj = 0; break; @@ -2110,7 +2117,7 @@ private void streamPositionRandom(int pos) { /** {@inheritDoc} */ @Override public long skip(long n) throws IOException { - return skipBytes((int)n); + return skipBytes((int) n); } /** {@inheritDoc} */ @@ -2141,6 +2148,9 @@ private enum Flag { HANDLE, /** Null. 
*/ - NULL + NULL, + + /** Zero */ + ZERO } } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java index a3396bf7bf8d8..21d5de7086e39 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryUtils.java @@ -182,16 +182,11 @@ public class BinaryUtils { PLAIN_CLASS_TO_FLAG.put(boolean.class, GridBinaryMarshaller.BOOLEAN); for (byte b : new byte[] { - GridBinaryMarshaller.BYTE, GridBinaryMarshaller.SHORT, GridBinaryMarshaller.INT, GridBinaryMarshaller.LONG, - GridBinaryMarshaller.FLOAT, GridBinaryMarshaller.DOUBLE, GridBinaryMarshaller.CHAR, - GridBinaryMarshaller.BOOLEAN, GridBinaryMarshaller.DECIMAL, GridBinaryMarshaller.STRING, - GridBinaryMarshaller.UUID, GridBinaryMarshaller.DATE, GridBinaryMarshaller.TIMESTAMP, - GridBinaryMarshaller.BYTE_ARR, GridBinaryMarshaller.SHORT_ARR, GridBinaryMarshaller.INT_ARR, - GridBinaryMarshaller.LONG_ARR, GridBinaryMarshaller.FLOAT_ARR, GridBinaryMarshaller.DOUBLE_ARR, - GridBinaryMarshaller.CHAR_ARR, GridBinaryMarshaller.BOOLEAN_ARR, GridBinaryMarshaller.DECIMAL_ARR, - GridBinaryMarshaller.STRING_ARR, GridBinaryMarshaller.UUID_ARR, GridBinaryMarshaller.DATE_ARR, - GridBinaryMarshaller.TIMESTAMP_ARR, GridBinaryMarshaller.ENUM, GridBinaryMarshaller.ENUM_ARR, - GridBinaryMarshaller.NULL}) { + GridBinaryMarshaller.BYTE, GridBinaryMarshaller.SHORT, GridBinaryMarshaller.INT, GridBinaryMarshaller.LONG, GridBinaryMarshaller.FLOAT, GridBinaryMarshaller.DOUBLE, + GridBinaryMarshaller.CHAR, GridBinaryMarshaller.BOOLEAN, GridBinaryMarshaller.DECIMAL, GridBinaryMarshaller.STRING, GridBinaryMarshaller.UUID, GridBinaryMarshaller.DATE, GridBinaryMarshaller.TIMESTAMP, + GridBinaryMarshaller.BYTE_ARR, GridBinaryMarshaller.SHORT_ARR, GridBinaryMarshaller.INT_ARR, GridBinaryMarshaller.LONG_ARR, GridBinaryMarshaller.FLOAT_ARR, GridBinaryMarshaller.DOUBLE_ARR, + GridBinaryMarshaller.CHAR_ARR, GridBinaryMarshaller.BOOLEAN_ARR, GridBinaryMarshaller.DECIMAL_ARR, GridBinaryMarshaller.STRING_ARR, GridBinaryMarshaller.UUID_ARR, GridBinaryMarshaller.DATE_ARR, GridBinaryMarshaller.TIMESTAMP_ARR, + GridBinaryMarshaller.ENUM, GridBinaryMarshaller.ENUM_ARR, GridBinaryMarshaller.NULL}) { PLAIN_TYPE_FLAG[b] = true; } @@ -691,7 +686,7 @@ public static boolean knownArray(Object arr) { if (arr == null) return false; - Class cls = arr.getClass(); + Class cls = arr.getClass(); return cls == byte[].class || cls == short[].class || cls == int[].class || cls == long[].class || cls == float[].class || cls == double[].class || cls == char[].class || cls == boolean[].class || @@ -1722,12 +1717,12 @@ public static Object doReadOptimized(BinaryInputStream in, BinaryContext ctx, @N case GridBinaryMarshaller.INT: return in.readInt(); - case GridBinaryMarshaller.ZERO_INT: - return 0; - case GridBinaryMarshaller.LONG: return in.readLong(); + case GridBinaryMarshaller.ZERO_INT: + return 0; + case GridBinaryMarshaller.ZERO_LONG: return 0L; @@ -2211,7 +2206,7 @@ else if (c > 0x07FF) { } else { arr[position++] = (byte)(0xC0 | ((c >> 6) & 0x1F)); - arr[position++] = (byte)(0x80 | (c & 0x3F)); + arr[position++] = (byte)(0x80 | (c & 0x3F)); } } diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java index 988c9a89aa9c2..4a239c6baf779 100644 --- 
a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java @@ -17,17 +17,6 @@ package org.apache.ignite.internal.binary; -import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.IgniteSystemProperties; -import org.apache.ignite.binary.BinaryObjectException; -import org.apache.ignite.binary.BinaryRawWriter; -import org.apache.ignite.binary.BinaryWriter; -import org.apache.ignite.internal.binary.streams.BinaryHeapOutputStream; -import org.apache.ignite.internal.binary.streams.BinaryOutputStream; -import org.apache.ignite.internal.util.IgniteUtils; -import org.apache.ignite.internal.util.typedef.internal.A; -import org.jetbrains.annotations.Nullable; - import java.io.IOException; import java.io.ObjectOutput; import java.lang.reflect.InvocationHandler; @@ -39,6 +28,16 @@ import java.util.Date; import java.util.Map; import java.util.UUID; +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.IgniteSystemProperties; +import org.apache.ignite.binary.BinaryObjectException; +import org.apache.ignite.binary.BinaryRawWriter; +import org.apache.ignite.binary.BinaryWriter; +import org.apache.ignite.internal.binary.streams.BinaryHeapOutputStream; +import org.apache.ignite.internal.binary.streams.BinaryOutputStream; +import org.apache.ignite.internal.util.IgniteUtils; +import org.apache.ignite.internal.util.typedef.internal.A; +import org.jetbrains.annotations.Nullable; import static java.nio.charset.StandardCharsets.UTF_8; @@ -52,6 +51,13 @@ public class BinaryWriterExImpl implements BinaryWriter, BinaryRawWriterEx, Obje /** Initial capacity. */ private static final int INIT_CAP = 1024; + /** Compress zeroes feature flag. + * See {@link IgniteSystemProperties#IGNITE_BINARY_COMPACT_ZEROES} + * @deprecated This mode should be default mode in Apache Ignite 2.0 and option should be removed. + * */ + public static final boolean IGNITE_BINARY_COMPACT_ZEROES = + !IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_ZEROES); + /** */ private final BinaryContext ctx; @@ -934,8 +940,7 @@ void writeShortField(@Nullable Short val) { * @param val Value. */ void writeIntFieldPrimitive(int val) { - if (val == 0 && - IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, false)) { + if (val == 0 && IGNITE_BINARY_COMPACT_ZEROES) { out.unsafeEnsure(1); out.unsafeWriteByte(GridBinaryMarshaller.ZERO_INT); @@ -962,8 +967,7 @@ void writeIntField(@Nullable Integer val) { * @param val Value. 
*/ void writeLongFieldPrimitive(long val) { - if (val == 0L && - IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, false)) { + if (val == 0L && IGNITE_BINARY_COMPACT_ZEROES) { out.unsafeEnsure(1); out.unsafeWriteByte(GridBinaryMarshaller.ZERO_LONG); diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java index 4359b96b496f7..c3ef062c849d6 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/builder/BinaryBuilderReader.java @@ -274,7 +274,7 @@ public void skipValue() { break; case GridBinaryMarshaller.DECIMAL: - len = /** scale */4 + /** mag len */4 + /** mag bytes count */readInt(4); + len = /** scale */ 4 + /** mag len */ 4 + /** mag bytes count */ readInt(4); break; @@ -409,12 +409,12 @@ public Object getValueQuickly(int pos, int len) { case GridBinaryMarshaller.INT: return BinaryPrimitives.readInt(arr, pos + 1); - case GridBinaryMarshaller.ZERO_INT: - return 0; - case GridBinaryMarshaller.LONG: return BinaryPrimitives.readLong(arr, pos + 1); + case GridBinaryMarshaller.ZERO_INT: + return 0; + case GridBinaryMarshaller.ZERO_LONG: return 0L; @@ -546,13 +546,13 @@ public Object parseValue() { case GridBinaryMarshaller.INT: return readInt(); - case GridBinaryMarshaller.ZERO_INT: - plainLazyValLen = 0; + case GridBinaryMarshaller.LONG: + plainLazyValLen = 8; break; - case GridBinaryMarshaller.LONG: - plainLazyValLen = 8; + case GridBinaryMarshaller.ZERO_INT: + plainLazyValLen = 0; break; @@ -580,7 +580,7 @@ public Object parseValue() { return arr[pos++] != 0; case GridBinaryMarshaller.DECIMAL: - plainLazyValLen = /** scale */4 + /** mag len */4 + /** mag bytes count */readInt(4); + plainLazyValLen = /** scale */ 4 + /** mag len */ 4 + /** mag bytes count */ readInt(4); break; diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java index e16bf64618aef..d7d5823498a61 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryFieldsAbstractSelfTest.java @@ -17,7 +17,6 @@ package org.apache.ignite.internal.binary; -import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.binary.BinaryField; import org.apache.ignite.binary.BinaryObject; import org.apache.ignite.binary.BinaryTypeConfiguration; @@ -180,23 +179,7 @@ public void testCharArray() throws Exception { */ public void testInt() throws Exception { check("fInt"); - } - - /** - * Test int field within compact mode on. - * Compact Integer zeroes should become default mode in Apache Ignite 2.0, so this test will be redundant. - * - * @deprecated Should be removed in Apache Ignite 2.0. - * @throws Exception If failed. 
- */ - @Deprecated - public void testOIntZero() throws Exception { - System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, "true"); - - check("fInt"); check("fIntZero"); - - System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES); } /** @@ -215,23 +198,7 @@ public void testIntArray() throws Exception { */ public void testLong() throws Exception { check("fLong"); - } - - /** - * Test long field within compact mode on. - * Compact Long zeroes should become default mode in Apache Ignite 2.0, so this test will be redundant. - * - * @deprecated Should be removed in Apache Ignite 2.0. - * @throws Exception If failed. - */ - @Deprecated - public void testLongZero() throws Exception { - System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, "true"); - - check("fLong"); check("fLongZero"); - - System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES); } /** @@ -464,27 +431,27 @@ private void check0(String fieldName, TestContext ctx, boolean exists) throws Ex Object expVal = U.field(ctx.obj, fieldName); if (val instanceof BinaryObject) - val = ((BinaryObject)val).deserialize(); + val = ((BinaryObject) val).deserialize(); if (val != null && val.getClass().isArray()) { assertNotNull(expVal); if (val instanceof byte[]) - assertTrue(Arrays.equals((byte[])expVal, (byte[])val)); + assertTrue(Arrays.equals((byte[]) expVal, (byte[]) val)); else if (val instanceof boolean[]) - assertTrue(Arrays.equals((boolean[])expVal, (boolean[])val)); + assertTrue(Arrays.equals((boolean[]) expVal, (boolean[]) val)); else if (val instanceof short[]) - assertTrue(Arrays.equals((short[])expVal, (short[])val)); + assertTrue(Arrays.equals((short[]) expVal, (short[]) val)); else if (val instanceof char[]) - assertTrue(Arrays.equals((char[])expVal, (char[])val)); + assertTrue(Arrays.equals((char[]) expVal, (char[]) val)); else if (val instanceof int[]) - assertTrue(Arrays.equals((int[])expVal, (int[])val)); + assertTrue(Arrays.equals((int[]) expVal, (int[]) val)); else if (val instanceof long[]) - assertTrue(Arrays.equals((long[])expVal, (long[])val)); + assertTrue(Arrays.equals((long[]) expVal, (long[]) val)); else if (val instanceof float[]) - assertTrue(Arrays.equals((float[])expVal, (float[])val)); + assertTrue(Arrays.equals((float[]) expVal, (float[]) val)); else if (val instanceof double[]) - assertTrue(Arrays.equals((double[])expVal, (double[])val)); + assertTrue(Arrays.equals((double[]) expVal, (double[]) val)); else { Object[] expVal0 = (Object[])expVal; Object[] val0 = (Object[])val; @@ -668,14 +635,14 @@ public TestObject(int ignore) { fFloat = 6.6f; fDouble = 7.7; - fByteArr = new byte[] {1, 2}; - fBoolArr = new boolean[] {true, false}; - fShortArr = new short[] {2, 3}; - fCharArr = new char[] {3, 4}; - fIntArr = new int[] {4, 5}; - fLongArr = new long[] {5, 6}; - fFloatArr = new float[] {6.6f, 7.7f}; - fDoubleArr = new double[] {7.7, 8.8}; + fByteArr = new byte[] { 1, 2 }; + fBoolArr = new boolean[] { true, false }; + fShortArr = new short[] { 2, 3 }; + fCharArr = new char[] { 3, 4 }; + fIntArr = new int[] { 4, 5 }; + fLongArr = new long[] { 5, 6 }; + fFloatArr = new float[] { 6.6f, 7.7f }; + fDoubleArr = new double[] { 7.7, 8.8 }; fString = "8"; fDate = new Date(); @@ -683,15 +650,15 @@ public TestObject(int ignore) { fUuid = UUID.randomUUID(); fDecimal = new BigDecimal(9); - fStringArr = new String[] {"8", "9"}; - fDateArr = new Date[] {new Date(), new Date(new Date().getTime() + 1)}; + fStringArr = new String[] { "8", "9" }; + 
fDateArr = new Date[] { new Date(), new Date(new Date().getTime() + 1) }; fTimestampArr = - new Timestamp[] {new Timestamp(new Date().getTime() + 1), new Timestamp(new Date().getTime() + 2)}; - fUuidArr = new UUID[] {UUID.randomUUID(), UUID.randomUUID()}; - fDecimalArr = new BigDecimal[] {new BigDecimal(9), new BigDecimal(10)}; + new Timestamp[] { new Timestamp(new Date().getTime() + 1), new Timestamp(new Date().getTime() + 2) }; + fUuidArr = new UUID[] { UUID.randomUUID(), UUID.randomUUID() }; + fDecimalArr = new BigDecimal[] { new BigDecimal(9), new BigDecimal(10) }; fObj = new TestInnerObject(10); - fObjArr = new TestInnerObject[] {new TestInnerObject(10), new TestInnerObject(11)}; + fObjArr = new TestInnerObject[] { new TestInnerObject(10), new TestInnerObject(11) }; } } diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java index 226a67c729cbb..8c8269cb8fdab 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerSelfTest.java @@ -51,10 +51,8 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListSet; - import junit.framework.Assert; import org.apache.ignite.IgniteCheckedException; -import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.binary.BinaryBasicIdMapper; import org.apache.ignite.binary.BinaryBasicNameMapper; import org.apache.ignite.binary.BinaryCollectionFactory; @@ -75,7 +73,6 @@ import org.apache.ignite.binary.Binarylizable; import org.apache.ignite.configuration.BinaryConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; -import org.apache.ignite.internal.IgniteProperties; import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl; import org.apache.ignite.internal.processors.cache.CacheObjectContext; import org.apache.ignite.internal.util.GridUnsafe; @@ -857,6 +854,8 @@ public void testVoid() throws Exception { assertEquals(clazz, marshalUnmarshal(clazz)); } + + /** * */ diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java index d6bce7e102164..c1d9974837fb3 100644 --- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite.java @@ -26,7 +26,6 @@ import org.apache.ignite.internal.binary.BinaryFieldsOffheapSelfTest; import org.apache.ignite.internal.binary.BinaryFooterOffsetsHeapSelfTest; import org.apache.ignite.internal.binary.BinaryFooterOffsetsOffheapSelfTest; -import org.apache.ignite.internal.binary.BinaryMarshallerCompactZeroesSelfTest; import org.apache.ignite.internal.binary.BinaryMarshallerSelfTest; import org.apache.ignite.internal.binary.BinaryObjectBuilderAdditionalSelfTest; import org.apache.ignite.internal.binary.BinaryObjectBuilderDefaultMappersSelfTest; @@ -90,7 +89,6 @@ public static TestSuite suite() throws Exception { suite.addTestSuite(BinaryTreeSelfTest.class); suite.addTestSuite(BinaryMarshallerSelfTest.class); - suite.addTestSuite(BinaryMarshallerCompactZeroesSelfTest.class); suite.addTestSuite(BinaryConfigurationConsistencySelfTest.class); 
suite.addTestSuite(GridBinaryMarshallerCtxDisabledSelfTest.class); suite.addTestSuite(BinaryObjectBuilderDefaultMappersSelfTest.class); diff --git a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite2.java similarity index 55% rename from modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java rename to modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite2.java index a034c65417c5f..d48a13ada46a2 100644 --- a/modules/core/src/test/java/org/apache/ignite/internal/binary/BinaryMarshallerCompactZeroesSelfTest.java +++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteBinaryObjectsTestSuite2.java @@ -15,31 +15,25 @@ * limitations under the License. */ -package org.apache.ignite.internal.binary; +package org.apache.ignite.testsuites; +import junit.framework.TestSuite; import org.apache.ignite.IgniteSystemProperties; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertNotEquals; - /** - * Binary marshaller tests with compact mode on. - * Compact Long zeroes should become default mode in Apache Ignite 2.0, so this test will be redundant. + * Test for binary objects stored in cache using {@link IgniteSystemProperties#IGNITE_BINARY_COMPACT_ZEROES} * - * @deprecated Should be removed in Apache Ignite 2.0. + * @deprecated IGNITE_BINARY_COMPACT_ZEROES should be default mode in Apache Ignite 2.0, so this test will be redundant. */ @Deprecated -public class BinaryMarshallerCompactZeroesSelfTest extends BinaryMarshallerSelfTest { - - @Override protected void beforeTestsStarted() throws Exception { - super.beforeTestsStarted(); - - System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES, "true"); - } - - @Override protected void afterTestsStopped() throws Exception { - super.afterTestsStopped(); - - System.clearProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_INT_ZEROES); +public class IgniteBinaryObjectsTestSuite2 extends TestSuite { + /** + * @return Suite. + * @throws Exception If failed. + */ + public static TestSuite suite() throws Exception { + System.setProperty(IgniteSystemProperties.IGNITE_BINARY_COMPACT_ZEROES, "true"); + + return IgniteBinaryObjectsTestSuite.suite(); } } From 15a4a2689862e7798637d206456d5376bea9b415 Mon Sep 17 00:00:00 2001 From: "Andrey V. Mashenkov" Date: Tue, 4 Oct 2016 13:59:36 +0300 Subject: [PATCH 69/69] Minors --- .../org/apache/ignite/internal/binary/BinaryWriterExImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java index 4a239c6baf779..3d1dcc3886d5f 100644 --- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java +++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryWriterExImpl.java @@ -55,8 +55,8 @@ public class BinaryWriterExImpl implements BinaryWriter, BinaryRawWriterEx, Obje * See {@link IgniteSystemProperties#IGNITE_BINARY_COMPACT_ZEROES} * @deprecated This mode should be default mode in Apache Ignite 2.0 and option should be removed. 
* */ - public static final boolean IGNITE_BINARY_COMPACT_ZEROES = - !IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_ZEROES); + private static final boolean IGNITE_BINARY_COMPACT_ZEROES = + IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_BINARY_COMPACT_ZEROES); /** */ private final BinaryContext ctx;