diff --git a/MergerCli/IProcess.cs b/MergerCli/IProcess.cs index 08885f77..ec2eb8e9 100644 --- a/MergerCli/IProcess.cs +++ b/MergerCli/IProcess.cs @@ -1,12 +1,11 @@ using MergerCli.Utils; using MergerLogic.DataTypes; -using MergerLogic.ImageProcessing; namespace MergerCli { internal interface IProcess { - void Start(TileFormat targetFormat, IData baseData, IData newData, BatchStatusManager batchStatusManager); + void Start(IData baseData, IData newData, BatchStatusManager batchStatusManager); void Validate(IData baseData, IData newData, string? incompleteBatchIdentifier = null); } diff --git a/MergerCli/ISourceParser.cs b/MergerCli/ISourceParser.cs index b2d584fb..2f2a379f 100644 --- a/MergerCli/ISourceParser.cs +++ b/MergerCli/ISourceParser.cs @@ -1,10 +1,9 @@ using MergerLogic.DataTypes; -using MergerLogic.ImageProcessing; namespace MergerCli { internal interface ISourceParser { - List ParseSources(string[] args, int batchSize, out TileFormat format); + List ParseSources(string[] args, int batchSize); } } diff --git a/MergerCli/Process.cs b/MergerCli/Process.cs index 2e778cf5..e7ccb330 100644 --- a/MergerCli/Process.cs +++ b/MergerCli/Process.cs @@ -6,6 +6,7 @@ using Microsoft.Extensions.Logging; using System.Collections.Concurrent; using System.Reflection; +using static MergerLogic.ImageProcessing.TileFormatStrategy; namespace MergerCli { @@ -15,6 +16,7 @@ internal class Process : IProcess private readonly IConfigurationManager _configManager; private readonly ITileMerger _tileMerger; private readonly ILogger _logger; + private TileFormatStrategy _tileFormatStrategy; static readonly object _locker = new object(); public Process(IConfigurationManager configuration, ITileMerger tileMerger, ILogger logger) @@ -22,9 +24,13 @@ public Process(IConfigurationManager configuration, ITileMerger tileMerger, ILog this._configManager = configuration; this._tileMerger = tileMerger; this._logger = logger; + + FormatStrategy outputFormatStrategy = 
this._configManager.GetConfiguration("TILE", "outputFormatStrategy"); + TileFormat outputFormat = this._configManager.GetConfiguration("TILE", "outputFormat"); + this._tileFormatStrategy = new TileFormatStrategy(outputFormat, outputFormatStrategy); } - public void Start(TileFormat targetFormat, IData baseData, IData newData, BatchStatusManager batchStatusManager) + public void Start(IData baseData, IData newData, BatchStatusManager batchStatusManager) { long totalTileCount = newData.TileCount(); batchStatusManager.InitializeLayer(newData.Path); @@ -36,6 +42,10 @@ public void Start(TileFormat targetFormat, IData baseData, IData newData, BatchS { resumeMode = true; this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] Resume mode activated, resume batchId: {resumeBatchIdentifier}"); + + // Set strategy from status manager + this._tileFormatStrategy = new TileFormatStrategy(batchStatusManager.Format, batchStatusManager.Strategy); + // fix resume progress bug for gpkg, fs and web, fixing it for s3 requires storing additional data. if (newData.Type != DataType.S3) { @@ -47,13 +57,14 @@ public void Start(TileFormat targetFormat, IData baseData, IData newData, BatchS this._logger.LogInformation($"[{MethodBase.GetCurrentMethod().Name}] Total amount of tiles to merge: {totalTileCount - tileProgressCount}"); var uploadOnly = this._configManager.GetConfiguration("GENERAL", "uploadOnly"); - bool shouldUpscale = !(uploadOnly || baseData.IsNew); - _getTileByCoord = uploadOnly || baseData.IsNew ? + uploadOnly = uploadOnly || baseData.IsNew; + bool shouldUpscale = !uploadOnly; + _getTileByCoord = uploadOnly ? 
(_) => null : (targetCoords) => baseData.GetCorrespondingTile(targetCoords, shouldUpscale); - ParallelRun(targetFormat, baseData, newData, batchStatusManager, + ParallelRun(baseData, newData, batchStatusManager, tileProgressCount, totalTileCount, resumeBatchIdentifier, resumeMode, pollForBatch); batchStatusManager.CompleteLayer(newData.Path); @@ -97,7 +108,7 @@ public void Start(TileFormat targetFormat, IData baseData, IData newData, BatchS } } - private void ProcessBatch(TileFormat targetFormat, IData baseData, List newTiles, ref long tileProgressCount, long totalTileCount,ref bool pollForBatch) + private void ProcessBatch(IData baseData, List newTiles, ref long tileProgressCount, long totalTileCount,ref bool pollForBatch) { ConcurrentBag tiles = new ConcurrentBag(); @@ -116,12 +127,11 @@ private void ProcessBatch(TileFormat targetFormat, IData baseData, List ne () => _getTileByCoord(targetCoords), () => newTile }; - byte[]? image = this._tileMerger.MergeTiles(correspondingTileBuilders, targetCoords, targetFormat); + Tile? tile = this._tileMerger.MergeTiles(correspondingTileBuilders, targetCoords, this._tileFormatStrategy); - if (image != null) + if (tile != null) { - newTile = new Tile(newTile.Z, newTile.X, newTile.Y, image); - tiles.Add(newTile); + tiles.Add(tile); } } @@ -131,7 +141,7 @@ private void ProcessBatch(TileFormat targetFormat, IData baseData, List ne this._logger.LogInformation($"[{MethodBase.GetCurrentMethod().Name}] Tile Count: {tileProgressCount} / {totalTileCount}"); } - private void ParallelRun(TileFormat targetFormat, IData baseData, IData newData, + private void ParallelRun(IData baseData, IData newData, BatchStatusManager batchStatusManager, long tileProgressCount, long totalTileCount, string? 
resumeBatchIdentifier, bool resumeMode,bool pollForBatch) { var numOfThreads = this._configManager.GetConfiguration("GENERAL", "parallel", "numOfThreads"); @@ -140,7 +150,7 @@ private void ParallelRun(TileFormat targetFormat, IData baseData, IData newData, while (tileProgressCount != totalTileCount && pollForBatch) { var batchResult = ManageBatchIdentifier(batchStatusManager, newData, resumeBatchIdentifier, totalTileCount, ref resumeMode); - ProcessBatch(targetFormat, baseData, batchResult.newTiles, ref tileProgressCount, + ProcessBatch(baseData, batchResult.newTiles, ref tileProgressCount, totalTileCount, ref pollForBatch); batchStatusManager.CompleteBatch(newData.Path, batchResult.currentBatchIdentifier, tileProgressCount); } diff --git a/MergerCli/Program.cs b/MergerCli/Program.cs index 7393575c..32fdc752 100644 --- a/MergerCli/Program.cs +++ b/MergerCli/Program.cs @@ -7,6 +7,7 @@ using Microsoft.Extensions.Logging; using System.Diagnostics; using System.Runtime.Loader; +using static MergerLogic.ImageProcessing.TileFormatStrategy; namespace MergerCli { @@ -38,6 +39,9 @@ private static void Main(string[] args) var config = container.GetRequiredService(); var pathUtils = container.GetRequiredService(); string outputPath = pathUtils.RemoveTrailingSlash(config.GetConfiguration("GENERAL", "resumeOutputFolder")); + + FormatStrategy outputFormatStrategy = config.GetConfiguration("TILE", "outputFormatStrategy"); + TileFormat outputFormat = config.GetConfiguration("TILE", "outputFormat"); _resumeFilePath = $"{outputPath}/status.json"; // If should resume, load status manager file and update states, else create from arguments @@ -48,17 +52,16 @@ private static void Main(string[] args) } else { - _batchStatusManager = new BatchStatusManager(args); + _batchStatusManager = new BatchStatusManager(args, outputFormat, outputFormatStrategy); } PrepareStatusManger(); int batchSize = int.Parse(args[1]); - TileFormat format; List sources; try { var parser = 
container.GetRequiredService(); - sources = parser.ParseSources(args, batchSize, out format); + sources = parser.ParseSources(args, batchSize); } catch (Exception ex) { @@ -101,7 +104,7 @@ private static void Main(string[] args) continue; } - process.Start(format, baseData, sources[i], _batchStatusManager); + process.Start(baseData, sources[i], _batchStatusManager); baseData.IsNew = false; stopWatch.Stop(); @@ -161,7 +164,7 @@ web sources (cant be base source): gpkg [bbox - in format 'minX,minY,maxX,maxY' - required base] [--1x1] [--UL / --LL] **** please note all layers must be 2X1 EPSG:4326 layers **** - merge sources: {programName} [...] + merge sources: {programName} [...] Examples: {programName} 1000 gpkg area1.gpkg gpkg area2.gpkg {programName} 1000 s3 /path1/on/s3 s3 /path2/on/s3 diff --git a/MergerCli/SourceParser.cs b/MergerCli/SourceParser.cs index a380dc26..708e842d 100644 --- a/MergerCli/SourceParser.cs +++ b/MergerCli/SourceParser.cs @@ -26,16 +26,11 @@ private void LogDataErrorAndExit(bool isBase, Exception e) { Environment.Exit(1); } - public List ParseSources(string[] args, int batchSize, out TileFormat format) + public List ParseSources(string[] args, int batchSize) { List sources = new List(); - if (!TileFormat.TryParse(args[2], true, out format)) - { - this._logger.LogError($"invalid target tile format: {args[2]}"); - Environment.Exit(1); - } - int idx = 3; + int idx = 2; bool isBase = true; while (idx < args.Length) { diff --git a/MergerCli/appsettings.json b/MergerCli/appsettings.json index 0826e828..bcf7f3ec 100644 --- a/MergerCli/appsettings.json +++ b/MergerCli/appsettings.json @@ -7,6 +7,10 @@ "numOfThreads": 1 } }, + "TILE": { + "outputFormatStrategy": "fixed", + "outputFormat": "jpeg" + }, "GPKG": { "vacuum": false }, diff --git a/MergerCli/utils/BatchStatusManager.cs b/MergerCli/utils/BatchStatusManager.cs index 7ba45edb..7f017fa2 100644 --- a/MergerCli/utils/BatchStatusManager.cs +++ b/MergerCli/utils/BatchStatusManager.cs @@ -1,6 +1,9 
@@ -using System.Collections.Concurrent; -using System.Text.Json; +using MergerLogic.ImageProcessing; +using Newtonsoft.Json; +using Newtonsoft.Json.Converters; +using System.Collections.Concurrent; using System.Text.Json.Serialization; +using static MergerLogic.ImageProcessing.TileFormatStrategy; namespace MergerCli.Utils { internal class BatchStatusManager @@ -38,16 +41,30 @@ public BaseLayerStatus() [JsonInclude] public Dictionary States { get; private set; } + [JsonInclude] + public FormatStrategy Strategy { get; private set; } + + [JsonInclude] + public TileFormat Format { get; private set; } + [JsonInclude] public string[] Command { get; private set; } static readonly object _locker = new object(); - public BatchStatusManager(string[] command) + [System.Text.Json.Serialization.JsonIgnore] + private JsonSerializerSettings _jsonSerializerSettings; + + public BatchStatusManager(string[] command, TileFormat format, FormatStrategy strategy = FormatStrategy.Fixed) { this.BaseLayer = new BaseLayerStatus(); this.States = new Dictionary(); + this.Strategy = strategy; + this.Format = format; this.Command = command; + + this._jsonSerializerSettings = new JsonSerializerSettings(); + this._jsonSerializerSettings.Converters.Add(new StringEnumConverter()); } public void SetCurrentBatch(string layer, string? batchIdentifier) @@ -177,12 +194,15 @@ public void ResetBatchStatus() public override string ToString() { - return JsonSerializer.Serialize(this); + return JsonConvert.SerializeObject(this, this._jsonSerializerSettings); } public static BatchStatusManager FromJson(string json) { - BatchStatusManager? batchStatusManager = JsonSerializer.Deserialize(json); + JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings(); + jsonSerializerSettings.Converters.Add(new StringEnumConverter()); + + BatchStatusManager? 
batchStatusManager = JsonConvert.DeserializeObject(json, jsonSerializerSettings)!; if (batchStatusManager == null) { throw new Exception("invalid batch status manager json"); diff --git a/MergerLogic/Batching/Tile.cs b/MergerLogic/Batching/Tile.cs index 52ee5e12..de28907b 100644 --- a/MergerLogic/Batching/Tile.cs +++ b/MergerLogic/Batching/Tile.cs @@ -1,3 +1,4 @@ +using ImageMagick; using MergerLogic.DataTypes; using MergerLogic.ImageProcessing; using System.ComponentModel.DataAnnotations; @@ -6,6 +7,10 @@ namespace MergerLogic.Batching { public delegate Tile? CorrespondingTileBuilder(); + // TODO: add to README that the Merger assumes EPSG:4326 + /// + /// Class Tile represents a position of an image in the EPSG:4326 geographic tiling scheme + /// public class Tile { public int Z @@ -48,6 +53,15 @@ public Tile(Coord cords, byte[] data) this._data = data; } + public Tile(Coord cords, IMagickImage image) + { + this.Z = cords.Z; + this.X = cords.X; + this.Y = cords.Y; + this.Format = ImageFormatter.GetTileFormat(image) ?? throw new ValidationException($"Cannot create tile {this}, data is in invalid format"); + this._data = image.ToByteArray(); + } + public bool HasCoords(int z, int x, int y) { return z == this.Z && x == this.X && y == this.Y; @@ -72,12 +86,12 @@ public virtual byte[] GetImageBytes() return this._data; } + public int Size() { + return this._data.Length; + } + public void ConvertToFormat(TileFormat format) { - if (this.Format == format) { - return; - } - this._data = ImageFormatter.ConvertToFormat(this._data, format); this.Format = format; } diff --git a/MergerLogic/ImageProcessing/ITileMerger.cs b/MergerLogic/ImageProcessing/ITileMerger.cs index acc82b70..e7428cc5 100644 --- a/MergerLogic/ImageProcessing/ITileMerger.cs +++ b/MergerLogic/ImageProcessing/ITileMerger.cs @@ -5,6 +5,6 @@ namespace MergerLogic.ImageProcessing { public interface ITileMerger { - byte[]? MergeTiles(List tiles, Coord targetCoords, TileFormat format); + Tile? 
MergeTiles(List tiles, Coord targetCoords, TileFormatStrategy strategy); } } diff --git a/MergerLogic/ImageProcessing/ImageFormatter.cs b/MergerLogic/ImageProcessing/ImageFormatter.cs index 92f3a083..50c75d95 100644 --- a/MergerLogic/ImageProcessing/ImageFormatter.cs +++ b/MergerLogic/ImageProcessing/ImageFormatter.cs @@ -9,11 +9,35 @@ public enum TileFormat [EnumMember(Value = "jpeg")] Jpeg, } + public class TileFormatStrategy { + public enum FormatStrategy { + [EnumMember(Value = "fixed")] Fixed, + [EnumMember(Value = "mixed")] Mixed, + } + + private FormatStrategy _strategy; + private TileFormat _format; + + public TileFormatStrategy(TileFormat format, FormatStrategy strategy = FormatStrategy.Fixed) + { + this._strategy = strategy; + this._format = format; + } + + public TileFormat ApplyStrategy(TileFormat format) { + if (this._strategy == FormatStrategy.Fixed) { + return this._format; + } + + return format; + } + } + public class ImageFormatter { public static byte[] ConvertToFormat(byte[] tile, TileFormat format) { - var currentFormat = GetTileFormat(tile); + TileFormat? currentFormat = GetTileFormat(tile); if (currentFormat != format) { using (var image = new MagickImage(tile)) @@ -77,6 +101,22 @@ public static void ConvertToFormat(IMagickImage image, TileFormat format) return null; } + public static TileFormat? GetTileFormat(IMagickImage image) { + if(image.IsOpaque) { + image.Format = MagickFormat.Jpeg; + } + + if (image.Format == MagickFormat.Jpg || image.Format == MagickFormat.Jpeg) { + return TileFormat.Jpeg; + } + + if (image.Format == MagickFormat.Png) { + return TileFormat.Png; + } + + return null; + } + public static void RemoveImageDateAttributes(IMagickImage? 
image) { if (image == null) diff --git a/MergerLogic/ImageProcessing/TileMerger.cs b/MergerLogic/ImageProcessing/TileMerger.cs index a1295a68..69a5461c 100644 --- a/MergerLogic/ImageProcessing/TileMerger.cs +++ b/MergerLogic/ImageProcessing/TileMerger.cs @@ -18,10 +18,11 @@ public TileMerger(ITileScaler tileScaler, ILogger logger) this._tileScaler = tileScaler; } - public byte[]? MergeTiles(List tiles, Coord targetCoords, TileFormat format) + public Tile? MergeTiles(List tiles, Coord targetCoords, TileFormatStrategy strategy) { var images = this.GetImageList(tiles, targetCoords); - byte[] data; + IMagickImage image; + switch (images.Count) { case 0: @@ -29,12 +30,10 @@ public TileMerger(ITileScaler tileScaler, ILogger logger) this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] No images where found return null"); return null; case 1: - ImageFormatter.ConvertToFormat(images[0], format); ImageFormatter.RemoveImageDateAttributes(images[0]); - data = images[0].ToByteArray(); - images[0].Dispose(); + image = images[0]; this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] 1 image found"); - return data; + break; default: using (var imageCollection = new MagickImageCollection()) { @@ -50,13 +49,17 @@ public TileMerger(ITileScaler tileScaler, ILogger logger) mergedImage.ColorSpace = ColorSpace.sRGB; mergedImage.ColorType = mergedImage.HasAlpha ? 
ColorType.TrueColorAlpha : ColorType.TrueColor; - ImageFormatter.ConvertToFormat(mergedImage, format); - var mergedImageBytes = mergedImage.ToByteArray(); + image = new MagickImage(mergedImage); this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] 'imageMagic' merging finished"); - return mergedImageBytes; } } + break; } + + Tile tile = new Tile(targetCoords, image); + image.Dispose(); + tile.ConvertToFormat(strategy.ApplyStrategy(tile.Format)); + return tile; } private List GetImageList(List tiles, Coord targetCoords) diff --git a/MergerLogic/Utils/ImageUtils.cs b/MergerLogic/Utils/ImageUtils.cs index 5d19c9a0..8d11a8d5 100644 --- a/MergerLogic/Utils/ImageUtils.cs +++ b/MergerLogic/Utils/ImageUtils.cs @@ -12,8 +12,15 @@ public static bool IsTransparent(MagickImage image) } using var pixels = image.GetPixels(); - // Check pixels to see if at least one of them is transparent (or partially transparent) - return pixels.Select(pixel => pixel.ToColor()).Any(color => color?.A != 255); + + foreach (var pixel in pixels) + { + if (pixel.ToColor()?.A != 255) { + return true; + } + } + + return false; } public static bool IsFullyTransparent(MagickImage image) diff --git a/MergerLogicUnitTests/ImageProcessing/ImageFormatterTest.cs b/MergerLogicUnitTests/ImageProcessing/ImageFormatterTest.cs index 62258d4c..280908aa 100644 --- a/MergerLogicUnitTests/ImageProcessing/ImageFormatterTest.cs +++ b/MergerLogicUnitTests/ImageProcessing/ImageFormatterTest.cs @@ -38,6 +38,30 @@ public static IEnumerable GetConvertToFormatTestParameters() TileFormat.Jpeg, true, }; + + yield return new object[] { + File.ReadAllBytes("5_8bit.png"), + TileFormat.Png, + true, + }; + + yield return new object[] { + File.ReadAllBytes("5_24bit.png"), + TileFormat.Png, + true, + }; + + yield return new object[] { + File.ReadAllBytes("5_32bit.png"), + TileFormat.Png, + true, + }; + + yield return new object[] { + File.ReadAllBytes("5_64bit.png"), + TileFormat.Png, + true, + }; } [TestMethod] diff --git 
a/MergerLogicUnitTests/ImageProcessing/TestImages/2_1_merged.jpeg b/MergerLogicUnitTests/ImageProcessing/TestImages/2_1_merged.jpeg new file mode 100644 index 00000000..670365b9 Binary files /dev/null and b/MergerLogicUnitTests/ImageProcessing/TestImages/2_1_merged.jpeg differ diff --git a/MergerLogicUnitTests/ImageProcessing/TestImages/5.jpeg b/MergerLogicUnitTests/ImageProcessing/TestImages/5.jpeg new file mode 100644 index 00000000..e5a91519 Binary files /dev/null and b/MergerLogicUnitTests/ImageProcessing/TestImages/5.jpeg differ diff --git a/MergerLogicUnitTests/ImageProcessing/TileMergerTest.cs b/MergerLogicUnitTests/ImageProcessing/TileMergerTest.cs index 829564e0..fc97a008 100644 --- a/MergerLogicUnitTests/ImageProcessing/TileMergerTest.cs +++ b/MergerLogicUnitTests/ImageProcessing/TileMergerTest.cs @@ -5,7 +5,6 @@ using Microsoft.Extensions.Logging; using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; -using System; using System.Collections.Generic; using System.IO; using System.Linq; @@ -52,31 +51,87 @@ public static IEnumerable GetMergeTilesTestParameters() new Tile[] { new Tile(targetCoordHighZoom, File.ReadAllBytes("2.png")), new Tile(targetCoordHighZoom, File.ReadAllBytes("1.png")) - }, targetCoordHighZoom, TileFormat.Png, - File.ReadAllBytes("2_1_merged.png"), + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Jpeg), TileFormat.Jpeg, + File.ReadAllBytes("2_1_merged.jpeg"), }; yield return new object[] { new Tile[] { new Tile(targetCoordHighZoom, File.ReadAllBytes("3.jpeg")), new Tile(targetCoordHighZoom, File.ReadAllBytes("1.png")) - }, targetCoordHighZoom, TileFormat.Jpeg, + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Jpeg), TileFormat.Jpeg, File.ReadAllBytes("3_1_merged.jpeg"), }; + yield return new object[] { + new Tile[] { + new Tile(targetCoordHighZoom, File.ReadAllBytes("1.png")), + new Tile(targetCoordHighZoom, File.ReadAllBytes("3.jpeg")) + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Jpeg), 
TileFormat.Jpeg, + File.ReadAllBytes("3.jpeg"), + }; + yield return new object[] { new Tile[] { new Tile(targetCoordHighZoom, File.ReadAllBytes("3.jpeg")), new Tile(targetCoordHighZoom, File.ReadAllBytes("4.jpeg")) - }, targetCoordHighZoom, TileFormat.Jpeg, + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Jpeg), TileFormat.Jpeg, File.ReadAllBytes("3_4_merged.jpeg"), }; + yield return new object[] { + new Tile[] { + new Tile(targetCoordHighZoom, File.ReadAllBytes("2.png")), + new Tile(targetCoordHighZoom, File.ReadAllBytes("1.png")) + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Png), TileFormat.Png, + File.ReadAllBytes("2_1_merged.png"), + }; + + yield return new object[] { + new Tile[] { + new Tile(targetCoordHighZoom, File.ReadAllBytes("2.png")), + new Tile(targetCoordHighZoom, File.ReadAllBytes("1.png")) + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Png, TileFormatStrategy.FormatStrategy.Mixed), + TileFormat.Png, File.ReadAllBytes("2_1_merged.png"), + }; + + yield return new object[] { + new Tile[] { + new Tile(targetCoordHighZoom, File.ReadAllBytes("2.png")), + new Tile(targetCoordHighZoom, File.ReadAllBytes("1.png")) + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Jpeg, TileFormatStrategy.FormatStrategy.Mixed), + TileFormat.Png, File.ReadAllBytes("2_1_merged.png"), + }; + + yield return new object[] { + new Tile[] { + new Tile(targetCoordHighZoom, File.ReadAllBytes("3.jpeg")), + new Tile(targetCoordHighZoom, File.ReadAllBytes("1.png")) + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Png, TileFormatStrategy.FormatStrategy.Mixed), + TileFormat.Jpeg, File.ReadAllBytes("3_1_merged.jpeg"), + }; + + yield return new object[] { + new Tile[] { + new Tile(targetCoordHighZoom, File.ReadAllBytes("3.jpeg")), + new Tile(targetCoordHighZoom, File.ReadAllBytes("4.jpeg")) + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Png, TileFormatStrategy.FormatStrategy.Mixed), + TileFormat.Jpeg, 
File.ReadAllBytes("3_4_merged.jpeg"), + }; + + yield return new object[] { + new Tile[] { + new Tile(targetCoordHighZoom, File.ReadAllBytes("5.png")), + new Tile(targetCoordHighZoom, File.ReadAllBytes("5.png")) + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Png, TileFormatStrategy.FormatStrategy.Mixed), + TileFormat.Jpeg, File.ReadAllBytes("5.jpeg"), + }; + yield return new object[] { new Tile[] { new Tile(targetCoordHighZoom, File.ReadAllBytes("3.jpeg")), new Tile(targetCoordLowZoom, File.ReadAllBytes("1.png")) - }, targetCoordHighZoom, TileFormat.Jpeg, + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Jpeg), TileFormat.Jpeg, File.ReadAllBytes("3_1_merged_upscaled_5_15.jpeg"), }; @@ -84,7 +139,7 @@ public static IEnumerable GetMergeTilesTestParameters() new Tile[] { new Tile(targetCoordHighZoom, File.ReadAllBytes("3.jpeg")), new Tile(targetCoordMediumZoom, File.ReadAllBytes("1.png")) - }, targetCoordHighZoom, TileFormat.Jpeg, + }, targetCoordHighZoom, new TileFormatStrategy(TileFormat.Jpeg), TileFormat.Jpeg, File.ReadAllBytes("3_1_merged_upscaled_14_15.jpeg"), }; } @@ -92,13 +147,14 @@ public static IEnumerable GetMergeTilesTestParameters() [TestMethod] [TestCategory("MergeTiles")] [DynamicData(nameof(GetMergeTilesTestParameters), DynamicDataSourceType.Method)] - public void MergeTiles(Tile[] tiles, Coord targetCoord, TileFormat tileFormat, byte[] expectedTileBytes) + public void MergeTiles(Tile[] tiles, Coord targetCoord, TileFormatStrategy strategy, TileFormat expectedForamt, byte[] expectedTileBytes) { var tileBuilders = tiles.Select(tile => () => tile).ToList(); - var result = this._testTileMerger.MergeTiles(tileBuilders, targetCoord, tileFormat); + var result = this._testTileMerger.MergeTiles(tileBuilders, targetCoord, strategy); Assert.IsNotNull(result); - CollectionAssert.AreEqual(expectedTileBytes, result); + Assert.AreEqual(expectedForamt, result.Format); + CollectionAssert.AreEqual(expectedTileBytes, result.GetImageBytes()); } 
#endregion diff --git a/MergerService/Models/Tasks/Task.cs b/MergerService/Models/Tasks/Task.cs index 901568e4..2f5fb409 100644 --- a/MergerService/Models/Tasks/Task.cs +++ b/MergerService/Models/Tasks/Task.cs @@ -6,6 +6,7 @@ using System.ComponentModel; using System.Runtime.Serialization; using System.Text.Json.Serialization; +using static MergerLogic.ImageProcessing.TileFormatStrategy; namespace MergerService.Models.Tasks { @@ -22,6 +23,10 @@ public enum Status public class MergeMetadata { [JsonInclude] public TileFormat TargetFormat { get; } + + [DefaultValue(FormatStrategy.Fixed)] + [JsonProperty(DefaultValueHandling=DefaultValueHandling.Populate)] + [JsonInclude] public FormatStrategy OutputFormatStrategy { get; } [JsonInclude] public bool IsNewTarget { get; } @@ -32,9 +37,10 @@ public class MergeMetadata [System.Text.Json.Serialization.JsonIgnore] private JsonSerializerSettings _jsonSerializerSettings; - public MergeMetadata(TileFormat targetFormat, bool isNewTarget, TileBounds[] batches, Source[] sources) + public MergeMetadata(TileFormat targetFormat, bool isNewTarget, TileBounds[] batches, Source[] sources, FormatStrategy outputFormatStrategy = FormatStrategy.Fixed) { this.TargetFormat = targetFormat; + this.OutputFormatStrategy = outputFormatStrategy; this.IsNewTarget = isNewTarget; this.Batches = batches; this.Sources = sources; diff --git a/MergerService/Runners/TaskExecutor.cs b/MergerService/Runners/TaskExecutor.cs index d5376959..23e43bea 100644 --- a/MergerService/Runners/TaskExecutor.cs +++ b/MergerService/Runners/TaskExecutor.cs @@ -30,7 +30,7 @@ public class TaskExecutor : ITaskExecutor private readonly bool _shouldValidate; private static readonly int DEFAULT_BATCH_SIZE = 1000; - public TaskExecutor(IDataFactory dataFactory, ITileMerger tileMerger, ITimeUtils timeUtils, MergerLogic.Utils.IConfigurationManager configurationManager, + public TaskExecutor(IDataFactory dataFactory, ITileMerger tileMerger, ITimeUtils timeUtils, IConfigurationManager 
configurationManager, ILogger logger, ActivitySource activitySource, IFileSystem fileSystem, IMetricsProvider metricsProvider) { @@ -86,6 +86,8 @@ public void ExecuteTask(MergeTask task, ITaskUtils taskUtils, string? managerCal Stopwatch mergeRunTimeStopwatch = new Stopwatch(); TimeSpan ts; + TileFormatStrategy strategy = new TileFormatStrategy(metadata.TargetFormat, metadata.OutputFormatStrategy); + bool shouldUpscale = !metadata.IsNewTarget; Func getTileByCoord = metadata.IsNewTarget ? (_, _) => null @@ -168,15 +170,14 @@ public void ExecuteTask(MergeTask task, ITaskUtils taskUtils, string? managerCal correspondingTileBuilders.Add(() => source.GetCorrespondingTile(coord, false)); } var tileMergeStopwatch = Stopwatch.StartNew(); - byte[]? blob = this._tileMerger.MergeTiles(correspondingTileBuilders, coord, - metadata.TargetFormat); + Tile? tile = this._tileMerger.MergeTiles(correspondingTileBuilders, coord, strategy); tileMergeStopwatch.Stop(); this._metricsProvider.MergeTimePerTileHistogram(tileMergeStopwatch.Elapsed.TotalSeconds, metadata.TargetFormat); - if (blob != null) + if (tile != null) { - tiles.Add(new Tile(coord, blob)); - currentBatchBytes += blob.Length; + tiles.Add(tile); + currentBatchBytes += tile.Size(); // Flushes the "tiles" list if it reached the batch size or the batch max bytes // This is done to prevent memory overflow diff --git a/MergerServiceUnitTests/Runners/TaskExecutorTest.cs b/MergerServiceUnitTests/Runners/TaskExecutorTest.cs index 74ca69a0..dd104637 100644 --- a/MergerServiceUnitTests/Runners/TaskExecutorTest.cs +++ b/MergerServiceUnitTests/Runners/TaskExecutorTest.cs @@ -116,16 +116,16 @@ public static IEnumerable GetBatchLimitTestParameters() // When the limit is 2, and there are total 5 tiles, we expect 3 flushes // We set the bytes limit to a high value to avoid it being reached by the test yield return new object[] { - true, 2, 1024 * 1024 * 80, 3, 5 - }; + true, 2, 1024 * 1024 * 80, 3, 5 + }; // Tests that given a batch bytes 
limit, it flushes every time it reaches the limit // When the limit is one byte more then twice the size of the tile, and there are total 7 tiles, we expect 3 flushes // (Flush every 3rd tile, and additional flush at the end) // We disable the size limit to avoid it being used in the test yield return new object[] { - false, 1, (File.ReadAllBytes("tile.jpeg").Length * 2) + 1, 3, 7 - }; + false, 1, (File.ReadAllBytes("tile.jpeg").Length * 2) + 1, 3, 7 + }; } [TestMethod] diff --git a/MergerServiceUnitTests/Utils/TaskUtilsTest.cs b/MergerServiceUnitTests/Utils/TaskUtilsTest.cs index b08834ab..93fde34b 100644 --- a/MergerServiceUnitTests/Utils/TaskUtilsTest.cs +++ b/MergerServiceUnitTests/Utils/TaskUtilsTest.cs @@ -1,4 +1,5 @@ using MergerLogic.Clients; +using MergerLogic.ImageProcessing; using MergerLogic.Utils; using MergerService.Models.Tasks; using MergerService.Utils; @@ -6,14 +7,19 @@ using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; using Newtonsoft.Json; +using System; +using System.Collections.Generic; using System.Diagnostics; +using System.IO; using System.Net.Http; +using static MergerLogic.ImageProcessing.TileFormatStrategy; namespace MergerLogicUnitTests.Utils { [TestClass] [TestCategory("unit")] [TestCategory("utils")] + [DeploymentItem(@"../../../Utils/TestData")] public class TaskUtilsTest { #region mocks @@ -59,11 +65,18 @@ public void WhenGettingTaskByType_ShouldUseCorrectHttpUri() this._httpClientMock.Verify(httpClient => httpClient.PostData(expectedUrl, null, false), Times.Once); } + public static IEnumerable GetBadJsonTestParameters() + { + yield return new object[] { "bad json" }; + yield return new object[] { File.ReadAllText("invalidTask.json") }; + yield return new object[] { File.ReadAllText("invalidTaskStrategy.json") }; + } + [TestMethod] - public void WhenGettingMalformedJsonTask_ShouldReturnNull() + [DynamicData(nameof(GetBadJsonTestParameters), DynamicDataSourceType.Method)] + public void 
WhenGettingMalformedJsonTask_ShouldReturnNull(string json) { - var malformedJsonString = "bad json"; - this._httpClientMock.Setup(httpClient => httpClient.PostData(It.IsAny(), It.IsAny(), It.IsAny())).Returns(malformedJsonString); + this._httpClientMock.Setup(httpClient => httpClient.PostData(It.IsAny(), It.IsAny(), It.IsAny())).Returns(json); var testTaskUtils = new TaskUtils(_configurationManagerMock.Object, _httpClientMock.Object, _taskUtilsLoggerMock.Object, _testActivitySource); @@ -73,6 +86,59 @@ public void WhenGettingMalformedJsonTask_ShouldReturnNull() Assert.IsNull(resultTask); } + public static IEnumerable GetGoodJsonTestParameters() + { + yield return new object[] { File.ReadAllText("validTaskNoStrategy.json") }; + yield return new object[] { File.ReadAllText("validTaskMixedStrategy.json") }; + } + + [TestMethod] + [DynamicData(nameof(GetGoodJsonTestParameters), DynamicDataSourceType.Method)] + public void WhenGettingJsonTask_ShouldReturnTaskObject(string json) + { + int maxAttempts = this._configurationManagerMock.Object.GetConfiguration("TASK", "maxAttempts"); + this._httpClientMock.Setup(httpClient => httpClient.PostData(It.IsAny(), It.IsAny(), It.IsAny())).Returns(json); + + var testTaskUtils = new TaskUtils(_configurationManagerMock.Object, _httpClientMock.Object, _taskUtilsLoggerMock.Object, + _testActivitySource); + var resultTask = testTaskUtils.GetTask("testJobType", "testTaskType"); + + Assert.IsNotNull(resultTask); + + // Validate GUIDs + Assert.IsTrue(Guid.TryParse(resultTask.Id, out _)); + Assert.IsTrue(Guid.TryParse(resultTask.JobId, out _)); + + // Validate dates + Assert.IsTrue(DateTime.TryParse(resultTask.Created.ToString(), out _)); + Assert.IsTrue(DateTime.TryParse(resultTask.Updated.ToString(), out _)); + + Assert.IsNotNull(resultTask.Parameters); + Assert.IsTrue(resultTask.Parameters.Sources?.Length > 0); + Assert.IsTrue(resultTask.Parameters.Batches?.Length > 0); + Assert.IsInstanceOfType(resultTask.Parameters.TargetFormat, 
typeof(TileFormat)); + Assert.IsInstanceOfType(resultTask.Parameters.OutputFormatStrategy, typeof(FormatStrategy)); + + Assert.IsTrue(resultTask.Percentage >= 0 && resultTask.Percentage <= 100); + Assert.IsTrue(resultTask.Attempts >= 0 && resultTask.Attempts <= maxAttempts); + } + + [TestMethod] + public void WhenGettingJsonTaskWithoutStrategy_ShouldHaveFixedDefaultStrategy() + { + string json = File.ReadAllText("validTaskNoStrategy.json"); + int maxAttempts = this._configurationManagerMock.Object.GetConfiguration("TASK", "maxAttempts"); + this._httpClientMock.Setup(httpClient => httpClient.PostData(It.IsAny(), It.IsAny(), It.IsAny())).Returns(json); + + var testTaskUtils = new TaskUtils(_configurationManagerMock.Object, _httpClientMock.Object, _taskUtilsLoggerMock.Object, + _testActivitySource); + var resultTask = testTaskUtils.GetTask("testJobType", "testTaskType"); + + Assert.IsNotNull(resultTask); + Assert.IsInstanceOfType(resultTask.Parameters.OutputFormatStrategy, typeof(FormatStrategy)); + Assert.AreEqual(resultTask.Parameters.OutputFormatStrategy, FormatStrategy.Fixed); + } + [TestMethod] public void WhenUpdatingTaskCompleted_ShouldSendCorrectStatusAndPercentage() { diff --git a/MergerServiceUnitTests/Utils/TestData/invalidTask.json b/MergerServiceUnitTests/Utils/TestData/invalidTask.json new file mode 100644 index 00000000..64626732 --- /dev/null +++ b/MergerServiceUnitTests/Utils/TestData/invalidTask.json @@ -0,0 +1,18 @@ +{ + "id": "", + "type": "", + "description": "", + "parameters": { + "tileFormat": "", + "outputFormatStrategy": "", + "isNewTarget": "" + }, + "status": "", + "percentage": "", + "reason": "", + "attempts": 0, + "jobId": "", + "resettable": false, + "created": "", + "updated": "" +} diff --git a/MergerServiceUnitTests/Utils/TestData/invalidTaskStrategy.json b/MergerServiceUnitTests/Utils/TestData/invalidTaskStrategy.json new file mode 100644 index 00000000..9edc8ce2 --- /dev/null +++ 
b/MergerServiceUnitTests/Utils/TestData/invalidTaskStrategy.json @@ -0,0 +1,44 @@ +{ + "id": "3a59ce9f-9279-43e2-936c-868ee5d0b244", + "type": "tilesMerging", + "description": "", + "parameters": { + "batches": [ + { + "maxX": 156220, + "maxY": 88402, + "minX": 156219, + "minY": 88401, + "zoom": 17 + } + ], + "sources": [ + { + "path": "7a2ad240-c8a9-44a9-ae67-915144abec49/8c1081ab-1552-418a-87ae-b4ae25cdbf7c", + "type": "S3" + }, + { + "grid": "2X1", + "path": "getmap_test_data/aza_north/gpkg/O_ihud_w84geo_Nov04_Sep22_gpkg_19_0.gpkg", + "type": "GPKG", + "extent": { + "maxX": 34.629350139437605, + "maxY": 31.552055574098873, + "minX": 34.49793037765059, + "minY": 31.401062364453793 + } + } + ], + "isNewTarget": true, + "targetFormat": "JPEG", + "outputFormatStrategy": "bad" + }, + "status": "In-Progress", + "percentage": 0, + "reason": "", + "attempts": 0, + "jobId": "a2569d7e-2b86-43af-a6e3-e2096bbad6c5", + "resettable": false, + "created": "2024-01-25 10:29:10.429855+02", + "updated": "2024-01-25 10:42:50.70713+02" +} diff --git a/MergerServiceUnitTests/Utils/TestData/validTaskMixedStrategy.json b/MergerServiceUnitTests/Utils/TestData/validTaskMixedStrategy.json new file mode 100644 index 00000000..f108e571 --- /dev/null +++ b/MergerServiceUnitTests/Utils/TestData/validTaskMixedStrategy.json @@ -0,0 +1,44 @@ +{ + "id": "3a59ce9f-9279-43e2-936c-868ee5d0b244", + "type": "tilesMerging", + "description": "", + "parameters": { + "batches": [ + { + "maxX": 156220, + "maxY": 88402, + "minX": 156219, + "minY": 88401, + "zoom": 17 + } + ], + "sources": [ + { + "path": "7a2ad240-c8a9-44a9-ae67-915144abec49/8c1081ab-1552-418a-87ae-b4ae25cdbf7c", + "type": "S3" + }, + { + "grid": "2X1", + "path": "getmap_test_data/aza_north/gpkg/O_ihud_w84geo_Nov04_Sep22_gpkg_19_0.gpkg", + "type": "GPKG", + "extent": { + "maxX": 34.629350139437605, + "maxY": 31.552055574098873, + "minX": 34.49793037765059, + "minY": 31.401062364453793 + } + } + ], + "isNewTarget": true, + "targetFormat": 
"JPEG", + "outputFormatStrategy": "mixed" + }, + "status": "In-Progress", + "percentage": 0, + "reason": "", + "attempts": 0, + "jobId": "a2569d7e-2b86-43af-a6e3-e2096bbad6c5", + "resettable": false, + "created": "2024-01-25 10:29:10.429855+02", + "updated": "2024-01-25 10:42:50.70713+02" +} diff --git a/MergerServiceUnitTests/Utils/TestData/validTaskNoStrategy.json b/MergerServiceUnitTests/Utils/TestData/validTaskNoStrategy.json new file mode 100644 index 00000000..ae7e54c0 --- /dev/null +++ b/MergerServiceUnitTests/Utils/TestData/validTaskNoStrategy.json @@ -0,0 +1,43 @@ +{ + "id": "3a59ce9f-9279-43e2-936c-868ee5d0b244", + "type": "tilesMerging", + "description": "", + "parameters": { + "batches": [ + { + "maxX": 156220, + "maxY": 88402, + "minX": 156219, + "minY": 88401, + "zoom": 17 + } + ], + "sources": [ + { + "path": "7a2ad240-c8a9-44a9-ae67-915144abec49/8c1081ab-1552-418a-87ae-b4ae25cdbf7c", + "type": "S3" + }, + { + "grid": "2X1", + "path": "getmap_test_data/aza_north/gpkg/O_ihud_w84geo_Nov04_Sep22_gpkg_19_0.gpkg", + "type": "GPKG", + "extent": { + "maxX": 34.629350139437605, + "maxY": 31.552055574098873, + "minX": 34.49793037765059, + "minY": 31.401062364453793 + } + } + ], + "isNewTarget": true, + "targetFormat": "JPEG" + }, + "status": "In-Progress", + "percentage": 0, + "reason": "", + "attempts": 0, + "jobId": "a2569d7e-2b86-43af-a6e3-e2096bbad6c5", + "resettable": false, + "created": "2024-01-25 10:29:10.429855+02", + "updated": "2024-01-25 10:42:50.70713+02" +} diff --git a/TestMergerOutput/run_tests.sh b/TestMergerOutput/run_tests.sh index 62d04a74..44712cb6 100755 --- a/TestMergerOutput/run_tests.sh +++ b/TestMergerOutput/run_tests.sh @@ -3,6 +3,8 @@ export S3_SECRET_KEY="minio123" #"minioadmin" export S3__url="http://localhost:9000" export S3__bucket="tiles" export GENERAL__uploadOnly=false +export TILE__outputFormatStrategy="fixed" +export TILE__outputFormat="jpeg" MAXIMUM_JEPG_TILES_IN_RAM=100000 MAXIMUM_PNG_TILES_IN_RAM=30000 @@ -12,9 +14,9 @@ 
TEST_FOLDER='' INPUT_FOLDER="${TEST_FOLDER}/input" OUTPUT_FOLDER="${TEST_FOLDER}/output" TEST_RESULTS="${TEST_FOLDER}/correct-results" +export GENERAL__resumeOutputFolder="$TEST_FOLDER" OUTPUT_DATA_ARR=('gpkg') -OUTPUT_FORMAT_ARR=('jpeg' 'png') BATCH_SIZE_ARR=(500 1000 2000 5000 10000 15000 20000) THREAD_NUM_ARR=(1 3 5 8 10 15) @@ -28,20 +30,22 @@ TZOR=('Tzor' '35.1837230,33.22952440,35.23727232,33.2968062') MERGED=('merged' '33.8882,29.20989,36.22737,33.6244') TILE=('tile' '33.8882,29.20989,36.22737,33.6244') -DATA_TO_CHECK=('GEO[@]' 'TZOR[@]' 'JORD[@]' 'SYRIA[@]' 'AREA1[@]' 'AREA2[@]' 'AREA3[@]') +# DATA_TO_CHECK=('GEO[@]' 'TZOR[@]' 'JORD[@]' 'SYRIA[@]' 'AREA1[@]' 'AREA2[@]' 'AREA3[@]') +DATA_TO_CHECK=('GEO[@]') RESULTS=() test -z $TEST_FOLDER && echo "Please assign value to TEST_FOLDER" && exit function run_tests { - OUTPUT_FILE_TYPE=$1 - BATCH_SIZE=$2 + BATCH_SIZE=$1 echo " ########## -Output format: $OUTPUT_FILE_TYPE +Standard tests +Output strategy: $TILE__outputFormatStrategy +Output format: $TILE__outputFormat Batch size: $BATCH_SIZE Threads: $GENERAL__parallel__numOfThreads ##########" >> run.txt @@ -54,8 +58,8 @@ Threads: $GENERAL__parallel__numOfThreads STARTTIME=$(date +%s) IFS=' ' read -ra data_arr <<< "${!data}" - dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE gpkg $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg ${data_arr[1]} gpkg $INPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg >> run.txt - RESULTS+=($(python3 TestMergerOutput/run_tests.py gpkg $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg gpkg $TEST_RESULTS/gpkgs/${data_arr[0]}_${OUTPUT_FILE_TYPE}.gpkg)) + dotnet run --project MergerCli Program.cs $BATCH_SIZE gpkg $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg ${data_arr[1]} gpkg $INPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg >> run.txt + RESULTS+=($(python3 TestMergerOutput/run_tests.py gpkg $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg gpkg $TEST_RESULTS/gpkgs/${data_arr[0]}.gpkg)) rm -f $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg ENDTIME=$(date +%s) @@ -63,44 
+67,92 @@ Threads: $GENERAL__parallel__numOfThreads done STARTTIME=$(date +%s) - dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE gpkg $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg ${MERGED[1]} gpkg $INPUT_FOLDER/gpkgs/${GEO[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${TZOR[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${JORD[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${SYRIA[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA1[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA2[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA3[0]}.gpkg >> run.txt - RESULTS+=($(python3 TestMergerOutput/run_tests.py gpkg $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg gpkg $TEST_RESULTS/gpkgs/${MERGED[0]}_${OUTPUT_FILE_TYPE}.gpkg)) + dotnet run --project MergerCli Program.cs $BATCH_SIZE gpkg $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg ${MERGED[1]} gpkg $INPUT_FOLDER/gpkgs/${GEO[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${TZOR[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${JORD[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${SYRIA[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA1[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA2[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA3[0]}.gpkg >> run.txt + RESULTS+=($(python3 TestMergerOutput/run_tests.py gpkg $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg gpkg $TEST_RESULTS/gpkgs/${MERGED[0]}.gpkg)) rm -f $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg ENDTIME=$(date +%s) echo "It takes $(($ENDTIME - $STARTTIME)) seconds to complete run and check for merged..." 
+ echo "Results: ${RESULTS[@]}" ### FS target - # dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE fs $OUTPUT_FOLDER/tiles/area1 ${AREA1[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA1[0]} - # dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE fs $OUTPUT_FOLDER/tiles/area2 ${AREA2[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA2[0]} - # dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE fs $OUTPUT_FOLDER/tiles/area3 ${AREA3[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA3[0]} + # dotnet run --project MergerCli Program.cs $BATCH_SIZE fs $OUTPUT_FOLDER/tiles/area1 ${AREA1[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA1[0]} + # dotnet run --project MergerCli Program.cs $BATCH_SIZE fs $OUTPUT_FOLDER/tiles/area2 ${AREA2[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA2[0]} + # dotnet run --project MergerCli Program.cs $BATCH_SIZE fs $OUTPUT_FOLDER/tiles/area3 ${AREA3[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA3[0]} ### S3 target - # dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE s3 $OUTPUT_FOLDER/tiles/area1 ${AREA1[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA1[0]} - # dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE s3 $OUTPUT_FOLDER/tiles/area2 ${AREA2[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA2[0]} - # dotnet run --project MergerCli Program.cs $BATCH_SIZE $OUTPUT_FILE_TYPE s3 $OUTPUT_FOLDER/tiles/area3 ${AREA3[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA3[0]} + # dotnet run --project MergerCli Program.cs $BATCH_SIZE s3 $OUTPUT_FOLDER/tiles/area1 ${AREA1[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA1[0]} + # dotnet run --project MergerCli Program.cs $BATCH_SIZE s3 $OUTPUT_FOLDER/tiles/area2 ${AREA2[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA2[0]} + # dotnet run --project MergerCli Program.cs $BATCH_SIZE s3 $OUTPUT_FOLDER/tiles/area3 ${AREA3[1]} gpkg $INPUT_FOLDER/gpkgs/${AREA3[0]} +} + +function run_resume_tests { + BATCH_SIZE=$1 + + + echo " +########## +Resume tests +Output strategy: $TILE__outputFormatStrategy +Output format: $TILE__outputFormat +Batch size: 
$BATCH_SIZE +Threads: $GENERAL__parallel__numOfThreads +##########" >> run.txt + + ## Export to new data target + + ### GPKG target + for data in "${DATA_TO_CHECK[@]}" + do + STARTTIME=$(date +%s) + + IFS=' ' read -ra data_arr <<< "${!data}" + dotnet run --project MergerCli Program.cs $BATCH_SIZE gpkg $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg ${data_arr[1]} gpkg $INPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg >> run.txt & + + job_id=$(echo $!) + disown + sleep 3 + kill -KILL $job_id + cat $TEST_FOLDER/status.json + + # Resume stopped merge process + dotnet run --project MergerCli Program.cs >> run.txt + + RESULTS+=($(python3 TestMergerOutput/run_tests.py gpkg $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg gpkg $TEST_RESULTS/gpkgs/${data_arr[0]}.gpkg)) + rm -f $OUTPUT_FOLDER/gpkgs/${data_arr[0]}.gpkg + + ENDTIME=$(date +%s) + echo "It takes $(($ENDTIME - $STARTTIME)) seconds to complete run and check for ${data_arr[0]}..." + done + + STARTTIME=$(date +%s) + dotnet run --project MergerCli Program.cs $BATCH_SIZE gpkg $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg ${MERGED[1]} gpkg $INPUT_FOLDER/gpkgs/${GEO[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${TZOR[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${JORD[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${SYRIA[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA1[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA2[0]}.gpkg gpkg $INPUT_FOLDER/gpkgs/${AREA3[0]}.gpkg >> run.txt + RESULTS+=($(python3 TestMergerOutput/run_tests.py gpkg $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg gpkg $TEST_RESULTS/gpkgs/${MERGED[0]}.gpkg)) + rm -f $OUTPUT_FOLDER/gpkgs/${MERGED[0]}.gpkg + ENDTIME=$(date +%s) + echo "It takes $(($ENDTIME - $STARTTIME)) seconds to complete run and check for merged..." 
} # Run CLI tests -for format in "${OUTPUT_FORMAT_ARR[@]}" +for batch_size in "${BATCH_SIZE_ARR[@]}" do - for batch_size in "${BATCH_SIZE_ARR[@]}" + for thread_num in "${THREAD_NUM_ARR[@]}" do - for thread_num in "${THREAD_NUM_ARR[@]}" - do - # Skip batches that will be an issue because of RAM limits - if [[ $format == 'jpeg' ]] && (( $(echo "$thread_num * $batch_size > $MAXIMUM_JEPG_TILES_IN_RAM" | bc -l) )); then - echo "Skipping due to RAM limit: $format, Threads: $thread_num, Batch size: $batch_size" + # Skip batches that will be an issue because of RAM limits + if [[ $TILE__outputFormatStrategy == 'fixed' ]]; then + if [[ $TILE__outputFormat == 'jpeg' ]] && (( $(echo "$thread_num * $batch_size > $MAXIMUM_JEPG_TILES_IN_RAM" | bc -l) )); then + echo "Skipping due to RAM limit: $TILE__outputFormat, Threads: $thread_num, Batch size: $batch_size" break fi - if [[ $format == 'png' ]] && (( $(echo "$thread_num * $batch_size > $MAXIMUM_PNG_TILES_IN_RAM" | bc -l) )); then - echo "Skipping due to RAM limit: $format, Threads: $thread_num, Batch size: $batch_size" + if [[ $TILE__outputFormat == 'png' ]] && (( $(echo "$thread_num * $batch_size > $MAXIMUM_PNG_TILES_IN_RAM" | bc -l) )); then + echo "Skipping due to RAM limit: $TILE__outputFormat, Threads: $thread_num, Batch size: $batch_size" break fi + fi - export GENERAL__parallel__numOfThreads=$thread_num - # echo "Format: $format, Threads: $thread_num, Batch size: $batch_size" - run_tests "$format" $batch_size - done + export GENERAL__parallel__numOfThreads=$thread_num - # echo "Format: $format, Threads: $thread_num, Batch size: $batch_size" + run_tests $batch_size + run_resume_tests $batch_size done done