diff --git a/MergerCli/Properties/launchSettings.json b/MergerCli/Properties/launchSettings.json index e6b7eba2..66c76267 100644 --- a/MergerCli/Properties/launchSettings.json +++ b/MergerCli/Properties/launchSettings.json @@ -3,11 +3,10 @@ "GPKG to S3": { "workingDirectory": "", "commandName": "Project", - "commandLineArgs": "Program.cs 1000 jpeg s3 area2-copy gpkg area2.gpkg", + "commandLineArgs": "Program.cs 1000 jpeg s3 bucket area2-copy gpkg area2.gpkg", "environmentVariables": { "AWS_ACCESS_KEY_ID": "minioadmin", - "AWS_SECRET_ACCESS_KEY": "minioadmin", - "S3__bucket": "dtm" + "AWS_SECRET_ACCESS_KEY": "minioadmin" } }, "s3 to gpkg": { @@ -16,8 +15,7 @@ "commandLineArgs": "Program.cs 1000 jpeg gpkg area2-copy.gpkg -180,-90,180,90 s3 area3Cli", "environmentVariables": { "AWS_ACCESS_KEY_ID": "minio123", - "AWS_SECRET_ACCESS_KEY": "minio123", - "S3__bucket": "tiles" + "AWS_SECRET_ACCESS_KEY": "minio123" } }, "resume": { @@ -27,8 +25,7 @@ "environmentVariables": { "AWS_ACCESS_KEY_ID": "minio123", "AWS_SECRET_ACCESS_KEY": "minio123", - "S3__url": "http://custom.localhost:9000", - "S3__bucket": "tiles" + "S3__url": "http://custom.localhost:9000" } }, "Test SSL": { @@ -39,8 +36,7 @@ "ASPNETCORE_ENVIRONMENT": "Development", "AWS_ACCESS_KEY_ID": "Q3AM3UQ867SPQQA43P2F", "AWS_SECRET_ACCESS_KEY": "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG", - "S3__url": "https://play.min.io:9000", - "S3__bucket": "tiles" + "S3__url": "https://play.min.io:9000" } }, "Docker": { diff --git a/MergerCli/SourceParser.cs b/MergerCli/SourceParser.cs index a380dc26..1a157b18 100644 --- a/MergerCli/SourceParser.cs +++ b/MergerCli/SourceParser.cs @@ -20,7 +20,8 @@ public SourceParser(IDataFactory dataFactory, ILogger logger) this._logger = logger; } - private void LogDataErrorAndExit(bool isBase, Exception e) { + private void LogDataErrorAndExit(bool isBase, Exception e) + { string source = isBase ? 
"base" : "new"; this._logger.LogError($"{source} data does not exist, error: {e.Message}"); Environment.Exit(1); @@ -53,7 +54,6 @@ public List ParseSources(string[] args, int batchSize, out TileFormat for break; case "fs": - case "s3": try { sources.Add(this.ParseFileSource(args, ref idx, batchSize, isBase)); @@ -62,7 +62,16 @@ public List ParseSources(string[] args, int batchSize, out TileFormat for { this.LogDataErrorAndExit(isBase, e); } - + break; + case "s3": + try + { + sources.Add(this.ParseS3Source(args, ref idx, batchSize, isBase)); + } + catch (Exception e) + { + this.LogDataErrorAndExit(isBase, e); + } break; case "wmts": case "xyz": @@ -115,7 +124,29 @@ private IData ParseFileSource(string[] args, ref int idx, int batchSize, bool is idx += paramCount; Grid? grid = GetGrid(isOneXOne); - return this._dataFactory.CreateDataSource(sourceType, sourcePath, batchSize, grid, origin, null, isBase); + return this._dataFactory.CreateDataSource(sourceType, sourcePath, string.Empty, batchSize, grid, origin, null, isBase); + } + + private IData ParseS3Source(string[] args, ref int idx, int batchSize, bool isBase) + { + const int requiredParamCount = 3; + const int optionalParamCount = 2; + int paramCount = this.ValidateAndGetSourceLength(args, idx, requiredParamCount, optionalParamCount); + string sourceType = args[idx]; + string bucket = args[idx + 1]; + string sourcePath = args[idx + 2]; + bool? isOneXOne = null; + GridOrigin? origin = null; + if (paramCount > requiredParamCount) + { + // not using set as it allows optional prams with dynamic values aka. --minZoom 3 + var optionalParams = args.Skip(idx + requiredParamCount).Take(optionalParamCount).ToArray(); + this.ParseOptionalParameters(sourceType, sourcePath, ref isOneXOne, ref origin, optionalParams); + } + + idx += paramCount; + Grid? 
grid = GetGrid(isOneXOne); + return this._dataFactory.CreateDataSource(sourceType, sourcePath, bucket, batchSize, grid, origin, null, isBase); } private IData ParseGpkgSource(string[] args, ref int idx, int batchSize, bool isBase) @@ -142,7 +173,7 @@ private IData ParseGpkgSource(string[] args, ref int idx, int batchSize, bool is idx += paramCount; Grid? grid = GetGrid(isOneXOne); - return this._dataFactory.CreateDataSource(sourceType, sourcePath, batchSize, grid, origin, extent, isBase); + return this._dataFactory.CreateDataSource(sourceType, sourcePath, string.Empty, batchSize, grid, origin, extent, isBase); } private IData ParseHttpSource(string[] args, ref int idx, int batchSize, bool isBase) @@ -166,7 +197,7 @@ private IData ParseHttpSource(string[] args, ref int idx, int batchSize, bool is idx += paramCount; Grid? grid = GetGrid(isOneXOne); - return this._dataFactory.CreateDataSource(sourceType, sourcePath, batchSize, isBase, extent, maxZoom, + return this._dataFactory.CreateDataSource(sourceType, sourcePath, string.Empty, batchSize, isBase, extent, maxZoom, minZoom, grid, origin); } @@ -183,7 +214,7 @@ private Extent parseExtent(string extentString) return extent; } - private int ParseOptionalParameters(string sourceType, string sourcePath, ref bool? isOneXOne, + private int ParseOptionalParameters(string sourceType, string sourcePath, ref bool? isOneXOne, ref GridOrigin? 
origin, string[] optionalParams) { int parsed = 0; @@ -210,11 +241,21 @@ private int ParseOptionalParameters(string sourceType, string sourcePath, ref bo parsed++; } + /*if (optionalParams.Contains("--Bucket")) + { + if (bucket != null) + { + throw new Exception($"layer {sourceType} {sourcePath} can't have 2 buckets"); + } + + bucket = GridOrigin.LOWER_LEFT; + parsed++; + }*/ + return parsed; } - private int ValidateAndGetSourceLength(string[] args, int startIdx, int minExpectedParamCount, - int optionalParamCount) + private int ValidateAndGetSourceLength(string[] args, int startIdx, int minExpectedParamCount, int optionalParamCount) { int i = startIdx + 1; // check required parameters diff --git a/MergerCli/appsettings.json b/MergerCli/appsettings.json index 783532be..18fa0c67 100644 --- a/MergerCli/appsettings.json +++ b/MergerCli/appsettings.json @@ -12,7 +12,6 @@ }, "S3": { "url": "http://localhost:9000", - "bucket": "tiles", "request": { "timeoutSec": 5, "retries": 3 diff --git a/MergerLogic/Clients/IS3Client.cs b/MergerLogic/Clients/IS3Client.cs index 2bff6aae..29c89267 100644 --- a/MergerLogic/Clients/IS3Client.cs +++ b/MergerLogic/Clients/IS3Client.cs @@ -1,4 +1,5 @@ -using MergerLogic.Batching; +using Amazon.S3.Model; +using MergerLogic.Batching; using MergerLogic.Utils; namespace MergerLogic.Clients @@ -7,5 +8,7 @@ public interface IS3Client : IDataUtils { void UpdateTile(Tile tile); Tile? GetTile(string key); + string Bucket { get; } + ListObjectsV2Response ListObject(ref string? continuationToken, string prefix, string startAfter, int? 
maxKeys = null); } } diff --git a/MergerLogic/Clients/S3Client.cs b/MergerLogic/Clients/S3Client.cs index c7df9d96..7a90e44d 100644 --- a/MergerLogic/Clients/S3Client.cs +++ b/MergerLogic/Clients/S3Client.cs @@ -11,19 +11,22 @@ namespace MergerLogic.Clients { public class S3Client : DataUtils, IS3Client { - private readonly string _bucket; - + private readonly string _bucket; private readonly IAmazonS3 _client; private readonly ILogger _logger; private readonly IPathUtils _pathUtils; - public S3Client(IAmazonS3 client, IPathUtils pathUtils, IGeoUtils geoUtils, IImageFormatter formatter, ILogger logger, - string bucket, string path) : base(path, geoUtils, formatter) + public S3Client(IAmazonS3 client, IPathUtils pathUtils, IGeoUtils geoUtils, IImageFormatter formatter, ILogger logger, string bucket, string path) : base(path, geoUtils, formatter) { - this._client = client; - this._bucket = bucket; + this._client = client ?? throw new Exception("s3 configuration is required"); this._pathUtils = pathUtils; this._logger = logger; + this._bucket = bucket; + } + + public string Bucket + { + get { return this._bucket; } } private byte[]? GetImageBytes(string key) @@ -104,7 +107,9 @@ public void UpdateTile(Tile tile) var request = new PutObjectRequest() { - BucketName = this._bucket, CannedACL = S3CannedACL.PublicRead, Key = String.Format(key) + BucketName = this._bucket, + CannedACL = S3CannedACL.PublicRead, + Key = String.Format(key) }; byte[] buffer = tile.GetImageBytes(); @@ -130,5 +135,35 @@ public void UpdateTile(Tile tile) this._logger.LogDebug($"[{methodName}] end z: {z}, x: {x}, y: {y}"); return result; } + + public ListObjectsV2Response ListObject(ref string? continuationToken, string prefix, string startAfter, int?
maxKeys = null) + { + ListObjectsV2Request listRequests = null; + if (maxKeys == null) + { + listRequests = new ListObjectsV2Request + { + BucketName = this._bucket, + Prefix = prefix, + StartAfter = startAfter, + ContinuationToken = continuationToken + }; + } + else + { + listRequests = new ListObjectsV2Request + { + BucketName = this._bucket, + Prefix = prefix, + StartAfter = startAfter, + ContinuationToken = continuationToken, + MaxKeys = maxKeys.Value + }; + } + + var task = this._client.ListObjectsV2Async(listRequests); + var response = task.Result; + return response; + } } } diff --git a/MergerLogic/DataTypes/Data.cs b/MergerLogic/DataTypes/Data.cs index 22e4b1e2..59131cb4 100644 --- a/MergerLogic/DataTypes/Data.cs +++ b/MergerLogic/DataTypes/Data.cs @@ -76,7 +76,7 @@ public abstract class Data : IData where TUtilsType : IDataUtils protected ValFromCoordFunction ConvertOriginCoord; protected Data(IServiceProvider container, DataType type, string path, int batchSize, Grid? grid, - GridOrigin? origin, bool isBase, Extent? extent = null) + GridOrigin? origin, bool isBase, Extent? extent = null, string? bucket = null) { string methodName = MethodBase.GetCurrentMethod().Name; this._container = container; @@ -88,7 +88,7 @@ protected Data(IServiceProvider container, DataType type, string path, int batch this.Path = path; this.BatchSize = batchSize; var utilsFactory = container.GetRequiredService(); - this.Utils = utilsFactory.GetDataUtils(path); + this.Utils = utilsFactory.GetDataUtils(path, bucket); this.GeoUtils = container.GetRequiredService(); this.Grid = grid ?? this.DefaultGrid(); this.Origin = origin ?? 
this.DefaultOrigin(); diff --git a/MergerLogic/DataTypes/S3.cs b/MergerLogic/DataTypes/S3.cs index ccb1846b..5ece7b1e 100644 --- a/MergerLogic/DataTypes/S3.cs +++ b/MergerLogic/DataTypes/S3.cs @@ -1,9 +1,7 @@ -using Amazon.S3; using Amazon.S3.Model; using MergerLogic.Batching; using MergerLogic.Clients; using MergerLogic.Utils; -using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using System.Reflection; @@ -11,8 +9,6 @@ namespace MergerLogic.DataTypes { public class S3 : Data { - private IAmazonS3 _client; - private string _bucket; private readonly List _zoomLevels; private IEnumerator _zoomEnumerator; private string? _continuationToken; @@ -22,8 +18,8 @@ public class S3 : Data private readonly IPathUtils _pathUtils; - public S3(IPathUtils pathUtils, IServiceProvider container, string path, int batchSize, Grid? grid, GridOrigin? origin, bool isBase) - : base(container, DataType.S3, path, batchSize, grid, origin, isBase) + public S3(IPathUtils pathUtils, IServiceProvider container, string bucket, string path, int batchSize, Grid? grid, GridOrigin? origin, bool isBase) + : base(container, DataType.S3, path, batchSize, grid, origin, isBase, null, bucket) { this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] Ctor started"); this._pathUtils = pathUtils; @@ -40,12 +36,6 @@ public S3(IPathUtils pathUtils, IServiceProvider container, string path, int bat protected override void Initialize() { - this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] start"); - var configurationManager = this._container.GetRequiredService(); - var client = this._container.GetService(); - this._client = client ?? 
throw new Exception("s3 configuration is required"); - this._bucket = configurationManager.GetConfiguration("S3", "bucket"); - this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] ended"); } protected override GridOrigin DefaultOrigin() @@ -97,18 +87,7 @@ public override List GetNextBatch(out string currentBatchIdentifier, out s } string path = $"{this.Path}/{this._zoomEnumerator.Current}/"; - var listRequests = new ListObjectsV2Request - { - BucketName = this._bucket, - Prefix = path, - StartAfter = path, - MaxKeys = missingTiles, - ContinuationToken = this._continuationToken - }; - - var listObjectsTask = this._client.ListObjectsV2Async(listRequests); - var response = listObjectsTask.Result; - + ListObjectsV2Response response = this.Utils.ListObject(ref this._continuationToken, path, path, missingTiles); foreach (S3Object item in response.S3Objects) { Tile? tile = this.Utils.GetTile(item.Key); @@ -145,27 +124,19 @@ public override void setBatchIdentifier(string batchIdentifier) private bool FolderExists(string directory) { - this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] start"); + this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] start, bucket: {this.Utils.Bucket}, directory: {directory}"); directory = $"{this.Path}/{directory}"; - var listRequests = new ListObjectsV2Request - { - BucketName = this._bucket, - Prefix = directory, - StartAfter = directory, - MaxKeys = 1 - }; - var task = this._client.ListObjectsV2Async(listRequests); - var response = task.Result; - this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] end"); - return response.KeyCount > 0; + string? 
continuationToken = null; + ListObjectsV2Response response = this.Utils.ListObject(ref continuationToken, directory, directory, 1); + bool exists = response.KeyCount > 0; + this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] end, bucket: {this.Utils.Bucket}, directory: {directory}, exists: {exists}"); + return exists; } public override bool Exists() { - this._logger.LogInformation($"[{MethodBase.GetCurrentMethod().Name}] bucket: {this._bucket}, path: {this.Path}"); bool exists = FolderExists(""); - this._logger.LogInformation($"[{MethodBase.GetCurrentMethod().Name}] ended"); return exists; } @@ -174,20 +145,9 @@ public override long TileCount() this._logger.LogDebug($"[{MethodBase.GetCurrentMethod().Name}] start"); long tileCount = 0; string? continuationToken = null; - do { - var listRequests = new ListObjectsV2Request - { - BucketName = this._bucket, - Prefix = this.Path, - StartAfter = this.Path, - ContinuationToken = continuationToken - }; - - var task = this._client.ListObjectsV2Async(listRequests); - var response = task.Result; - + ListObjectsV2Response response = this.Utils.ListObject(ref continuationToken, this.Path, this.Path); tileCount += response.KeyCount; continuationToken = response.NextContinuationToken; } while (continuationToken != null); diff --git a/MergerLogic/Utils/DataFactory.cs b/MergerLogic/Utils/DataFactory.cs index 11c3570a..ab729e90 100644 --- a/MergerLogic/Utils/DataFactory.cs +++ b/MergerLogic/Utils/DataFactory.cs @@ -20,7 +20,7 @@ public DataFactory(IConfigurationManager configuration, IPathUtils pathUtils, IS this._logger = logger; } - public IData CreateDataSource(string type, string path, int batchSize, Grid? grid = null, GridOrigin? origin = null, Extent? extent = null, bool isBase = false) + public IData CreateDataSource(string type, string path, string bucket, int batchSize, Grid? grid = null, GridOrigin? origin = null, Extent? 
extent = null, bool isBase = false) { IData data; @@ -31,7 +31,7 @@ public IData CreateDataSource(string type, string path, int batchSize, Grid? gri break; case "s3": path = this._pathUtils.RemoveTrailingSlash(path); - data = new S3(this._pathUtils, this._container, path, batchSize, grid, origin, isBase); + data = new S3(this._pathUtils, this._container, bucket, path, batchSize, grid, origin, isBase); break; case "fs": data = new FS(this._pathUtils, this._container, path, batchSize, grid, origin, isBase); @@ -55,7 +55,7 @@ public IData CreateDataSource(string type, string path, int batchSize, Grid? gri return data; } - public IData CreateDataSource(string type, string path, int batchSize, bool isBase, Extent extent, int maxZoom, int minZoom = 0, Grid? grid = null, GridOrigin? origin = null) + public IData CreateDataSource(string type, string path, string bucket, int batchSize, bool isBase, Extent extent, int maxZoom, int minZoom = 0, Grid? grid = null, GridOrigin? origin = null) { IData data; type = type.ToLower(); @@ -64,7 +64,7 @@ public IData CreateDataSource(string type, string path, int batchSize, bool isBa case "gpkg": case "s3": case "fs": - return this.CreateDataSource(type, path, batchSize, grid, origin, extent, isBase); + return this.CreateDataSource(type, path, bucket, batchSize, grid, origin, extent, isBase); }; if (isBase) { diff --git a/MergerLogic/Utils/IDataFactory.cs b/MergerLogic/Utils/IDataFactory.cs index 868dc86e..de52a9d0 100644 --- a/MergerLogic/Utils/IDataFactory.cs +++ b/MergerLogic/Utils/IDataFactory.cs @@ -5,7 +5,7 @@ namespace MergerLogic.Utils { public interface IDataFactory { - IData CreateDataSource(string type, string path, int batchSize, Grid? grid = null, GridOrigin? origin = null, Extent? extent = null, bool isBase = false); - IData CreateDataSource(string type, string path, int batchSize, bool isBase, Extent extent, int maxZoom, int minZoom = 0, Grid? grid = null, GridOrigin? 
origin = null); + IData CreateDataSource(string type, string path, string bucket, int batchSize, Grid? grid = null, GridOrigin? origin = null, Extent? extent = null, bool isBase = false); + IData CreateDataSource(string type, string path, string bucket, int batchSize, bool isBase, Extent extent, int maxZoom, int minZoom = 0, Grid? grid = null, GridOrigin? origin = null); } } diff --git a/MergerLogic/Utils/IUtilsFactory.cs b/MergerLogic/Utils/IUtilsFactory.cs index e8366da2..d7719b62 100644 --- a/MergerLogic/Utils/IUtilsFactory.cs +++ b/MergerLogic/Utils/IUtilsFactory.cs @@ -4,11 +4,11 @@ namespace MergerLogic.Utils { public interface IUtilsFactory { - T GetDataUtils(string path) where T : IDataUtils; + T GetDataUtils(string path, string? bucket) where T : IDataUtils; IFileClient GetFileUtils(string path); IGpkgClient GetGpkgUtils(string path); IHttpSourceClient GetHttpUtils(string path); IPathPatternUtils GetPathPatternUtils(string pattern); - IS3Client GetS3Utils(string path); + IS3Client GetS3Utils(string? bucket, string path); } } diff --git a/MergerLogic/Utils/UtilsFactory.cs b/MergerLogic/Utils/UtilsFactory.cs index 9c64f64d..7277212a 100644 --- a/MergerLogic/Utils/UtilsFactory.cs +++ b/MergerLogic/Utils/UtilsFactory.cs @@ -4,6 +4,7 @@ using System.IO.Abstractions; using MergerLogic.Clients; using MergerLogic.ImageProcessing; +using Microsoft.Extensions.Configuration; namespace MergerLogic.Utils { @@ -48,19 +49,18 @@ public IHttpSourceClient GetHttpUtils(string path) return new HttpSourceClient(path, this._httpRequestUtils, pathPatternUtils, this._geoUtils, this._imageFormatter); } - public IS3Client GetS3Utils(string path) + public IS3Client GetS3Utils(string? bucket, string path) { - string bucket = this._container.GetRequiredService().GetConfiguration("S3", "bucket"); IAmazonS3?
client = this._container.GetService(); - if (client is null || bucket == string.Empty) + if (client is null || bucket is null) { - throw new Exception("S3 Data utils requires s3 client to be configured"); + throw new Exception("S3 Data utils requires s3 client and bucket to be configured"); } var logger = this._container.GetRequiredService>(); return new S3Client(client, this._pathUtils, this._geoUtils, this._imageFormatter, logger, bucket, path); } - public T GetDataUtils(string path) where T : IDataUtils + public T GetDataUtils(string path, string? bucket) where T : IDataUtils { if (typeof(IFileClient).IsAssignableFrom(typeof(T))) { @@ -76,7 +76,7 @@ public T GetDataUtils(string path) where T : IDataUtils } if (typeof(IS3Client).IsAssignableFrom(typeof(T))) { - return (T)(Object)this.GetS3Utils(path); + return (T)(Object)this.GetS3Utils(bucket, path); } throw new NotImplementedException("Invalid Utils type"); } diff --git a/MergerLogicUnitTests/Utils/S3UtilsTest.cs b/MergerLogicUnitTests/Utils/S3UtilsTest.cs index da6d462a..dd61b32e 100644 --- a/MergerLogicUnitTests/Utils/S3UtilsTest.cs +++ b/MergerLogicUnitTests/Utils/S3UtilsTest.cs @@ -119,7 +119,7 @@ public void GetTile(bool exist, GetTileParamType paramType,TileFormat tileFormat } var s3Utils = new S3Client(this._s3ClientMock.Object, this._pathUtilsMock.Object, - this._geoUtilsMock.Object, this._imageFormatterMock.Object, this._loggerMock.Object, "bucket", "test"); + this._geoUtilsMock.Object, this._imageFormatterMock.Object, this._loggerMock.Object, "bucket", "test"); Tile tile = null; switch (paramType) @@ -193,7 +193,7 @@ public void TileExists(bool exist) }); var s3Utils = new S3Client(this._s3ClientMock.Object, this._pathUtilsMock.Object, - this._geoUtilsMock.Object, this._imageFormatterMock.Object, this._loggerMock.Object, "bucket", "test"); + this._geoUtilsMock.Object, this._imageFormatterMock.Object, this._loggerMock.Object, "bucket", "test"); Assert.AreEqual(exist, s3Utils.TileExists(0, 0, 0)); @@ -230,7 +230,7 @@
public void UpdateTile() }); var s3Utils = new S3Client(this._s3ClientMock.Object, this._pathUtilsMock.Object, - this._geoUtilsMock.Object, this._imageFormatterMock.Object, this._loggerMock.Object, "bucket", "test"); + this._geoUtilsMock.Object, this._imageFormatterMock.Object, this._loggerMock.Object, "bucket", "test"); s3Utils.UpdateTile(testTile); this._pathUtilsMock.Verify(utils => utils.GetTilePath(It.IsAny(),It.IsAny(), It.IsAny()), Times.Once); diff --git a/MergerService/Models/Source.cs b/MergerService/Models/Source.cs index 68c8b242..5f51869a 100644 --- a/MergerService/Models/Source.cs +++ b/MergerService/Models/Source.cs @@ -17,16 +17,19 @@ public class Source public Grid? Grid { get; } + public string? Bucket { get; } [System.Text.Json.Serialization.JsonIgnore] private JsonSerializerSettings _jsonSerializerSettings; - public Source(string path, string type, Extent? extent = null, GridOrigin? origin = null, Grid? grid = null) + public Source(string path, string type, Extent? extent = null, GridOrigin? origin = null, Grid? grid = null, string?
bucket = null) { this.Path = path; this.Type = type; this.Extent = extent; this.Origin = origin; this.Grid = grid; + this.Bucket = bucket; this._jsonSerializerSettings = new JsonSerializerSettings(); this._jsonSerializerSettings.Converters.Add(new StringEnumConverter()); diff --git a/MergerService/Properties/launchSettings.json b/MergerService/Properties/launchSettings.json index 95a5c301..b90e5e3a 100644 --- a/MergerService/Properties/launchSettings.json +++ b/MergerService/Properties/launchSettings.json @@ -16,8 +16,7 @@ "environmentVariables": { "AWS_ACCESS_KEY_ID": "minio123", "AWS_SECRET_ACCESS_KEY": "minio123", - "S3__url": "http://localhost:9000", - "S3__bucket": "tiles" + "S3__url": "http://localhost:9000" }, "applicationUrl": "http://localhost:5074", "dotnetRunMessages": true diff --git a/MergerService/appsettings.Development.json b/MergerService/appsettings.Development.json index db085ab9..bd6d9fb3 100644 --- a/MergerService/appsettings.Development.json +++ b/MergerService/appsettings.Development.json @@ -13,8 +13,7 @@ "maxAttempts": 3 }, "S3": { - "url": "http://localhost:9000", - "bucket": "tiles" + "url": "http://localhost:9000" }, "Logging": { "LogLevel": { diff --git a/MergerService/appsettings.json b/MergerService/appsettings.json index e2b8ba07..9441c092 100644 --- a/MergerService/appsettings.json +++ b/MergerService/appsettings.json @@ -16,8 +16,7 @@ "vacuum": false }, "S3": { - "url": "http://localhost:9000", - "bucket": "tiles", + "url": "http://localhost:9000", "request": { "timeoutSec": 5, "retries": 3 diff --git a/MergerService/src/run.cs b/MergerService/src/run.cs index 0f43a4b3..fe9a5c42 100644 --- a/MergerService/src/run.cs +++ b/MergerService/src/run.cs @@ -5,6 +5,7 @@ using MergerLogic.Monitoring.Metrics; using MergerLogic.Utils; using MergerService.Controllers; +using MergerService.Models.Jobs; using MergerService.Models.Tasks; using MergerService.Utils; using System.Diagnostics; @@ -101,14 +102,12 @@ private List BuildDataList(Source[]
paths, int batchSize) { if (paths.Length != 0) { string path = BuildPath(paths[0], true); - sources.Add(this._dataFactory.CreateDataSource(paths[0].Type, path, batchSize, paths[0].Grid, - paths[0].Origin, paths[0].Extent, true)); + sources.Add(this._dataFactory.CreateDataSource(paths[0].Type, path, paths[0].Bucket, batchSize, paths[0].Grid, paths[0].Origin, paths[0].Extent, true)); foreach (Source source in paths.Skip(1)) { // TODO: add support for HTTP path = BuildPath(source, false); - sources.Add(this._dataFactory.CreateDataSource(source.Type, path, batchSize, - source.Grid, source.Origin)); + sources.Add(this._dataFactory.CreateDataSource(source.Type, path, source.Bucket, batchSize, source.Grid, source.Origin)); } } stopwatch.Stop(); @@ -182,7 +181,27 @@ public void Start() continue; } - string? managerCallbackUrl = jobUtils.GetJob(task.JobId)?.Parameters.ManagerCallbackUrl; + MergeJob? jobData = jobUtils.GetJob(task.JobId); + if (jobData == null) + { + this._logger.LogWarning($"[{methodName}] no job data found for task!"); + // TODO: should we fail the task? + } + if (jobData != null && !(jobData.Status == Status.PENDING || jobData.Status == Status.IN_PROGRESS)) + { + this._logger.LogWarning($"[{methodName}] job status is {jobData.Status}, stop the task and set its status accordingly"); + + // TODO: should we deal differently with completed or failed? + + UpdateParams updateParams = new UpdateParams() + { + Status = jobData.Status + }; + taskUtils.UpdateProgress(task.JobId, task.Id, updateParams); + continue; + } + + string? managerCallbackUrl = jobData?.Parameters?.ManagerCallbackUrl; string log = managerCallbackUrl == null ? "managerCallbackUrl not provided as job parameter" : $"managerCallback url: {managerCallbackUrl}"; this._logger.LogDebug($"[{methodName}]{log}"); var totalTaskStopwatch = Stopwatch.StartNew();