From 551e5b2f7449f381fcb8043faf531a353c34632b Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Tue, 18 Feb 2020 15:23:21 -0800 Subject: [PATCH 01/27] WIP allow for multiple cache directories to be specified --- .../PipelineCache/PipelineCacheServer.cs | 50 +++++++----- .../PipelineCacheTaskPluginBase.cs | 80 +++++++++++++------ .../PipelineCache/RestorePipelineCacheV0.cs | 12 +-- .../PipelineCache/SavePipelineCacheV0.cs | 22 ++--- src/Agent.Plugins/PipelineCache/TarUtils.cs | 61 +++++++++----- 5 files changed, 145 insertions(+), 80 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index 9f55168768..2f96442d7a 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -24,8 +24,9 @@ public class PipelineCacheServer { internal async Task UploadAsync( AgentTaskPluginExecutionContext context, - Fingerprint fingerprint, - string path, + Fingerprint keyFingerprint, + string[] pathSegments, + string workspace, CancellationToken cancellationToken, ContentFormat contentFormat) { @@ -39,7 +40,7 @@ public class PipelineCacheServer // Check if the key exists. PipelineCacheActionRecord cacheRecordGet = clientTelemetry.CreateRecord((level, uri, type) => new PipelineCacheActionRecord(level, uri, type, PipelineArtifactConstants.RestoreCache, context)); - PipelineCacheArtifact getResult = await pipelineCacheClient.GetPipelineCacheArtifactAsync(new [] {fingerprint}, cancellationToken, cacheRecordGet); + PipelineCacheArtifact getResult = await pipelineCacheClient.GetPipelineCacheArtifactAsync(new[] { keyFingerprint }, cancellationToken, cacheRecordGet); // Send results to CustomerIntelligence context.PublishTelemetry(area: PipelineArtifactConstants.AzurePipelinesAgent, feature: PipelineArtifactConstants.PipelineCache, record: cacheRecordGet); //If cache exists, return. @@ -49,7 +50,7 @@ public class PipelineCacheServer return; } - string uploadPath = await this.GetUploadPathAsync(contentFormat, context, path, cancellationToken); + string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathSegments, workspace, cancellationToken); //Upload the pipeline artifact. 
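            // Illustration only (hypothetical values, assuming no cache salt variable is set): a task input of
            //   path: '.npm | .cache/pip'
            // is split on '|' into pathSegments = { ".npm", ".cache/pip" } by the ParseIntoSegments change
            // later in this patch, and with the SingleTar content format the uploadPath computed above is
            // expected to point at a temporary tarball that bundles both directories, rooted at the workspace.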
PipelineCacheActionRecord uploadRecord = clientTelemetry.CreateRecord((level, uri, type) => new PipelineCacheActionRecord(level, uri, type, nameof(dedupManifestClient.PublishAsync), context)); @@ -59,10 +60,10 @@ public class PipelineCacheServer { return await dedupManifestClient.PublishAsync(uploadPath, cancellationToken); }); - + CreatePipelineCacheArtifactContract options = new CreatePipelineCacheArtifactContract { - Fingerprint = fingerprint, + Fingerprint = keyFingerprint, RootId = result.RootId, ManifestId = result.ManifestId, ProofNodes = result.ProofNodes.ToArray(), @@ -81,7 +82,7 @@ public class PipelineCacheServer } catch { } } - + // Cache the artifact PipelineCacheActionRecord cacheRecord = clientTelemetry.CreateRecord((level, uri, type) => new PipelineCacheActionRecord(level, uri, type, PipelineArtifactConstants.SaveCache, context)); @@ -97,8 +98,9 @@ public class PipelineCacheServer internal async Task DownloadAsync( AgentTaskPluginExecutionContext context, Fingerprint[] fingerprints, - string path, + string[] pathSegments, string cacheHitVariable, + string workspace, CancellationToken cancellationToken) { VssConnection connection = context.VssConnection; @@ -125,12 +127,12 @@ public class PipelineCacheServer record: downloadRecord, actionAsync: async () => { - await this.DownloadPipelineCacheAsync(context, dedupManifestClient, result.ManifestId, path, Enum.Parse(result.ContentFormat), cancellationToken); + await this.DownloadPipelineCacheAsync(context, dedupManifestClient, result.ManifestId, pathSegments, workspace, Enum.Parse(result.ContentFormat), cancellationToken); }); // Send results to CustomerIntelligence context.PublishTelemetry(area: PipelineArtifactConstants.AzurePipelinesAgent, feature: PipelineArtifactConstants.PipelineCache, record: downloadRecord); - + context.Output("Cache restored."); } @@ -145,11 +147,11 @@ public class PipelineCacheServer context.Verbose($"Exact fingerprint: `{result.Fingerprint.ToString()}`"); bool foundExact = false; - foreach(var fingerprint in fingerprints) + foreach (var fingerprint in fingerprints) { context.Verbose($"This fingerprint: `{fingerprint.ToString()}`"); - if (fingerprint == result.Fingerprint + if (fingerprint == result.Fingerprint || result.Fingerprint.Segments.Length == 1 && result.Fingerprint.Segments.Single() == fingerprint.SummarizeForV1()) { foundExact = true; @@ -176,12 +178,17 @@ public class PipelineCacheServer return pipelineCacheClient; } - private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, string path, CancellationToken cancellationToken) + private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, string[] pathSegments, string workspace, CancellationToken cancellationToken) { - string uploadPath = path; - if(contentFormat == ContentFormat.SingleTar) + if (pathSegments.Length == 1) + { + return pathSegments[0]; + } + string uploadPath = string.Empty; + // TODO: what to do if not SingleTar? 
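            // As of this commit: a single resolved path segment is returned as-is, which keeps the existing
            // publish-one-directory behavior, while multiple segments are only handled by the SingleTar
            // branch below, which archives them together; the TODO above tracks what other content formats
            // should do.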
+ if (contentFormat == ContentFormat.SingleTar) { - uploadPath = await TarUtils.ArchiveFilesToTarAsync(context, path, cancellationToken); + uploadPath = await TarUtils.ArchiveFilesToTarAsync(context, pathSegments, workspace, cancellationToken); } return uploadPath; } @@ -190,7 +197,8 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent AgentTaskPluginExecutionContext context, DedupManifestArtifactClient dedupManifestClient, DedupIdentifier manifestId, - string targetDirectory, + string[] pathSegments, + string workspace, ContentFormat contentFormat, CancellationToken cancellationToken) { @@ -199,21 +207,21 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent string manifestPath = Path.Combine(Path.GetTempPath(), $"{nameof(DedupManifestArtifactClient)}.{Path.GetRandomFileName()}.manifest"); await dedupManifestClient.DownloadFileToPathAsync(manifestId, manifestPath, proxyUri: null, cancellationToken: cancellationToken); Manifest manifest = JsonSerializer.Deserialize(File.ReadAllText(manifestPath)); - await TarUtils.DownloadAndExtractTarAsync (context, manifest, dedupManifestClient, targetDirectory, cancellationToken); + await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, pathSegments, workspace, cancellationToken); try { - if(File.Exists(manifestPath)) + if (File.Exists(manifestPath)) { File.Delete(manifestPath); } } - catch {} + catch { } } else { DownloadDedupManifestArtifactOptions options = DownloadDedupManifestArtifactOptions.CreateWithManifestId( manifestId, - targetDirectory, + pathSegments[0], // TODO: whats the right format here proxyUri: null, minimatchPatterns: null); await dedupManifestClient.DownloadAsync(options, cancellationToken); diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index f7c805c26a..a9ee9eeadb 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -31,26 +31,28 @@ public abstract class PipelineCacheTaskPluginBase : IAgentTaskPlugin public abstract String Stage { get; } public const string ResolvedFingerPrintVariableName = "RESTORE_STEP_RESOLVED_FINGERPRINT"; - internal static (bool isOldFormat, string[] keySegments,IEnumerable restoreKeys) ParseIntoSegments(string salt, string key, string restoreKeysBlock) + internal static (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys, string[] pathSegments) ParseIntoSegments(string salt, string key, string restoreKeysBlock, string path) { - Func splitAcrossPipes = (s) => { - var segments = s.Split(new [] {'|'},StringSplitOptions.RemoveEmptyEntries).Select(segment => segment.Trim()); - if(!string.IsNullOrWhiteSpace(salt)) + Func splitAcrossPipes = (s) => + { + var segments = s.Split(new[] { '|' }, StringSplitOptions.RemoveEmptyEntries).Select(segment => segment.Trim()); + if (!string.IsNullOrWhiteSpace(salt)) { - segments = (new [] { $"{SaltVariableName}={salt}"}).Concat(segments); + segments = (new[] { $"{SaltVariableName}={salt}" }).Concat(segments); } return segments.ToArray(); }; - Func splitAcrossNewlines = (s) => - s.Replace("\r\n", "\n") //normalize newlines - .Split(new [] {'\n'}, StringSplitOptions.RemoveEmptyEntries) - .Select(line => line.Trim()) - .ToArray(); - + Func splitAcrossNewlines = (s) => + s.Replace("\r\n", "\n") //normalize newlines + .Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries) + .Select(line => line.Trim()) + 
.ToArray(); + string[] keySegments; + string[] pathSegments; bool isOldFormat = key.Contains('\n'); - + IEnumerable restoreKeys; bool hasRestoreKeys = !string.IsNullOrWhiteSpace(restoreKeysBlock); @@ -58,7 +60,7 @@ internal static (bool isOldFormat, string[] keySegments,IEnumerable re { throw new ArgumentException(OldKeyFormatMessage); } - + if (isOldFormat) { keySegments = splitAcrossNewlines(key); @@ -67,7 +69,9 @@ internal static (bool isOldFormat, string[] keySegments,IEnumerable re { keySegments = splitAcrossPipes(key); } - + + // Path can now be a list of path segments to include in cache + pathSegments = splitAcrossPipes(path); if (hasRestoreKeys) { @@ -78,11 +82,13 @@ internal static (bool isOldFormat, string[] keySegments,IEnumerable re restoreKeys = Enumerable.Empty(); } - return (isOldFormat, keySegments, restoreKeys); + return (isOldFormat, keySegments, restoreKeys, pathSegments); } - + public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { + WaitForDebuggerAttach(); + ArgUtil.NotNull(context, nameof(context)); VariableValue saltValue = context.Variables.GetValueOrDefault(SaltVariableName); @@ -93,8 +99,10 @@ public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, Canc string key = context.GetInput(PipelineCacheTaskPluginConstants.Key, required: true); string restoreKeysBlock = context.GetInput(PipelineCacheTaskPluginConstants.RestoreKeys, required: false); + // TODO: Translate path from container to host (Ting) + string path = context.GetInput(PipelineCacheTaskPluginConstants.Path, required: true); - (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys) = ParseIntoSegments(salt, key, restoreKeysBlock); + (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys, string[] pathSegments) = ParseIntoSegments(salt, key, restoreKeysBlock, path); if (isOldFormat) { @@ -105,23 +113,25 @@ public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, Canc Fingerprint keyFp = FingerprintCreator.EvaluateKeyToFingerprint(context, workspaceRoot, keySegments); context.Output($"Resolved to: {keyFp}"); - Func restoreKeysGenerator = () => - restoreKeys.Select(restoreKey => { + Func restoreKeysGenerator = () => + restoreKeys.Select(restoreKey => + { context.Output("Resolving restore key:"); Fingerprint f = FingerprintCreator.EvaluateKeyToFingerprint(context, workspaceRoot, restoreKey); - f.Segments = f.Segments.Concat(new [] { Fingerprint.Wildcard} ).ToArray(); + f.Segments = f.Segments.Concat(new[] { Fingerprint.Wildcard }).ToArray(); context.Output($"Resolved to: {f}"); return f; }).ToArray(); - // TODO: Translate path from container to host (Ting) - string path = context.GetInput(PipelineCacheTaskPluginConstants.Path, required: true); + + var workingDirectory = context.Variables.GetValueOrDefault("system.defaultworkingdirectory")?.Value ?? 
workspaceRoot; await ProcessCommandInternalAsync( context, keyFp, restoreKeysGenerator, - path, + pathSegments, + workingDirectory, token); } @@ -130,7 +140,8 @@ public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, Canc AgentTaskPluginExecutionContext context, Fingerprint fingerprint, Func restoreKeysGenerator, - string path, + string[] pathSegments, + string workspace, CancellationToken token); // Properties set by tasks @@ -143,5 +154,26 @@ protected static class PipelineCacheTaskPluginConstants public static readonly string CacheHitVariable = "cacheHitVar"; public static readonly string Salt = "salt"; } + + protected void WaitForDebuggerAttach() + { + var process = System.Diagnostics.Process.GetCurrentProcess(); + System.Diagnostics.Process.EnterDebugMode(); + + Console.WriteLine($"ProcessName: {process.ProcessName}"); + Console.WriteLine($"ProcessHandle: {process.Handle}"); + var timeout = 60; + for (int i = 0; i < timeout; i++) + { + Console.WriteLine($"Attach debugger in {timeout - i} seconds"); + Thread.Sleep(1000); + + if (System.Diagnostics.Debugger.IsAttached) + { + Console.WriteLine("Debugger attached!"); + break; + } + } + } } } \ No newline at end of file diff --git a/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs index f454cceba5..6418849819 100644 --- a/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs @@ -9,7 +9,7 @@ using Microsoft.VisualStudio.Services.PipelineCache.WebApi; namespace Agent.Plugins.PipelineCache -{ +{ public class RestorePipelineCacheV0 : PipelineCacheTaskPluginBase { public override string Stage => "main"; @@ -18,7 +18,8 @@ public class RestorePipelineCacheV0 : PipelineCacheTaskPluginBase AgentTaskPluginExecutionContext context, Fingerprint fingerprint, Func restoreKeysGenerator, - string path, + string[] pathSegments, + string workspace, CancellationToken token) { context.SetTaskVariable(RestoreStepRanVariableName, RestoreStepRanVariableValue); @@ -27,10 +28,11 @@ public class RestorePipelineCacheV0 : PipelineCacheTaskPluginBase var server = new PipelineCacheServer(); Fingerprint[] restoreFingerprints = restoreKeysGenerator(); await server.DownloadAsync( - context, - (new [] { fingerprint}).Concat(restoreFingerprints).ToArray(), - path, + context, + (new[] { fingerprint }).Concat(restoreFingerprints).ToArray(), + pathSegments, context.GetInput(PipelineCacheTaskPluginConstants.CacheHitVariable, required: false), + workspace, token); } } diff --git a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs index c7ef73086e..a84eb8ba55 100644 --- a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs @@ -57,20 +57,21 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can protected override async Task ProcessCommandInternalAsync( AgentTaskPluginExecutionContext context, - Fingerprint fingerprint, + Fingerprint keyFingerprint, Func restoreKeysGenerator, - string path, + string[] pathSegments, + string workspace, CancellationToken token) { - string contentFormatValue = context.Variables.GetValueOrDefault(ContentFormatVariableName)?.Value ?? string.Empty; + string contentFormatValue = context.Variables.GetValueOrDefault(ContentFormatVariableName)?.Value ?? 
string.Empty; string calculatedFingerPrint = context.TaskVariables.GetValueOrDefault(ResolvedFingerPrintVariableName)?.Value ?? string.Empty; - if(!string.IsNullOrWhiteSpace(calculatedFingerPrint) && !fingerprint.ToString().Equals(calculatedFingerPrint, StringComparison.Ordinal)) + if (!string.IsNullOrWhiteSpace(calculatedFingerPrint) && !keyFingerprint.ToString().Equals(calculatedFingerPrint, StringComparison.Ordinal)) { - context.Warning($"The given cache key has changed in it's resolved value between restore and save steps;\n"+ - $"original key: {calculatedFingerPrint}\n"+ - $"modified key: {fingerprint}\n"); - } + context.Warning($"The given cache key has changed in it's resolved value between restore and save steps;\n" + + $"original key: {calculatedFingerPrint}\n" + + $"modified key: {keyFingerprint}\n"); + } ContentFormat contentFormat; if (string.IsNullOrWhiteSpace(contentFormatValue)) @@ -85,8 +86,9 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can PipelineCacheServer server = new PipelineCacheServer(); await server.UploadAsync( context, - fingerprint, - path, + keyFingerprint, + pathSegments, + workspace, token, contentFormat); } diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 6e5605af31..fac30cd046 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -13,6 +13,7 @@ using Microsoft.TeamFoundation.DistributedTask.WebApi; using Microsoft.VisualStudio.Services.BlobStore.Common; using Microsoft.VisualStudio.Services.BlobStore.WebApi; +using Microsoft.VisualStudio.Services.PipelineCache.WebApi; namespace Agent.Plugins.PipelineCache { @@ -29,18 +30,22 @@ public static class TarUtils /// The path to the TAR. public static async Task ArchiveFilesToTarAsync( AgentTaskPluginExecutionContext context, - string inputPath, + string[] pathSegments, + string workspace, CancellationToken cancellationToken) { - if(File.Exists(inputPath)) + foreach (var inputPath in pathSegments) { - throw new DirectoryNotFoundException($"Please specify path to a directory, File path is not allowed. {inputPath} is a file."); + if (File.Exists(inputPath)) + { + throw new DirectoryNotFoundException($"Please specify path to a directory, File path is not allowed. 
{inputPath} is a file."); + } } var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, inputPath); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, pathSegments, workspace); Action actionOnFailure = () => { @@ -55,6 +60,7 @@ public static class TarUtils (Process process, CancellationToken ct) => Task.CompletedTask, actionOnFailure, cancellationToken); + return archiveFile; } @@ -70,20 +76,26 @@ public static class TarUtils AgentTaskPluginExecutionContext context, Manifest manifest, DedupManifestArtifactClient dedupManifestClient, - string targetDirectory, + string[] pathSegments, + string workspace, CancellationToken cancellationToken) { ValidateTarManifest(manifest); - Directory.CreateDirectory(targetDirectory); - - DedupIdentifier dedupId = DedupIdentifier.Create(manifest.Items.Single(i => i.Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)).Blob.Id); + // TODO: Use workspace somehow + foreach (var segment in pathSegments) + { + Directory.CreateDirectory(segment); + } + + DedupIdentifier dedupId = DedupIdentifier.Create(manifest.Items.Single(i => i.Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)).Blob.Id); - ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, targetDirectory); + ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, pathSegments); Func downloadTaskFunc = (process, ct) => - Task.Run(async () => { + Task.Run(async () => + { try { await dedupManifestClient.DownloadToStreamAsync(dedupId, process.StandardInput.BaseStream, proxyUri: null, cancellationToken: ct); @@ -95,7 +107,7 @@ public static class TarUtils { process.Kill(); } - catch {} + catch { } ExceptionDispatchInfo.Capture(e).Throw(); } }); @@ -165,11 +177,17 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string inputPath) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string[] inputPaths, string workspace) { var processFileName = GetTar(context); - inputPath = inputPath.TrimEnd(Path.DirectorySeparatorChar).TrimEnd(Path.AltDirectorySeparatorChar); - var processArguments = $"-cf \"{archiveFileName}\" -C \"{inputPath}\" ."; // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. + + inputPaths = inputPaths + .Select(i => Directory.Exists(i) ? Path.GetRelativePath(workspace, i) : Path.Combine(workspace, i)) + .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar).TrimEnd(Path.AltDirectorySeparatorChar)}\"") + .ToArray(); + + // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. 
+ var processArguments = $"-cf \"{archiveFileName}\" -C \"{workspace}\" {string.Join(' ', inputPaths)}"; if (IsSystemDebugTrue(context)) { @@ -179,21 +197,23 @@ private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecution { processArguments = "-h " + processArguments; } - + ProcessStartInfo processStartInfo = new ProcessStartInfo(); CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: Path.GetTempPath()); // We want to create the archiveFile in temp folder, and hence starting the tar process from TEMP to avoid absolute paths in tar cmd line. return processStartInfo; } private static string GetTar(AgentTaskPluginExecutionContext context) - { + { // check if the user specified the tar executable to use: string location = Environment.GetEnvironmentVariable(TarLocationEnvironmentVariableName); - return String.IsNullOrWhiteSpace(location) ? "tar" : location; + return String.IsNullOrWhiteSpace(location) ? "tar" : location; } - private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string targetDirectory) + private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string[] pathSegments) { + // TODO: Get common directory of path segments + var targetDirectory = pathSegments[0]; string processFileName, processArguments; if (isWindows && CheckIf7ZExists()) { @@ -221,6 +241,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut private static void ValidateTarManifest(Manifest manifest) { + // TODO: This will need to change if (manifest == null || manifest.Items.Count() != 1 || !manifest.Items.Single().Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)) { throw new ArgumentException($"Manifest containing a tar cannot have more than one item."); @@ -236,7 +257,7 @@ private static void TryDeleteFile(string fileName) File.Delete(fileName); } } - catch {} + catch { } } private static string CreateArchiveFileName() @@ -246,7 +267,7 @@ private static string CreateArchiveFileName() private static bool IsSystemDebugTrue(AgentTaskPluginExecutionContext context) { - if (context.Variables.TryGetValue("system.debug", out VariableValue systemDebugVar)) + if (context.Variables.TryGetValue("system.debug", out VariableValue systemDebugVar)) { return string.Equals(systemDebugVar?.Value, "true", StringComparison.OrdinalIgnoreCase); } From 2b71eb1e65debb14b6e2203901ef10bd55844dea Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Wed, 19 Feb 2020 09:07:42 -0800 Subject: [PATCH 02/27] wip caching multiple directories in tarball --- .../PipelineCache/PipelineCacheTaskPluginBase.cs | 2 -- src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs | 2 ++ src/Agent.Plugins/PipelineCache/TarUtils.cs | 7 +++++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index a9ee9eeadb..3e44949916 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -87,8 +87,6 @@ internal static (bool isOldFormat, string[] keySegments, IEnumerable r public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { - WaitForDebuggerAttach(); - ArgUtil.NotNull(context, nameof(context)); VariableValue saltValue = context.Variables.GetValueOrDefault(SaltVariableName); diff --git 
a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs index a84eb8ba55..99f7e83352 100644 --- a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs @@ -19,6 +19,8 @@ public class SavePipelineCacheV0 : PipelineCacheTaskPluginBase Hence we are overriding the RunAsync function to include that logic. */ public override async Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { + WaitForDebuggerAttach(); + bool successSoFar = false; if (context.Variables.TryGetValue("agent.jobstatus", out VariableValue jobStatusVar)) { diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index fac30cd046..df635c6829 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -181,11 +181,14 @@ private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecution { var processFileName = GetTar(context); + // TODO: Add unit tests for this path expansion inputPaths = inputPaths - .Select(i => Directory.Exists(i) ? Path.GetRelativePath(workspace, i) : Path.Combine(workspace, i)) - .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar).TrimEnd(Path.AltDirectorySeparatorChar)}\"") + .Select(i => Path.IsPathFullyQualified(i) ? i : Path.Combine(workspace, i)) + .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") .ToArray(); + workspace = workspace.Trim(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. 
var processArguments = $"-cf \"{archiveFileName}\" -C \"{workspace}\" {string.Join(' ', inputPaths)}"; From e55adbd9d8741a038de47832c46e12789bdeb45a Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Wed, 19 Feb 2020 12:48:32 -0800 Subject: [PATCH 03/27] Caching multiple directories --- .../PipelineCache/PipelineCacheServer.cs | 2 +- .../PipelineCacheTaskPluginBase.cs | 2 + .../PipelineCache/SavePipelineCacheV0.cs | 2 - src/Agent.Plugins/PipelineCache/TarUtils.cs | 38 ++++++++++--------- 4 files changed, 24 insertions(+), 20 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index 2f96442d7a..d7f2e5a51d 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -207,7 +207,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent string manifestPath = Path.Combine(Path.GetTempPath(), $"{nameof(DedupManifestArtifactClient)}.{Path.GetRandomFileName()}.manifest"); await dedupManifestClient.DownloadFileToPathAsync(manifestId, manifestPath, proxyUri: null, cancellationToken: cancellationToken); Manifest manifest = JsonSerializer.Deserialize(File.ReadAllText(manifestPath)); - await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, pathSegments, workspace, cancellationToken); + await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, workspace, cancellationToken); try { if (File.Exists(manifestPath)) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index 3e44949916..a9ee9eeadb 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -87,6 +87,8 @@ internal static (bool isOldFormat, string[] keySegments, IEnumerable r public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { + WaitForDebuggerAttach(); + ArgUtil.NotNull(context, nameof(context)); VariableValue saltValue = context.Variables.GetValueOrDefault(SaltVariableName); diff --git a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs index 99f7e83352..a84eb8ba55 100644 --- a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs @@ -19,8 +19,6 @@ public class SavePipelineCacheV0 : PipelineCacheTaskPluginBase Hence we are overriding the RunAsync function to include that logic. 
*/ public override async Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { - WaitForDebuggerAttach(); - bool successSoFar = false; if (context.Variables.TryGetValue("agent.jobstatus", out VariableValue jobStatusVar)) { diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index df635c6829..0a53d0b091 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -45,7 +45,7 @@ public static class TarUtils var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, pathSegments, workspace); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFile, pathSegments, workspace); Action actionOnFailure = () => { @@ -76,21 +76,15 @@ public static class TarUtils AgentTaskPluginExecutionContext context, Manifest manifest, DedupManifestArtifactClient dedupManifestClient, - string[] pathSegments, string workspace, CancellationToken cancellationToken) { ValidateTarManifest(manifest); - // TODO: Use workspace somehow - foreach (var segment in pathSegments) - { - Directory.CreateDirectory(segment); - } - DedupIdentifier dedupId = DedupIdentifier.Create(manifest.Items.Single(i => i.Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)).Blob.Id); - ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, pathSegments); + // We now can simply specify the working directory as the tarball will contain paths relative to it + ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, workspace); Func downloadTaskFunc = (process, ct) => @@ -177,20 +171,21 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string[] inputPaths, string workspace) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFilePath, string[] inputPaths, string workspace) { var processFileName = GetTar(context); // TODO: Add unit tests for this path expansion inputPaths = inputPaths .Select(i => Path.IsPathFullyQualified(i) ? i : Path.Combine(workspace, i)) + .Select(i => Path.GetRelativePath(workspace, i)) .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") .ToArray(); - workspace = workspace.Trim(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + workspace = workspace.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. - var processArguments = $"-cf \"{archiveFileName}\" -C \"{workspace}\" {string.Join(' ', inputPaths)}"; + var processArguments = $"-cf \"{archiveFilePath}\" -C \"{workspace}\" {string.Join(' ', inputPaths)}"; if (IsSystemDebugTrue(context)) { @@ -213,15 +208,22 @@ private static string GetTar(AgentTaskPluginExecutionContext context) return String.IsNullOrWhiteSpace(location) ? 
"tar" : location; } - private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string[] pathSegments) + private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string workspace) { // TODO: Get common directory of path segments - var targetDirectory = pathSegments[0]; + //var targetDirectory = pathSegments[0]; string processFileName, processArguments; if (isWindows && CheckIf7ZExists()) { + //processFileName = "7z"; + //processArguments = $"x -si -aoa -o\"{targetDirectory}\" -ttar"; + //if (IsSystemDebugTrue(context)) + //{ + // processArguments = "-bb1 " + processArguments; + //} + // TODO: Verify this works processFileName = "7z"; - processArguments = $"x -si -aoa -o\"{targetDirectory}\" -ttar"; + processArguments = $"x -si -aoa -o\"{workspace}\" -ttar"; if (IsSystemDebugTrue(context)) { processArguments = "-bb1 " + processArguments; @@ -230,7 +232,9 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut else { processFileName = GetTar(context); - processArguments = $"-xf - -C ."; // Instead of targetDirectory, we are providing . to tar, because the tar process is being started from targetDirectory. + // Instead of targetDirectory, we are providing . to tar, because the tar process is being started from targetDirectory. + // -P is added so that relative paths are accepted by tar + processArguments = $"-xf - -P -C ."; if (IsSystemDebugTrue(context)) { processArguments = "-v " + processArguments; @@ -238,7 +242,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut } ProcessStartInfo processStartInfo = new ProcessStartInfo(); - CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: targetDirectory); + CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workspace); // TODO: is this the right directory? 
return processStartInfo; } From f216caf2e8f788755563b0a9c24963e384e28857 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Wed, 19 Feb 2020 15:19:06 -0800 Subject: [PATCH 04/27] Use fingerprint code to constuct list of paths to cache --- .../PipelineCache/FingerprintCreator.cs | 312 +++++++++++++----- .../PipelineCache/PipelineCacheServer.cs | 15 +- .../PipelineCacheTaskPluginBase.cs | 3 +- src/Agent.Plugins/PipelineCache/TarUtils.cs | 17 +- 4 files changed, 246 insertions(+), 101 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index 71827ed2f4..f8200704eb 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -13,6 +13,7 @@ using System.Runtime.InteropServices; using System.Security.Cryptography; using System.Text; +using System.Text.RegularExpressions; [assembly: InternalsVisibleTo("Test")] @@ -32,7 +33,7 @@ public static class FingerprintCreator AllowWindowsPaths = isWindows, }; - private static readonly char[] GlobChars = new [] { '*', '?', '[', ']' }; + private static readonly char[] GlobChars = new[] { '*', '?', '[', ']' }; private const char ForceStringLiteral = '"'; @@ -49,7 +50,7 @@ internal static bool IsPathyKeySegment(string keySegment) { if (keySegment.First() == ForceStringLiteral && keySegment.Last() == ForceStringLiteral) return false; if (keySegment.Any(c => !IsPathyChar(c))) return false; - if (!keySegment.Contains(".") && + if (!keySegment.Contains(".") && !keySegment.Contains(Path.DirectorySeparatorChar) && !keySegment.Contains(Path.AltDirectorySeparatorChar)) return false; if (keySegment.Last() == '.') return false; @@ -58,8 +59,9 @@ internal static bool IsPathyKeySegment(string keySegment) internal static Func CreateMinimatchFilter(AgentTaskPluginExecutionContext context, string rule, bool invert) { - Func filter = Minimatcher.CreateFilter(rule, minimatchOptions); - Func tracedFilter = (path) => { + Func filter = Minimatcher.CreateFilter(rule, minimatchOptions); + Func tracedFilter = (path) => + { bool filterResult = filter(path); context.Verbose($"Path `{path}` is {(filterResult ? "" : "not ")}{(invert ? "excluded" : "included")} because of pattern `{(invert ? "!" 
: "")}{rule}`."); return invert ^ filterResult; @@ -81,16 +83,16 @@ internal static string MakePathCanonical(string defaultWorkingDirectory, string } } - internal static Func CreateFilter( + internal static Func CreateFilter( AgentTaskPluginExecutionContext context, IEnumerable includeRules, IEnumerable excludeRules) { - Func[] includeFilters = includeRules.Select(includeRule => - CreateMinimatchFilter(context, includeRule, invert: false)).ToArray(); - Func[] excludeFilters = excludeRules.Select(excludeRule => - CreateMinimatchFilter(context, excludeRule, invert: true)).ToArray(); - Func filter = (path) => includeFilters.Any(f => f(path)) && excludeFilters.All(f => f(path)); + Func[] includeFilters = includeRules.Select(includeRule => + CreateMinimatchFilter(context, includeRule, invert: false)).ToArray(); + Func[] excludeFilters = excludeRules.Select(excludeRule => + CreateMinimatchFilter(context, excludeRule, invert: true)).ToArray(); + Func filter = (path) => includeFilters.Any(f => f(path)) && excludeFilters.All(f => f(path)); return filter; } @@ -109,10 +111,10 @@ public MatchedFile(string displayPath, long fileLength, string hash) { this.DisplayPath = displayPath; this.FileLength = fileLength; - this.Hash = hash; + this.Hash = hash; } - public MatchedFile(string displayPath, FileStream fs): + public MatchedFile(string displayPath, FileStream fs) : this(displayPath, fs.Length, s_sha256.ComputeHash(fs).ToHex()) { } @@ -121,11 +123,13 @@ public MatchedFile(string displayPath, long fileLength, string hash) public long FileLength; public string Hash; - public string GetHash() { - return MatchedFile.GenerateHash(new [] { this }); + public string GetHash() + { + return MatchedFile.GenerateHash(new[] { this }); } - public static string GenerateHash(IEnumerable matches) { + public static string GenerateHash(IEnumerable matches) + { string s = matches.Aggregate(new StringBuilder(), (sb, file) => sb.Append($"\nSHA256({file.DisplayPath})=[{file.FileLength}]{file.Hash}"), sb => sb.ToString()); @@ -159,7 +163,8 @@ internal static Enumeration DetermineFileEnumerationFromGlob(string includeGlobP // no globbing if (firstGlob < 0) { - return new Enumeration() { + return new Enumeration() + { RootPath = Path.GetDirectoryName(includeGlobPathAbsolute), Pattern = Path.GetFileName(includeGlobPathAbsolute), Depth = SearchOption.TopDirectoryOnly @@ -167,105 +172,114 @@ internal static Enumeration DetermineFileEnumerationFromGlob(string includeGlobP } else { - int rootDirLength = includeGlobPathAbsolute.Substring(0,firstGlob).LastIndexOfAny( new [] { Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar}); - return new Enumeration() { - RootPath = includeGlobPathAbsolute.Substring(0,rootDirLength), + int rootDirLength = includeGlobPathAbsolute.Substring(0, firstGlob).LastIndexOfAny(new[] { Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar }); + return new Enumeration() + { + RootPath = includeGlobPathAbsolute.Substring(0, rootDirLength), Pattern = "*", Depth = hasRecursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly }; } } - internal static void CheckKeySegment(string keySegment) + internal static void CheckSegment(string segment, string segmentType) { - if (keySegment.Equals("*", StringComparison.Ordinal)) + if (segment.Equals("*", StringComparison.Ordinal)) { - throw new ArgumentException("`*` is a reserved key segment. For path glob, use `./*`."); + throw new ArgumentException($"`*` is a reserved {segmentType} segment. 
For path glob, use `./*`."); } - else if (keySegment.Equals(Fingerprint.Wildcard, StringComparison.Ordinal)) + else if (segment.Equals(Fingerprint.Wildcard, StringComparison.Ordinal)) { - throw new ArgumentException("`**` is a reserved key segment. For path glob, use `./**`."); + throw new ArgumentException($"`**` is a reserved {segmentType} segment. For path glob, use `./**`."); } - else if (keySegment.First() == '\'') + else if (segment.First() == '\'') { - throw new ArgumentException("A key segment cannot start with a single-quote character`."); + throw new ArgumentException($"A {segmentType} segment cannot start with a single-quote character`."); } - else if (keySegment.First() == '`') + else if (segment.First() == '`') { - throw new ArgumentException("A key segment cannot start with a backtick character`."); + throw new ArgumentException($"A {segmentType} segment cannot start with a backtick character`."); } } - public static Fingerprint EvaluateKeyToFingerprint( + internal static void LogSegment( AgentTaskPluginExecutionContext context, - string filePathRoot, - IEnumerable keySegments) + string[] segments, + string segment, + KeySegmentType type, + Object details) { - // Quickly validate all segments - foreach (string keySegment in keySegments) + Func FormatForDisplay = (value, displayLength) => { - CheckKeySegment(keySegment); - } + if (value.Length > displayLength) + { + value = value.Substring(0, displayLength - 3) + "..."; + } - string defaultWorkingDirectory = context.Variables.GetValueOrDefault( - "system.defaultworkingdirectory" // Constants.Variables.System.DefaultWorkingDirectory - )?.Value; + return value.PadRight(displayLength); + }; - var resolvedSegments = new List(); - var exceptions = new List(); + string formattedSegment = FormatForDisplay(segment, Math.Min(segments.Select(s => s.Length).Max(), 50)); - Action LogKeySegment = (segment, type, details) => + if (type == KeySegmentType.String) { - Func FormatForDisplay = (value, displayLength) => - { - if (value.Length > displayLength) - { - value = value.Substring(0, displayLength - 3) + "..."; - } - - return value.PadRight(displayLength); - }; - - string formattedSegment = FormatForDisplay(segment, Math.Min(keySegments.Select(s => s.Length).Max(), 50)); + context.Output($" - {formattedSegment} [string]"); + } + else + { + var matches = (details as MatchedFile[]) ?? new MatchedFile[0]; - if (type == KeySegmentType.String) + if (type == KeySegmentType.FilePath) { - context.Output($" - {formattedSegment} [string]"); + string fileHash = matches.Length > 0 ? matches[0].Hash : null; + context.Output($" - {formattedSegment} [file] {(!string.IsNullOrWhiteSpace(fileHash) ? $"--> {fileHash}" : "(not found)")}"); } - else + else if (type == KeySegmentType.FilePattern) { - var matches = (details as MatchedFile[]) ?? new MatchedFile[0]; - - if (type == KeySegmentType.FilePath) - { - string fileHash = matches.Length > 0 ? matches[0].Hash : null; - context.Output($" - {formattedSegment} [file] {(!string.IsNullOrWhiteSpace(fileHash) ? 
$"--> {fileHash}" : "(not found)")}"); - } - else if (type == KeySegmentType.FilePattern) + context.Output($" - {formattedSegment} [file pattern; matches: {matches.Length}]"); + if (matches.Any()) { - context.Output($" - {formattedSegment} [file pattern; matches: {matches.Length}]"); - if (matches.Any()) + int filePathDisplayLength = Math.Min(matches.Select(mf => mf.DisplayPath.Length).Max(), 70); + foreach (var match in matches) { - int filePathDisplayLength = Math.Min(matches.Select(mf => mf.DisplayPath.Length).Max(), 70); - foreach (var match in matches) - { - context.Output($" - {FormatForDisplay(match.DisplayPath, filePathDisplayLength)} --> {match.Hash}"); - } + context.Output($" - {FormatForDisplay(match.DisplayPath, filePathDisplayLength)} --> {match.Hash}"); } - } + } } - }; + } + } + + public static Fingerprint EvaluateKeyToFingerprint( + AgentTaskPluginExecutionContext context, + string filePathRoot, + IEnumerable keySegments) + { + // Avoid multiple enumerations + var segments = keySegments.ToArray(); + + // Quickly validate all segments + foreach (string segment in segments) + { + CheckSegment(segment, "key"); + } + + string defaultWorkingDirectory = context.Variables.GetValueOrDefault( + "system.defaultworkingdirectory" // Constants.Variables.System.DefaultWorkingDirectory + )?.Value; + + var resolvedSegments = new List(); + var exceptions = new List(); - foreach (string keySegment in keySegments) + foreach (string keySegment in segments) { if (!IsPathyKeySegment(keySegment)) { - LogKeySegment(keySegment, KeySegmentType.String, null); + LogSegment(context, segments, keySegment, KeySegmentType.String, null); resolvedSegments.Add(keySegment); } else { - string[] pathRules = keySegment.Split(new []{','}, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).ToArray(); + string[] pathRules = keySegment.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).ToArray(); string[] includeRules = pathRules.Where(p => !p.StartsWith('!')).ToArray(); if (!includeRules.Any()) @@ -273,38 +287,39 @@ internal static void CheckKeySegment(string keySegment) throw new ArgumentException("No include rules specified."); } - var enumerations = new Dictionary>(); - foreach(string includeRule in includeRules) + var enumerations = new Dictionary>(); + foreach (string includeRule in includeRules) { string absoluteRootRule = MakePathCanonical(defaultWorkingDirectory, includeRule); context.Verbose($"Expanded include rule is `{absoluteRootRule}`."); Enumeration enumeration = DetermineFileEnumerationFromGlob(absoluteRootRule); List globs; - if(!enumerations.TryGetValue(enumeration, out globs)) + if (!enumerations.TryGetValue(enumeration, out globs)) { - enumerations[enumeration] = globs = new List(); + enumerations[enumeration] = globs = new List(); } globs.Add(absoluteRootRule); } string[] excludeRules = pathRules.Where(p => p.StartsWith('!')).ToArray(); - string[] absoluteExcludeRules = excludeRules.Select(excludeRule => { + string[] absoluteExcludeRules = excludeRules.Select(excludeRule => + { excludeRule = excludeRule.Substring(1); return MakePathCanonical(defaultWorkingDirectory, excludeRule); }).ToArray(); var matchedFiles = new SortedDictionary(StringComparer.Ordinal); - foreach(var kvp in enumerations) + foreach (var kvp in enumerations) { Enumeration enumerate = kvp.Key; List absoluteIncludeGlobs = kvp.Value; context.Verbose($"Enumerating starting at root `{enumerate.RootPath}` with pattern `{enumerate.Pattern}` and depth `{enumerate.Depth}`."); IEnumerable files = 
Directory.EnumerateFiles(enumerate.RootPath, enumerate.Pattern, enumerate.Depth); - Func filter = CreateFilter(context, absoluteIncludeGlobs, absoluteExcludeRules); + Func filter = CreateFilter(context, absoluteIncludeGlobs, absoluteExcludeRules); files = files.Where(f => filter(f)).Distinct(); - foreach(string path in files) + foreach (string path in files) { using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read)) { @@ -324,9 +339,13 @@ internal static void CheckKeySegment(string keySegment) displayKeySegment = context.Container.TranslateToContainerPath(displayKeySegment); } - LogKeySegment(displayKeySegment, + LogSegment( + context, + segments, + displayKeySegment, patternSegment ? KeySegmentType.FilePattern : KeySegmentType.FilePath, - matchedFiles.Values.ToArray()); + matchedFiles.Values.ToArray() + ); if (!matchedFiles.Any()) { @@ -339,9 +358,128 @@ internal static void CheckKeySegment(string keySegment) exceptions.Add(new FileNotFoundException($"File not found: {displayKeySegment}")); } } - + resolvedSegments.Add(MatchedFile.GenerateHash(matchedFiles.Values)); - } + } + } + + if (exceptions.Any()) + { + throw new AggregateException(exceptions); + } + + return new Fingerprint() { Segments = resolvedSegments.ToArray() }; + } + + public static Fingerprint EvaluatePathToFingerprint( + AgentTaskPluginExecutionContext context, + string workingDirectory, + IEnumerable pathSegments) + { + // Avoid multiple enumerations + var segments = pathSegments.ToArray(); + + // Quickly validate all segments + foreach (string pathSegment in pathSegments) + { + CheckSegment(pathSegment, "path"); + } + + //string defaultWorkingDirectory = context.Variables.GetValueOrDefault( + // "system.defaultworkingdirectory" // Constants.Variables.System.DefaultWorkingDirectory + //)?.Value; + + var resolvedSegments = new List(); + var exceptions = new List(); + + foreach (string segment in segments) + { + if (!IsPathyKeySegment(segment)) + { + LogSegment(context, segments, segment, KeySegmentType.String, null); + resolvedSegments.Add(segment); + } + else + { + string[] pathRules = segment.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).ToArray(); + string[] includeRules = pathRules.Where(p => !p.StartsWith('!')).ToArray(); + + if (!includeRules.Any()) + { + throw new ArgumentException("No include rules specified."); + } + + var enumerations = new Dictionary>(); + foreach (string includeRule in includeRules) + { + string absoluteRootRule = MakePathCanonical(workingDirectory, includeRule); + context.Verbose($"Expanded include rule is `{absoluteRootRule}`."); + Enumeration enumeration = DetermineFileEnumerationFromGlob(absoluteRootRule); + List globs; + if (!enumerations.TryGetValue(enumeration, out globs)) + { + enumerations[enumeration] = globs = new List(); + } + globs.Add(absoluteRootRule); + } + + string[] excludeRules = pathRules.Where(p => p.StartsWith('!')).ToArray(); + string[] absoluteExcludeRules = excludeRules.Select(excludeRule => + { + excludeRule = excludeRule.Substring(1); + return MakePathCanonical(workingDirectory, excludeRule); + }).ToArray(); + + var matchedDirectories = new SortedDictionary(StringComparer.Ordinal); + + foreach (var kvp in enumerations) + { + Enumeration enumerate = kvp.Key; + List absoluteIncludeGlobs = kvp.Value; + context.Verbose($"Enumerating starting at root `{enumerate.RootPath}` with pattern `{enumerate.Pattern}` and depth `{enumerate.Depth}`."); + IEnumerable directories = 
Directory.EnumerateDirectories(enumerate.RootPath, enumerate.Pattern, enumerate.Depth); + Func filter = CreateFilter(context, absoluteIncludeGlobs, absoluteExcludeRules); + directories = directories.Where(f => filter(f)).Distinct(); + + foreach (string path in directories) + { + // Path.GetRelativePath returns 'The relative path, or path if the paths don't share the same root.' + string displayPath = Path.GetRelativePath(workingDirectory, path); + matchedDirectories.Add(path, displayPath); + } + } + + var patternSegment = segment.IndexOfAny(GlobChars) >= 0 || matchedDirectories.Count() > 1; + + var displayPathSegment = segment; + + if (context.Container != null) + { + displayPathSegment = context.Container.TranslateToContainerPath(displayPathSegment); + } + + LogSegment( + context, + segments, + displayPathSegment, + patternSegment ? KeySegmentType.FilePattern : KeySegmentType.FilePath, + matchedDirectories.Values.ToArray() + ); + + if (!matchedDirectories.Any()) + { + if (patternSegment) + { + exceptions.Add(new DirectoryNotFoundException($"No matching directories found for pattern: {displayPathSegment}")); + } + else + { + exceptions.Add(new DirectoryNotFoundException($"Directory not found: {displayPathSegment}")); + } + } + + resolvedSegments.AddRange(matchedDirectories.Values); + } } if (exceptions.Any()) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index d7f2e5a51d..fce3d1a67c 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -50,7 +50,12 @@ public class PipelineCacheServer return; } - string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathSegments, workspace, cancellationToken); + context.Output("Resolving path:"); + Fingerprint pathFp = FingerprintCreator.EvaluatePathToFingerprint(context, workspace, pathSegments); + context.Output($"Resolved to: {pathFp}"); + + string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, workspace, cancellationToken); + //Upload the pipeline artifact. PipelineCacheActionRecord uploadRecord = clientTelemetry.CreateRecord((level, uri, type) => new PipelineCacheActionRecord(level, uri, type, nameof(dedupManifestClient.PublishAsync), context)); @@ -178,17 +183,17 @@ public class PipelineCacheServer return pipelineCacheClient; } - private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, string[] pathSegments, string workspace, CancellationToken cancellationToken) + private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workspace, CancellationToken cancellationToken) { - if (pathSegments.Length == 1) + if (pathFingerprint.Segments.Length == 1) { - return pathSegments[0]; + return pathFingerprint.Segments[0]; } string uploadPath = string.Empty; // TODO: what to do if not SingleTar? 
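            // Worked illustration (hypothetical layout, assuming GNU tar is used): with a workspace containing
            // packages/a/node_modules and packages/b/node_modules and a task input of
            //   path: 'packages/a/node_modules | packages/b/node_modules'
            // EvaluatePathToFingerprint is expected to resolve pathFingerprint.Segments to those two
            // directories, expressed relative to the workspace, so archiving below runs roughly
            //   tar -cf "<temp>/<archive>.tar" -C "<workspace>" "packages/a/node_modules" "packages/b/node_modules"
            // and restore later pipes the downloaded blob into 'tar -xf - -P -C .' started from the workspace,
            // so the entries land back at the same workspace-relative locations.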
if (contentFormat == ContentFormat.SingleTar) { - uploadPath = await TarUtils.ArchiveFilesToTarAsync(context, pathSegments, workspace, cancellationToken); + uploadPath = await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workspace, cancellationToken); } return uploadPath; } diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index a9ee9eeadb..b4d9d426c1 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -160,8 +160,7 @@ protected void WaitForDebuggerAttach() var process = System.Diagnostics.Process.GetCurrentProcess(); System.Diagnostics.Process.EnterDebugMode(); - Console.WriteLine($"ProcessName: {process.ProcessName}"); - Console.WriteLine($"ProcessHandle: {process.Handle}"); + Console.WriteLine($"PID: {process.Id}"); var timeout = 60; for (int i = 0; i < timeout; i++) { diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 0a53d0b091..6213dfaee6 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -30,11 +30,11 @@ public static class TarUtils /// The path to the TAR. public static async Task ArchiveFilesToTarAsync( AgentTaskPluginExecutionContext context, - string[] pathSegments, + Fingerprint pathFingerprint, string workspace, CancellationToken cancellationToken) { - foreach (var inputPath in pathSegments) + foreach (var inputPath in pathFingerprint.Segments) { if (File.Exists(inputPath)) { @@ -45,7 +45,7 @@ public static class TarUtils var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFile, pathSegments, workspace); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFile, pathFingerprint, workspace); Action actionOnFailure = () => { @@ -171,14 +171,17 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFilePath, string[] inputPaths, string workspace) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFilePath, Fingerprint pathFingerprint, string workspace) { var processFileName = GetTar(context); // TODO: Add unit tests for this path expansion - inputPaths = inputPaths - .Select(i => Path.IsPathFullyQualified(i) ? i : Path.Combine(workspace, i)) - .Select(i => Path.GetRelativePath(workspace, i)) + //inputPaths = inputPaths + // .Select(i => Path.IsPathFullyQualified(i) ? 
i : Path.Combine(workspace, i)) + // .Select(i => Path.GetRelativePath(workspace, i)) + // .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") + // .ToArray(); + var inputPaths = pathFingerprint.Segments .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") .ToArray(); From 8a99bf0b3b33c07c10752aa2c46dcfb6ed55fba2 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Thu, 20 Feb 2020 08:36:50 -0800 Subject: [PATCH 05/27] Cleanup path fingerprint logging --- .../PipelineCache/FingerprintCreator.cs | 46 ++++++++++++------- src/Test/L0/Plugin/IsPathyTests.cs | 2 +- 2 files changed, 31 insertions(+), 17 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index f8200704eb..1610154530 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -13,7 +13,6 @@ using System.Runtime.InteropServices; using System.Security.Cryptography; using System.Text; -using System.Text.RegularExpressions; [assembly: InternalsVisibleTo("Test")] @@ -46,14 +45,14 @@ private static bool IsPathyChar(char c) return !Path.GetInvalidFileNameChars().Contains(c); } - internal static bool IsPathyKeySegment(string keySegment) + internal static bool IsPathySegment(string segment) { - if (keySegment.First() == ForceStringLiteral && keySegment.Last() == ForceStringLiteral) return false; - if (keySegment.Any(c => !IsPathyChar(c))) return false; - if (!keySegment.Contains(".") && - !keySegment.Contains(Path.DirectorySeparatorChar) && - !keySegment.Contains(Path.AltDirectorySeparatorChar)) return false; - if (keySegment.Last() == '.') return false; + if (segment.First() == ForceStringLiteral && segment.Last() == ForceStringLiteral) return false; + if (segment.Any(c => !IsPathyChar(c))) return false; + if (!segment.Contains(".") && + !segment.Contains(Path.DirectorySeparatorChar) && + !segment.Contains(Path.AltDirectorySeparatorChar)) return false; + if (segment.Last() == '.') return false; return true; } @@ -142,7 +141,9 @@ internal enum KeySegmentType { String = 0, FilePath = 1, - FilePattern = 2 + FilePattern = 2, + Directory = 3, + DirectoryPattern = 4 } // Given a globby path, figure out where to start enumerating. @@ -225,6 +226,23 @@ internal static void CheckSegment(string segment, string segmentType) { context.Output($" - {formattedSegment} [string]"); } + else if (type == KeySegmentType.Directory) + { + context.Output($" - {formattedSegment} [directory]"); + } + else if (type == KeySegmentType.DirectoryPattern) + { + var directories = (details as string[]) ?? new string[0]; + context.Output($" - {formattedSegment} [directory pattern; matches: {directories.Length}]"); + if (directories.Any()) + { + int filePathDisplayLength = Math.Min(directories.Select(d => d.Length).Max(), 70); + foreach (var directory in directories) + { + context.Output($" - {FormatForDisplay(directory, filePathDisplayLength)}"); + } + } + } else { var matches = (details as MatchedFile[]) ?? 
new MatchedFile[0]; @@ -272,7 +290,7 @@ internal static void CheckSegment(string segment, string segmentType) foreach (string keySegment in segments) { - if (!IsPathyKeySegment(keySegment)) + if (!IsPathySegment(keySegment)) { LogSegment(context, segments, keySegment, KeySegmentType.String, null); resolvedSegments.Add(keySegment); @@ -385,16 +403,12 @@ internal static void CheckSegment(string segment, string segmentType) CheckSegment(pathSegment, "path"); } - //string defaultWorkingDirectory = context.Variables.GetValueOrDefault( - // "system.defaultworkingdirectory" // Constants.Variables.System.DefaultWorkingDirectory - //)?.Value; - var resolvedSegments = new List(); var exceptions = new List(); foreach (string segment in segments) { - if (!IsPathyKeySegment(segment)) + if (!IsPathySegment(segment)) { LogSegment(context, segments, segment, KeySegmentType.String, null); resolvedSegments.Add(segment); @@ -462,7 +476,7 @@ internal static void CheckSegment(string segment, string segmentType) context, segments, displayPathSegment, - patternSegment ? KeySegmentType.FilePattern : KeySegmentType.FilePath, + patternSegment ? KeySegmentType.DirectoryPattern : KeySegmentType.Directory, matchedDirectories.Values.ToArray() ); diff --git a/src/Test/L0/Plugin/IsPathyTests.cs b/src/Test/L0/Plugin/IsPathyTests.cs index 5c60006afd..5349259534 100644 --- a/src/Test/L0/Plugin/IsPathyTests.cs +++ b/src/Test/L0/Plugin/IsPathyTests.cs @@ -15,7 +15,7 @@ public class IsPathyTests public void Fingerprint_IsPath() { Action assertPath = (path, isPath) => - Assert.True(isPath == FingerprintCreator.IsPathyKeySegment(path), $"IsPathy({path}) should have returned {isPath}."); + Assert.True(isPath == FingerprintCreator.IsPathhSSegment(path), $"IsPathy({path}) should have returned {isPath}."); assertPath(@"''", false); assertPath(@"Windows_NT", false); assertPath(@"README.md", true); From 022db006214734a2dfb7d78a9713d052390152d9 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Thu, 20 Feb 2020 13:09:58 -0800 Subject: [PATCH 06/27] Consolidated fingerprint creation code --- .../PipelineCache/FingerprintCreator.cs | 191 +++++------------- .../PipelineCache/PipelineCacheServer.cs | 2 +- .../PipelineCacheTaskPluginBase.cs | 4 +- 3 files changed, 55 insertions(+), 142 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index 1610154530..0e1b6e8b9b 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -18,6 +18,12 @@ namespace Agent.Plugins.PipelineCache { + public enum FingerprintType + { + Key, + Path + } + public static class FingerprintCreator { private static readonly bool isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); @@ -203,9 +209,9 @@ internal static void CheckSegment(string segment, string segmentType) } } - internal static void LogSegment( + private static void LogSegment( AgentTaskPluginExecutionContext context, - string[] segments, + IEnumerable segments, string segment, KeySegmentType type, Object details) @@ -267,14 +273,12 @@ internal static void CheckSegment(string segment, string segmentType) } } - public static Fingerprint EvaluateKeyToFingerprint( + public static Fingerprint EvaluateToFingerprint( AgentTaskPluginExecutionContext context, string filePathRoot, - IEnumerable keySegments) + IEnumerable segments, + FingerprintType fingerprintType) { - // Avoid multiple enumerations - var segments = keySegments.ToArray(); - // Quickly 
validate all segments foreach (string segment in segments) { @@ -327,172 +331,81 @@ internal static void CheckSegment(string segment, string segmentType) }).ToArray(); var matchedFiles = new SortedDictionary(StringComparer.Ordinal); + var matchedDirectories = new SortedDictionary(StringComparer.Ordinal); foreach (var kvp in enumerations) { Enumeration enumerate = kvp.Key; List absoluteIncludeGlobs = kvp.Value; context.Verbose($"Enumerating starting at root `{enumerate.RootPath}` with pattern `{enumerate.Pattern}` and depth `{enumerate.Depth}`."); - IEnumerable files = Directory.EnumerateFiles(enumerate.RootPath, enumerate.Pattern, enumerate.Depth); + IEnumerable files = fingerprintType == FingerprintType.Key + ? Directory.EnumerateFiles(enumerate.RootPath, enumerate.Pattern, enumerate.Depth) + : Directory.EnumerateDirectories(enumerate.RootPath, enumerate.Pattern, enumerate.Depth); + Func filter = CreateFilter(context, absoluteIncludeGlobs, absoluteExcludeRules); files = files.Where(f => filter(f)).Distinct(); foreach (string path in files) { - using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read)) + // Path.GetRelativePath returns 'The relative path, or path if the paths don't share the same root.' + string displayPath = filePathRoot == null ? path : Path.GetRelativePath(filePathRoot, path); + + if (fingerprintType == FingerprintType.Key) + { + using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read)) + { + matchedFiles.Add(path, new MatchedFile(displayPath, fs)); + } + } + else { - // Path.GetRelativePath returns 'The relative path, or path if the paths don't share the same root.' - string displayPath = filePathRoot == null ? path : Path.GetRelativePath(filePathRoot, path); - matchedFiles.Add(path, new MatchedFile(displayPath, fs)); + matchedDirectories.Add(path, displayPath); } } } - var patternSegment = keySegment.IndexOfAny(GlobChars) >= 0 || matchedFiles.Count() > 1; + var patternSegment = keySegment.IndexOfAny(GlobChars) >= 0 || matchedFiles.Count > 1; - var displayKeySegment = keySegment; + var displaySegment = keySegment; if (context.Container != null) { - displayKeySegment = context.Container.TranslateToContainerPath(displayKeySegment); - } - - LogSegment( - context, - segments, - displayKeySegment, - patternSegment ? 
KeySegmentType.FilePattern : KeySegmentType.FilePath, - matchedFiles.Values.ToArray() - ); - - if (!matchedFiles.Any()) - { - if (patternSegment) - { - exceptions.Add(new FileNotFoundException($"No matching files found for pattern: {displayKeySegment}")); - } - else - { - exceptions.Add(new FileNotFoundException($"File not found: {displayKeySegment}")); - } - } - - resolvedSegments.Add(MatchedFile.GenerateHash(matchedFiles.Values)); - } - } - - if (exceptions.Any()) - { - throw new AggregateException(exceptions); - } - - return new Fingerprint() { Segments = resolvedSegments.ToArray() }; - } - - public static Fingerprint EvaluatePathToFingerprint( - AgentTaskPluginExecutionContext context, - string workingDirectory, - IEnumerable pathSegments) - { - // Avoid multiple enumerations - var segments = pathSegments.ToArray(); - - // Quickly validate all segments - foreach (string pathSegment in pathSegments) - { - CheckSegment(pathSegment, "path"); - } - - var resolvedSegments = new List(); - var exceptions = new List(); - - foreach (string segment in segments) - { - if (!IsPathySegment(segment)) - { - LogSegment(context, segments, segment, KeySegmentType.String, null); - resolvedSegments.Add(segment); - } - else - { - string[] pathRules = segment.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).ToArray(); - string[] includeRules = pathRules.Where(p => !p.StartsWith('!')).ToArray(); - - if (!includeRules.Any()) - { - throw new ArgumentException("No include rules specified."); + displaySegment = context.Container.TranslateToContainerPath(displaySegment); } - var enumerations = new Dictionary>(); - foreach (string includeRule in includeRules) + KeySegmentType segmentType; + object details; + if (fingerprintType == FingerprintType.Key) { - string absoluteRootRule = MakePathCanonical(workingDirectory, includeRule); - context.Verbose($"Expanded include rule is `{absoluteRootRule}`."); - Enumeration enumeration = DetermineFileEnumerationFromGlob(absoluteRootRule); - List globs; - if (!enumerations.TryGetValue(enumeration, out globs)) + segmentType = patternSegment ? KeySegmentType.FilePattern : KeySegmentType.FilePath; + details = matchedFiles.Values.ToArray(); + resolvedSegments.Add(MatchedFile.GenerateHash(matchedFiles.Values)); + + if (!matchedFiles.Any()) { - enumerations[enumeration] = globs = new List(); + var message = patternSegment ? $"No matching files found for pattern: {displaySegment}" : $"File not found: {displaySegment}"; + exceptions.Add(new FileNotFoundException(message)); } - globs.Add(absoluteRootRule); } - - string[] excludeRules = pathRules.Where(p => p.StartsWith('!')).ToArray(); - string[] absoluteExcludeRules = excludeRules.Select(excludeRule => - { - excludeRule = excludeRule.Substring(1); - return MakePathCanonical(workingDirectory, excludeRule); - }).ToArray(); - - var matchedDirectories = new SortedDictionary(StringComparer.Ordinal); - - foreach (var kvp in enumerations) + else { - Enumeration enumerate = kvp.Key; - List absoluteIncludeGlobs = kvp.Value; - context.Verbose($"Enumerating starting at root `{enumerate.RootPath}` with pattern `{enumerate.Pattern}` and depth `{enumerate.Depth}`."); - IEnumerable directories = Directory.EnumerateDirectories(enumerate.RootPath, enumerate.Pattern, enumerate.Depth); - Func filter = CreateFilter(context, absoluteIncludeGlobs, absoluteExcludeRules); - directories = directories.Where(f => filter(f)).Distinct(); - - foreach (string path in directories) + segmentType = patternSegment ? 
KeySegmentType.DirectoryPattern : KeySegmentType.Directory; + details = matchedDirectories.Values.ToArray(); + resolvedSegments.AddRange(matchedDirectories.Values); + if (!matchedDirectories.Any()) { - // Path.GetRelativePath returns 'The relative path, or path if the paths don't share the same root.' - string displayPath = Path.GetRelativePath(workingDirectory, path); - matchedDirectories.Add(path, displayPath); + var message = patternSegment ? $"No matching directories found for pattern: {displaySegment}" : $"Directory not found: {displaySegment}"; + exceptions.Add(new DirectoryNotFoundException(message)); } } - - var patternSegment = segment.IndexOfAny(GlobChars) >= 0 || matchedDirectories.Count() > 1; - - var displayPathSegment = segment; - - if (context.Container != null) - { - displayPathSegment = context.Container.TranslateToContainerPath(displayPathSegment); - } - + LogSegment( context, segments, - displayPathSegment, - patternSegment ? KeySegmentType.DirectoryPattern : KeySegmentType.Directory, - matchedDirectories.Values.ToArray() + displaySegment, + segmentType, + details ); - - if (!matchedDirectories.Any()) - { - if (patternSegment) - { - exceptions.Add(new DirectoryNotFoundException($"No matching directories found for pattern: {displayPathSegment}")); - } - else - { - exceptions.Add(new DirectoryNotFoundException($"Directory not found: {displayPathSegment}")); - } - } - - resolvedSegments.AddRange(matchedDirectories.Values); } } diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index fce3d1a67c..b7f7419833 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -51,7 +51,7 @@ public class PipelineCacheServer } context.Output("Resolving path:"); - Fingerprint pathFp = FingerprintCreator.EvaluatePathToFingerprint(context, workspace, pathSegments); + Fingerprint pathFp = FingerprintCreator.EvaluateToFingerprint(context, workspace, pathSegments, FingerprintType.Path); context.Output($"Resolved to: {pathFp}"); string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, workspace, cancellationToken); diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index b4d9d426c1..844f9c297b 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -110,14 +110,14 @@ public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, Canc } context.Output("Resolving key:"); - Fingerprint keyFp = FingerprintCreator.EvaluateKeyToFingerprint(context, workspaceRoot, keySegments); + Fingerprint keyFp = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, keySegments, FingerprintType.Key); context.Output($"Resolved to: {keyFp}"); Func restoreKeysGenerator = () => restoreKeys.Select(restoreKey => { context.Output("Resolving restore key:"); - Fingerprint f = FingerprintCreator.EvaluateKeyToFingerprint(context, workspaceRoot, restoreKey); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, restoreKey, FingerprintType.Key); f.Segments = f.Segments.Concat(new[] { Fingerprint.Wildcard }).ToArray(); context.Output($"Resolved to: {f}"); return f; From d8161bfc9f3d0c25114bb32b221a0bf154bde75c Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Thu, 20 Feb 2020 15:19:57 -0800 Subject: [PATCH 07/27] Add unit tests 
for globbing path fingerprints --- .../PipelineCache/FingerprintCreator.cs | 2 +- src/Test/L0/Plugin/FingerprintCreatorTests.cs | 143 +++++++++++++++--- src/Test/L0/Plugin/IsPathyTests.cs | 2 +- 3 files changed, 128 insertions(+), 19 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index 0e1b6e8b9b..7b8a5542a4 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -287,7 +287,7 @@ internal static void CheckSegment(string segment, string segmentType) string defaultWorkingDirectory = context.Variables.GetValueOrDefault( "system.defaultworkingdirectory" // Constants.Variables.System.DefaultWorkingDirectory - )?.Value; + )?.Value ?? filePathRoot; var resolvedSegments = new List(); var exceptions = new List(); diff --git a/src/Test/L0/Plugin/FingerprintCreatorTests.cs b/src/Test/L0/Plugin/FingerprintCreatorTests.cs index ee08cc602e..c7083bbb4b 100644 --- a/src/Test/L0/Plugin/FingerprintCreatorTests.cs +++ b/src/Test/L0/Plugin/FingerprintCreatorTests.cs @@ -25,6 +25,12 @@ public class FingerprintCreatorTests private static readonly string directory; private static readonly string path1; private static readonly string path2; + + private static readonly string directory1; + private static readonly string directory2; + + private static readonly string directory1Name; + private static readonly string directory2Name; static FingerprintCreatorTests() { @@ -34,13 +40,28 @@ static FingerprintCreatorTests() path1 = Path.GetTempFileName(); path2 = Path.GetTempFileName(); - + directory = Path.GetDirectoryName(path1); Assert.Equal(directory, Path.GetDirectoryName(path2)); + var path3 = Path.Combine(directory, Guid.NewGuid().ToString(), Guid.NewGuid().ToString()); + var path4 = Path.Combine(directory, Guid.NewGuid().ToString(), Guid.NewGuid().ToString()); + + directory1 = Path.GetDirectoryName(path3); + directory2 = Path.GetDirectoryName(path4); + + directory1Name = Path.GetFileName(directory1); + directory2Name = Path.GetFileName(directory2); + File.WriteAllBytes(path1, content1); File.WriteAllBytes(path2, content2); + Directory.CreateDirectory(directory1); + Directory.CreateDirectory(directory2); + + File.WriteAllBytes(path3, content1); + File.WriteAllBytes(path4, content2); + var hasher = new SHA256Managed(); hash1 = hasher.ComputeHash(content1); hash2 = hasher.ComputeHash(content2); @@ -55,10 +76,16 @@ public void Fingerprint_ReservedFails() { var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); Assert.Throws( - () => FingerprintCreator.EvaluateKeyToFingerprint(context, directory, new [] {"*"}) + () => FingerprintCreator.EvaluateToFingerprint(context, directory, new [] {"*"}, FingerprintType.Key) + ); + Assert.Throws( + () => FingerprintCreator.EvaluateToFingerprint(context, directory, new [] {"**"}, FingerprintType.Key) ); Assert.Throws( - () => FingerprintCreator.EvaluateKeyToFingerprint(context, directory, new [] {"**"}) + () => FingerprintCreator.EvaluateToFingerprint(context, directory, new [] {"*"}, FingerprintType.Path) + ); + Assert.Throws( + () => FingerprintCreator.EvaluateToFingerprint(context, directory, new [] {"**"}, FingerprintType.Path) ); } } @@ -66,7 +93,7 @@ public void Fingerprint_ReservedFails() [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] - public void Fingerprint_ExcludeExactMatches() + public void Fingerprint_Key_ExcludeExactMatches() { using(var hostContext = new 
TestHostContext(this)) { @@ -76,7 +103,72 @@ public void Fingerprint_ExcludeExactMatches() $"{Path.GetDirectoryName(path1)},!{path1}", }; Assert.Throws( - () => FingerprintCreator.EvaluateKeyToFingerprint(context, directory, segments) + () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Key) + ); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_IncludeFullPathMatches() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var segments = new[] + { + directory1, + directory2 + }; + + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Assert.Equal( + new [] { directory1Name, directory2Name }.OrderBy(x => x), + f.Segments.OrderBy(x => x) + ); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_IncludeRelativePathMatches() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var segments = new[] + { + $"{directory1Name}", + $"{directory2Name}" + }; + + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Assert.Equal( + new [] { directory1Name, directory2Name }.OrderBy(x => x), + f.Segments.OrderBy(x => x) + ); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_IncludeGlobMatches() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var segments = new[] + { + $"**/{directory1Name},**/{directory2Name}", + }; + + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Assert.Equal( + new [] { directory1Name, directory2Name }.OrderBy(x => x), + f.Segments.OrderBy(x => x) ); } } @@ -84,7 +176,7 @@ public void Fingerprint_ExcludeExactMatches() [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] - public void Fingerprint_ExcludeExactMisses() + public void Fingerprint_Key_ExcludeExactMisses() { using(var hostContext = new TestHostContext(this)) { @@ -93,7 +185,7 @@ public void Fingerprint_ExcludeExactMisses() { $"{path1},!{path2}", }; - Fingerprint f = FingerprintCreator.EvaluateKeyToFingerprint(context, directory, segments); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Key); Assert.Equal(1, f.Segments.Length); @@ -105,7 +197,7 @@ public void Fingerprint_ExcludeExactMisses() [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] - public void Fingerprint_FileAbsolute() + public void Fingerprint_Key_FileAbsolute() { using(var hostContext = new TestHostContext(this)) { @@ -115,7 +207,7 @@ public void Fingerprint_FileAbsolute() $"{path1}", $"{path2}", }; - Fingerprint f = FingerprintCreator.EvaluateKeyToFingerprint(context, directory, segments); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Key); var file1 = new FingerprintCreator.MatchedFile(Path.GetFileName(path1), content1.Length, hash1.ToHex()); var file2 = new FingerprintCreator.MatchedFile(Path.GetFileName(path2), content2.Length, hash2.ToHex()); @@ -129,7 +221,7 @@ public void Fingerprint_FileAbsolute() [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] - public void 
Fingerprint_FileRelative() + public void Fingerprint_Key_FileRelative() { string workingDir = Path.GetDirectoryName(path1); string relPath1 = Path.GetFileName(path1); @@ -149,7 +241,7 @@ public void Fingerprint_FileRelative() $"{relPath2}", }; - Fingerprint f = FingerprintCreator.EvaluateKeyToFingerprint(context, directory, segments); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Key); var file1 = new FingerprintCreator.MatchedFile(relPath1, content1.Length, hash1.ToHex()); var file2 = new FingerprintCreator.MatchedFile(relPath2, content2.Length, hash2.ToHex()); @@ -163,7 +255,7 @@ public void Fingerprint_FileRelative() [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] - public void Fingerprint_Str() + public void Fingerprint_Key_Str() { using(var hostContext = new TestHostContext(this)) { @@ -173,7 +265,7 @@ public void Fingerprint_Str() $"hello", }; - Fingerprint f = FingerprintCreator.EvaluateKeyToFingerprint(context, directory, segments); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Key); Assert.Equal(1, f.Segments.Length); Assert.Equal($"hello", f.Segments[0]); @@ -185,9 +277,10 @@ public void Fingerprint_Str() [Trait("Category", "Plugin")] public void ParseMultilineKeyAsOld() { - (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys) = PipelineCacheTaskPluginBase.ParseIntoSegments( + (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys, string[] pathSegments) = PipelineCacheTaskPluginBase.ParseIntoSegments( string.Empty, "gems\n$(Agent.OS)\n$(Build.SourcesDirectory)/my.gemspec", + string.Empty, string.Empty); Assert.True(isOldFormat); Assert.Equal(new [] {"gems", "$(Agent.OS)", "$(Build.SourcesDirectory)/my.gemspec"}, keySegments); @@ -199,9 +292,10 @@ public void ParseMultilineKeyAsOld() [Trait("Category", "Plugin")] public void ParseSingleLineAsNew() { - (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys) = PipelineCacheTaskPluginBase.ParseIntoSegments( + (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys, string[] pathSegments) = PipelineCacheTaskPluginBase.ParseIntoSegments( string.Empty, "$(Agent.OS)", + string.Empty, string.Empty); Assert.False(isOldFormat); Assert.Equal(new [] {"$(Agent.OS)"}, keySegments); @@ -213,13 +307,28 @@ public void ParseSingleLineAsNew() [Trait("Category", "Plugin")] public void ParseMultilineWithRestoreKeys() { - (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys) = PipelineCacheTaskPluginBase.ParseIntoSegments( + (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys, string[] pathSegments) = PipelineCacheTaskPluginBase.ParseIntoSegments( string.Empty, "$(Agent.OS) | Gemfile.lock | **/*.gemspec,!./junk/**", - "$(Agent.OS) | Gemfile.lock\n$(Agent.OS)"); + "$(Agent.OS) | Gemfile.lock\n$(Agent.OS)", + string.Empty); Assert.False(isOldFormat); Assert.Equal(new [] {"$(Agent.OS)","Gemfile.lock","**/*.gemspec,!./junk/**"}, keySegments); Assert.Equal(new [] {new []{ "$(Agent.OS)","Gemfile.lock"}, new[] {"$(Agent.OS)"}}, restoreKeys); } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void ParsePathSegments() + { + (bool isOldFormat, string[] keySegments, IEnumerable restoreKeys, string[] pathSegments) = PipelineCacheTaskPluginBase.ParseIntoSegments( + string.Empty, + string.Empty, + string.Empty, + "node_modules | dist | **/globby.*,!**.exclude"); + Assert.False(isOldFormat); + Assert.Equal(new []{ "node_modules", "dist", 
"**/globby.*,!**.exclude"}, pathSegments); + } } } \ No newline at end of file diff --git a/src/Test/L0/Plugin/IsPathyTests.cs b/src/Test/L0/Plugin/IsPathyTests.cs index 5349259534..5c09a7f678 100644 --- a/src/Test/L0/Plugin/IsPathyTests.cs +++ b/src/Test/L0/Plugin/IsPathyTests.cs @@ -15,7 +15,7 @@ public class IsPathyTests public void Fingerprint_IsPath() { Action assertPath = (path, isPath) => - Assert.True(isPath == FingerprintCreator.IsPathhSSegment(path), $"IsPathy({path}) should have returned {isPath}."); + Assert.True(isPath == FingerprintCreator.IsPathySegment(path), $"IsPathy({path}) should have returned {isPath}."); assertPath(@"''", false); assertPath(@"Windows_NT", false); assertPath(@"README.md", true); From 8b7943d4d3440daf4547bcb102bb4d6e6bb335f3 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Thu, 20 Feb 2020 15:27:29 -0800 Subject: [PATCH 08/27] Some more path fingerprint testing --- src/Test/L0/Plugin/FingerprintCreatorTests.cs | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/src/Test/L0/Plugin/FingerprintCreatorTests.cs b/src/Test/L0/Plugin/FingerprintCreatorTests.cs index c7083bbb4b..1cd28c2636 100644 --- a/src/Test/L0/Plugin/FingerprintCreatorTests.cs +++ b/src/Test/L0/Plugin/FingerprintCreatorTests.cs @@ -172,6 +172,66 @@ public void Fingerprint_Path_IncludeGlobMatches() ); } } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_ExcludeGlobMatches() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var segments = new[] + { + $"**/{directory1Name},!**/{directory2Name}", + }; + + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Assert.Equal( + new [] { directory1Name }, + f.Segments + ); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_NoIncludePatterns() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var segments = new[] + { + $"!**/{directory1Name},!**/{directory2Name}", + }; + + Assert.Throws( + () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + ); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_NoMatches() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var segments = new[] + { + $"**/{Guid.NewGuid().ToString()},!**/{directory2Name}", + }; + + // TODO: Should this really be throwing an exception? 
+ Assert.Throws( + () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + ); + } + } [Fact] [Trait("Level", "L0")] From 62ff73a852b3f194ddf5f0352205c881037f6fe2 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Thu, 20 Feb 2020 16:40:46 -0800 Subject: [PATCH 09/27] Clean up comments --- src/Agent.Plugins/PipelineCache/TarUtils.cs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 6213dfaee6..64bdce9935 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -175,18 +175,13 @@ private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecution { var processFileName = GetTar(context); - // TODO: Add unit tests for this path expansion - //inputPaths = inputPaths - // .Select(i => Path.IsPathFullyQualified(i) ? i : Path.Combine(workspace, i)) - // .Select(i => Path.GetRelativePath(workspace, i)) - // .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") - // .ToArray(); var inputPaths = pathFingerprint.Segments .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") .ToArray(); workspace = workspace.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + // TODO: just how true is this // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. var processArguments = $"-cf \"{archiveFilePath}\" -C \"{workspace}\" {string.Join(' ', inputPaths)}"; From a3f97bd5b378ef473bd8eca27d083b61427507df Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Fri, 21 Feb 2020 14:30:15 -0800 Subject: [PATCH 10/27] Don't delete tarball for debug purposes --- src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index b7f7419833..bf9e3662c1 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -82,7 +82,8 @@ public class PipelineCacheServer { if (File.Exists(uploadPath)) { - File.Delete(uploadPath); + Console.WriteLine(uploadPath); + //File.Delete(uploadPath); } } catch { } From 98a4cacf5b0fa05b6bba1fd08180ea62a939f3ec Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 24 Feb 2020 09:28:33 -0800 Subject: [PATCH 11/27] Rename workspace to workingDirectory --- .../PipelineCache/PipelineCacheServer.cs | 18 ++++++++--------- .../PipelineCacheTaskPluginBase.cs | 2 +- .../PipelineCache/RestorePipelineCacheV0.cs | 4 ++-- .../PipelineCache/SavePipelineCacheV0.cs | 4 ++-- src/Agent.Plugins/PipelineCache/TarUtils.cs | 20 +++++++++---------- 5 files changed, 24 insertions(+), 24 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index bf9e3662c1..fcdaef7f51 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -26,7 +26,7 @@ public class PipelineCacheServer AgentTaskPluginExecutionContext context, Fingerprint keyFingerprint, string[] pathSegments, - string workspace, + string workingDirectory, CancellationToken cancellationToken, ContentFormat contentFormat) { 
@@ -51,10 +51,10 @@ public class PipelineCacheServer } context.Output("Resolving path:"); - Fingerprint pathFp = FingerprintCreator.EvaluateToFingerprint(context, workspace, pathSegments, FingerprintType.Path); + Fingerprint pathFp = FingerprintCreator.EvaluateToFingerprint(context, workingDirectory, pathSegments, FingerprintType.Path); context.Output($"Resolved to: {pathFp}"); - string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, workspace, cancellationToken); + string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, workingDirectory, cancellationToken); //Upload the pipeline artifact. PipelineCacheActionRecord uploadRecord = clientTelemetry.CreateRecord((level, uri, type) => @@ -106,7 +106,7 @@ public class PipelineCacheServer Fingerprint[] fingerprints, string[] pathSegments, string cacheHitVariable, - string workspace, + string workingDirectory, CancellationToken cancellationToken) { VssConnection connection = context.VssConnection; @@ -133,7 +133,7 @@ public class PipelineCacheServer record: downloadRecord, actionAsync: async () => { - await this.DownloadPipelineCacheAsync(context, dedupManifestClient, result.ManifestId, pathSegments, workspace, Enum.Parse(result.ContentFormat), cancellationToken); + await this.DownloadPipelineCacheAsync(context, dedupManifestClient, result.ManifestId, pathSegments, workingDirectory, Enum.Parse(result.ContentFormat), cancellationToken); }); // Send results to CustomerIntelligence @@ -184,7 +184,7 @@ public class PipelineCacheServer return pipelineCacheClient; } - private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workspace, CancellationToken cancellationToken) + private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workingDirectory, CancellationToken cancellationToken) { if (pathFingerprint.Segments.Length == 1) { @@ -194,7 +194,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent // TODO: what to do if not SingleTar? 
if (contentFormat == ContentFormat.SingleTar) { - uploadPath = await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workspace, cancellationToken); + uploadPath = await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workingDirectory, cancellationToken); } return uploadPath; } @@ -204,7 +204,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent DedupManifestArtifactClient dedupManifestClient, DedupIdentifier manifestId, string[] pathSegments, - string workspace, + string workingDirectory, ContentFormat contentFormat, CancellationToken cancellationToken) { @@ -213,7 +213,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent string manifestPath = Path.Combine(Path.GetTempPath(), $"{nameof(DedupManifestArtifactClient)}.{Path.GetRandomFileName()}.manifest"); await dedupManifestClient.DownloadFileToPathAsync(manifestId, manifestPath, proxyUri: null, cancellationToken: cancellationToken); Manifest manifest = JsonSerializer.Deserialize(File.ReadAllText(manifestPath)); - await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, workspace, cancellationToken); + await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, workingDirectory, cancellationToken); try { if (File.Exists(manifestPath)) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index 844f9c297b..0750d27a4b 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -141,7 +141,7 @@ public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, Canc Fingerprint fingerprint, Func restoreKeysGenerator, string[] pathSegments, - string workspace, + string workingDirectory, CancellationToken token); // Properties set by tasks diff --git a/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs index 6418849819..421022fb9e 100644 --- a/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs @@ -19,7 +19,7 @@ public class RestorePipelineCacheV0 : PipelineCacheTaskPluginBase Fingerprint fingerprint, Func restoreKeysGenerator, string[] pathSegments, - string workspace, + string workingDirectory, CancellationToken token) { context.SetTaskVariable(RestoreStepRanVariableName, RestoreStepRanVariableValue); @@ -32,7 +32,7 @@ public class RestorePipelineCacheV0 : PipelineCacheTaskPluginBase (new[] { fingerprint }).Concat(restoreFingerprints).ToArray(), pathSegments, context.GetInput(PipelineCacheTaskPluginConstants.CacheHitVariable, required: false), - workspace, + workingDirectory, token); } } diff --git a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs index a84eb8ba55..2371a2c448 100644 --- a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs @@ -60,7 +60,7 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can Fingerprint keyFingerprint, Func restoreKeysGenerator, string[] pathSegments, - string workspace, + string workingDirectory, CancellationToken token) { string contentFormatValue = context.Variables.GetValueOrDefault(ContentFormatVariableName)?.Value ?? 
string.Empty; @@ -88,7 +88,7 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can context, keyFingerprint, pathSegments, - workspace, + workingDirectory, token, contentFormat); } diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 64bdce9935..de669fae2c 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -31,7 +31,7 @@ public static class TarUtils public static async Task ArchiveFilesToTarAsync( AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, - string workspace, + string workingDirectory, CancellationToken cancellationToken) { foreach (var inputPath in pathFingerprint.Segments) @@ -45,7 +45,7 @@ public static class TarUtils var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFile, pathFingerprint, workspace); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, pathFingerprint, workingDirectory); Action actionOnFailure = () => { @@ -76,7 +76,7 @@ public static class TarUtils AgentTaskPluginExecutionContext context, Manifest manifest, DedupManifestArtifactClient dedupManifestClient, - string workspace, + string workingDirectory, CancellationToken cancellationToken) { ValidateTarManifest(manifest); @@ -84,7 +84,7 @@ public static class TarUtils DedupIdentifier dedupId = DedupIdentifier.Create(manifest.Items.Single(i => i.Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)).Blob.Id); // We now can simply specify the working directory as the tarball will contain paths relative to it - ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, workspace); + ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, workingDirectory); Func downloadTaskFunc = (process, ct) => @@ -171,7 +171,7 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFilePath, Fingerprint pathFingerprint, string workspace) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, Fingerprint pathFingerprint, string workingDirectory) { var processFileName = GetTar(context); @@ -179,11 +179,11 @@ private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecution .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") .ToArray(); - workspace = workspace.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + workingDirectory = workingDirectory.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); // TODO: just how true is this // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. - var processArguments = $"-cf \"{archiveFilePath}\" -C \"{workspace}\" {string.Join(' ', inputPaths)}"; + var processArguments = $"-cf \"{archiveFileName}\" -C \"{workingDirectory}\" {string.Join(' ', inputPaths)}"; if (IsSystemDebugTrue(context)) { @@ -206,7 +206,7 @@ private static string GetTar(AgentTaskPluginExecutionContext context) return String.IsNullOrWhiteSpace(location) ? 
"tar" : location; } - private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string workspace) + private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string workingDirectory) { // TODO: Get common directory of path segments //var targetDirectory = pathSegments[0]; @@ -221,7 +221,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut //} // TODO: Verify this works processFileName = "7z"; - processArguments = $"x -si -aoa -o\"{workspace}\" -ttar"; + processArguments = $"x -si -aoa -o\"{workingDirectory}\" -ttar"; if (IsSystemDebugTrue(context)) { processArguments = "-bb1 " + processArguments; @@ -240,7 +240,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut } ProcessStartInfo processStartInfo = new ProcessStartInfo(); - CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workspace); // TODO: is this the right directory? + CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workingDirectory); // TODO: is this the right directory? return processStartInfo; } From 614376fd7032fc0630117fafe321b701ec9f2f11 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 24 Feb 2020 10:21:54 -0800 Subject: [PATCH 12/27] Clean up comments --- src/Agent.Plugins/PipelineCache/TarUtils.cs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index de669fae2c..61e58c4415 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -208,8 +208,6 @@ private static string GetTar(AgentTaskPluginExecutionContext context) private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string workingDirectory) { - // TODO: Get common directory of path segments - //var targetDirectory = pathSegments[0]; string processFileName, processArguments; if (isWindows && CheckIf7ZExists()) { @@ -231,7 +229,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut { processFileName = GetTar(context); // Instead of targetDirectory, we are providing . to tar, because the tar process is being started from targetDirectory. - // -P is added so that relative paths are accepted by tar + // -P is added so that relative paths and backtracing (e.g. ../foo) are accepted by tar processArguments = $"-xf - -P -C ."; if (IsSystemDebugTrue(context)) { @@ -240,7 +238,8 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut } ProcessStartInfo processStartInfo = new ProcessStartInfo(); - CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workingDirectory); // TODO: is this the right directory? 
+ // Tar is started in the working directory because the tarball contains paths relative to it + CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workingDirectory); return processStartInfo; } From bd089652c3388b7ec2fe8b355c4e505841393d30 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 24 Feb 2020 16:44:10 -0800 Subject: [PATCH 13/27] Pipe input files to tar from stdin --- .../PipelineCacheTaskPluginBase.cs | 2 - .../PipelineCache/SavePipelineCacheV0.cs | 2 + src/Agent.Plugins/PipelineCache/TarUtils.cs | 43 +++++++++++++++---- 3 files changed, 36 insertions(+), 11 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index 0750d27a4b..b879609ac0 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -87,8 +87,6 @@ internal static (bool isOldFormat, string[] keySegments, IEnumerable r public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { - WaitForDebuggerAttach(); - ArgUtil.NotNull(context, nameof(context)); VariableValue saltValue = context.Variables.GetValueOrDefault(SaltVariableName); diff --git a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs index 2371a2c448..fd09bf39d4 100644 --- a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs @@ -63,6 +63,8 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can string workingDirectory, CancellationToken token) { + WaitForDebuggerAttach(); + string contentFormatValue = context.Variables.GetValueOrDefault(ContentFormatVariableName)?.Value ?? string.Empty; string calculatedFingerPrint = context.TaskVariables.GetValueOrDefault(ResolvedFingerPrintVariableName)?.Value ?? 
string.Empty; diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 61e58c4415..7d076a941f 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -45,7 +45,7 @@ public static class TarUtils var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, pathFingerprint, workingDirectory); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, workingDirectory); Action actionOnFailure = () => { @@ -53,11 +53,39 @@ public static class TarUtils TryDeleteFile(archiveFile); }; + Func createTaskFunc = + (process, ct) => + Task.Run(async () => + { + try + { + var inputPaths = pathFingerprint.Segments + .Select(i => i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)); + + foreach (var inputPath in inputPaths) + { + await process.StandardInput.WriteLineAsync(inputPath); + } + + process.StandardInput.BaseStream.Close(); + } + catch (Exception e) + { + try + { + process.Kill(); + } + catch { } + ExceptionDispatchInfo.Capture(e).Throw(); + } + }); + await RunProcessAsync( context, processStartInfo, // no additional tasks on create are required to run whilst running the TAR process - (Process process, CancellationToken ct) => Task.CompletedTask, + //(Process process, CancellationToken ct) => Task.CompletedTask, + createTaskFunc, actionOnFailure, cancellationToken); @@ -171,19 +199,15 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, Fingerprint pathFingerprint, string workingDirectory) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string workingDirectory) { var processFileName = GetTar(context); - var inputPaths = pathFingerprint.Segments - .Select(i => $"\"{i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)}\"") - .ToArray(); - workingDirectory = workingDirectory.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); - // TODO: just how true is this // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. - var processArguments = $"-cf \"{archiveFileName}\" -C \"{workingDirectory}\" {string.Join(' ', inputPaths)}"; + // The list of input files is piped in through the 'additionalTaskToExecuteWhilstRunningProcess' parameter + var processArguments = $"-cf \"{archiveFileName}\" -C \"{workingDirectory}\" -T -"; if (IsSystemDebugTrue(context)) { @@ -193,6 +217,7 @@ private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecution { processArguments = "-h " + processArguments; } + //cat input.txt | tar - v - cf "pipeline.tar" - C "/Users/ethand/code/azure-pipelines-agent/_layout/osx-x64/_work/3/s" - T - ProcessStartInfo processStartInfo = new ProcessStartInfo(); CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: Path.GetTempPath()); // We want to create the archiveFile in temp folder, and hence starting the tar process from TEMP to avoid absolute paths in tar cmd line. 
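The change in PATCH 13 above stops passing the resolved cache paths on the tar command line and instead writes them, one per line, to tar's standard input through the '-T -' (--files-from -) option now built in GetCreateTarProcessInfo. The following is a minimal sketch of that pattern pulled out of the agent's RunProcessAsync plumbing; the class and member names, archive file name, and path list are illustrative placeholders rather than values taken from the patch, and a tar that honors '-C' before '-T -' (as the diff's comments assume for GNU tar) is assumed to be on PATH.

// Illustrative sketch only (not part of the patch series): shows the '-T -' stdin-piping
// pattern from PATCH 13 with placeholder names; the real code routes this through the
// RunProcessAsync helper plus the agent's cancellation and failure handling.
using System;
using System.Diagnostics;
using System.IO;
using System.Threading.Tasks;

static class TarStdinSketch
{
    public static async Task<string> CreateTarAsync(string workingDirectory, string[] relativePaths)
    {
        // Mirror the patch: run tar from the temp directory and give '-cf' just a file name,
        // per the workaround comment in the diff about GNU tar and absolute archive paths.
        string archiveFileName = $"cache.{Path.GetRandomFileName()}.tar";

        var psi = new ProcessStartInfo
        {
            FileName = "tar",
            // '-T -' makes tar read the entry list from stdin, so the command line stays
            // short no matter how many directories the path fingerprint resolved to.
            Arguments = $"-cf \"{archiveFileName}\" -C \"{workingDirectory}\" -T -",
            WorkingDirectory = Path.GetTempPath(),
            RedirectStandardInput = true,
            UseShellExecute = false
        };

        using var process = Process.Start(psi);

        // One path per line, then close stdin so tar knows the list is complete.
        foreach (string path in relativePaths)
        {
            await process.StandardInput.WriteLineAsync(
                path.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar));
        }
        process.StandardInput.Close();

        process.WaitForExit();
        if (process.ExitCode != 0)
        {
            throw new InvalidOperationException($"tar exited with code {process.ExitCode}");
        }
        return Path.Combine(Path.GetTempPath(), archiveFileName);
    }
}

Feeding the list through stdin sidesteps shell quoting of paths that contain spaces and keeps the invocation within command-line length limits however many segments the path fingerprint produces, which appears to be the motivation for replacing the earlier string.Join of quoted paths.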
From c5d7509496bd9d4a617c34685712c51602d6d61f Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 24 Feb 2020 16:44:57 -0800 Subject: [PATCH 14/27] Clean up testing code --- src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs | 3 +-- src/Agent.Plugins/PipelineCache/TarUtils.cs | 6 ++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index fcdaef7f51..0cf09f36c9 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -82,8 +82,7 @@ public class PipelineCacheServer { if (File.Exists(uploadPath)) { - Console.WriteLine(uploadPath); - //File.Delete(uploadPath); + File.Delete(uploadPath); } } catch { } diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 7d076a941f..eea1e7e552 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -53,7 +53,7 @@ public static class TarUtils TryDeleteFile(archiveFile); }; - Func createTaskFunc = + Func inputFilesTask = (process, ct) => Task.Run(async () => { @@ -83,9 +83,7 @@ public static class TarUtils await RunProcessAsync( context, processStartInfo, - // no additional tasks on create are required to run whilst running the TAR process - //(Process process, CancellationToken ct) => Task.CompletedTask, - createTaskFunc, + inputFilesTask, actionOnFailure, cancellationToken); From 292a1cb26e0e369ec65c42606c6ea54686d13748 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 24 Feb 2020 16:45:18 -0800 Subject: [PATCH 15/27] Clean up legacy comment --- src/Agent.Plugins/PipelineCache/TarUtils.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index eea1e7e552..f0c72e1a75 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -215,7 +215,6 @@ private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecution { processArguments = "-h " + processArguments; } - //cat input.txt | tar - v - cf "pipeline.tar" - C "/Users/ethand/code/azure-pipelines-agent/_layout/osx-x64/_work/3/s" - T - ProcessStartInfo processStartInfo = new ProcessStartInfo(); CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: Path.GetTempPath()); // We want to create the archiveFile in temp folder, and hence starting the tar process from TEMP to avoid absolute paths in tar cmd line. 
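For the restore direction, the TarUtils hunks earlier in the series show the mirror-image pattern: the downloaded tarball is streamed to the extractor's standard input, using "7z x -si -aoa -o<workingDirectory> -ttar" when 7-Zip is available on Windows and "tar -xf - -P -C ." started in the working directory otherwise. The sketch below illustrates only the tar branch, with a local .tar file standing in for the dedup download stream; the class and method names are invented for the example, and nothing here beyond the tar arguments is taken verbatim from the agent code.

// Illustrative sketch only (not part of the patch series): the non-Windows extract branch,
// with a local file used in place of the DedupManifestArtifactClient download stream.
using System;
using System.Diagnostics;
using System.IO;
using System.Threading.Tasks;

static class TarExtractSketch
{
    public static async Task ExtractTarAsync(string tarFile, string workingDirectory)
    {
        var psi = new ProcessStartInfo
        {
            FileName = "tar",
            // '-xf -' reads the archive from stdin; '-C .' extracts into the current
            // directory, which is set to the cache working directory below; '-P' lets
            // tar accept entries with relative or backtracking paths (e.g. ../foo),
            // matching the comment in the diff.
            Arguments = "-xf - -P -C .",
            WorkingDirectory = workingDirectory,
            RedirectStandardInput = true,
            UseShellExecute = false
        };

        using var process = Process.Start(psi);

        // Stream the tarball into tar's stdin, then close the stream to signal end of input.
        using (FileStream input = File.OpenRead(tarFile))
        {
            await input.CopyToAsync(process.StandardInput.BaseStream);
        }
        process.StandardInput.BaseStream.Close();

        process.WaitForExit();
        if (process.ExitCode != 0)
        {
            throw new InvalidOperationException($"tar exited with code {process.ExitCode}");
        }
    }
}

Starting the process inside the working directory and extracting to "." is what lets the archive carry only paths relative to that directory, in line with the comments around GetExtractStartProcessInfo added earlier in the series.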
From 42a0ec61a2758c4e33b7ce16cd54463d873aaf57 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Tue, 25 Feb 2020 12:01:48 -0800 Subject: [PATCH 16/27] Clean up comments --- .../PipelineCache/PipelineCacheServer.cs | 9 ++++----- .../PipelineCache/PipelineCacheTaskPluginBase.cs | 12 +++++++----- .../PipelineCache/SavePipelineCacheV0.cs | 2 -- src/Agent.Plugins/PipelineCache/TarUtils.cs | 9 +-------- 4 files changed, 12 insertions(+), 20 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index 0cf09f36c9..da1ae222ad 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -189,13 +189,12 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent { return pathFingerprint.Segments[0]; } - string uploadPath = string.Empty; - // TODO: what to do if not SingleTar? if (contentFormat == ContentFormat.SingleTar) { - uploadPath = await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workingDirectory, cancellationToken); + return await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workingDirectory, cancellationToken); } - return uploadPath; + // TODO: what is the right way to handle !ContentFormat.SingleTar + return pathFingerprint.Segments[0]; } private async Task DownloadPipelineCacheAsync( @@ -226,7 +225,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent { DownloadDedupManifestArtifactOptions options = DownloadDedupManifestArtifactOptions.CreateWithManifestId( manifestId, - pathSegments[0], // TODO: whats the right format here + pathSegments[0], // TODO: is this the right format here proxyUri: null, minimatchPatterns: null); await dedupManifestClient.DownloadAsync(options, cancellationToken); diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index b879609ac0..4e88aa47ff 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -44,10 +44,10 @@ internal static (bool isOldFormat, string[] keySegments, IEnumerable r }; Func splitAcrossNewlines = (s) => - s.Replace("\r\n", "\n") //normalize newlines - .Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries) - .Select(line => line.Trim()) - .ToArray(); + s.Replace("\r\n", "\n") //normalize newlines + .Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries) + .Select(line => line.Trim()) + .ToArray(); string[] keySegments; string[] pathSegments; @@ -70,7 +70,7 @@ internal static (bool isOldFormat, string[] keySegments, IEnumerable r keySegments = splitAcrossPipes(key); } - // Path can now be a list of path segments to include in cache + // Path can be a list of path segments to include in cache pathSegments = splitAcrossPipes(path); if (hasRestoreKeys) @@ -87,6 +87,8 @@ internal static (bool isOldFormat, string[] keySegments, IEnumerable r public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { + WaitForDebuggerAttach(); + ArgUtil.NotNull(context, nameof(context)); VariableValue saltValue = context.Variables.GetValueOrDefault(SaltVariableName); diff --git a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs index fd09bf39d4..2371a2c448 100644 --- a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs +++ 
b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs @@ -63,8 +63,6 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can string workingDirectory, CancellationToken token) { - WaitForDebuggerAttach(); - string contentFormatValue = context.Variables.GetValueOrDefault(ContentFormatVariableName)?.Value ?? string.Empty; string calculatedFingerPrint = context.TaskVariables.GetValueOrDefault(ResolvedFingerPrintVariableName)?.Value ?? string.Empty; diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index f0c72e1a75..fff97c6230 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -233,13 +233,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut string processFileName, processArguments; if (isWindows && CheckIf7ZExists()) { - //processFileName = "7z"; - //processArguments = $"x -si -aoa -o\"{targetDirectory}\" -ttar"; - //if (IsSystemDebugTrue(context)) - //{ - // processArguments = "-bb1 " + processArguments; - //} - // TODO: Verify this works + // TODO: This doesn't support backtracing paths (e.g. ..\foo), we need a mechanism to either force 7z to do so or to use tar if backtracing exists processFileName = "7z"; processArguments = $"x -si -aoa -o\"{workingDirectory}\" -ttar"; if (IsSystemDebugTrue(context)) @@ -267,7 +261,6 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut private static void ValidateTarManifest(Manifest manifest) { - // TODO: This will need to change if (manifest == null || manifest.Items.Count() != 1 || !manifest.Items.Single().Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)) { throw new ArgumentException($"Manifest containing a tar cannot have more than one item."); From 741b67d08deb44d571320210d9b255994e733d20 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Wed, 26 Feb 2020 14:27:54 -0800 Subject: [PATCH 17/27] Create cache relative to pipeline.workspace --- .../PipelineCache/FingerprintCreator.cs | 14 +++++++------ .../PipelineCache/PipelineCacheServer.cs | 18 ++++++++--------- .../PipelineCacheTaskPluginBase.cs | 6 ++---- .../PipelineCache/RestorePipelineCacheV0.cs | 4 ++-- .../PipelineCache/SavePipelineCacheV0.cs | 4 ++-- src/Agent.Plugins/PipelineCache/TarUtils.cs | 20 +++++++++---------- 6 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index 7b8a5542a4..cdc4e33da2 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -23,7 +23,7 @@ public enum FingerprintType Key, Path } - + public static class FingerprintCreator { private static readonly bool isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); @@ -341,7 +341,7 @@ internal static void CheckSegment(string segment, string segmentType) IEnumerable files = fingerprintType == FingerprintType.Key ? Directory.EnumerateFiles(enumerate.RootPath, enumerate.Pattern, enumerate.Depth) : Directory.EnumerateDirectories(enumerate.RootPath, enumerate.Pattern, enumerate.Depth); - + Func filter = CreateFilter(context, absoluteIncludeGlobs, absoluteExcludeRules); files = files.Where(f => filter(f)).Distinct(); @@ -349,7 +349,7 @@ internal static void CheckSegment(string segment, string segmentType) { // Path.GetRelativePath returns 'The relative path, or path if the paths don't share the same root.' 
string displayPath = filePathRoot == null ? path : Path.GetRelativePath(filePathRoot, path); - + if (fingerprintType == FingerprintType.Key) { using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read)) @@ -380,7 +380,7 @@ internal static void CheckSegment(string segment, string segmentType) segmentType = patternSegment ? KeySegmentType.FilePattern : KeySegmentType.FilePath; details = matchedFiles.Values.ToArray(); resolvedSegments.Add(MatchedFile.GenerateHash(matchedFiles.Values)); - + if (!matchedFiles.Any()) { var message = patternSegment ? $"No matching files found for pattern: {displaySegment}" : $"File not found: {displaySegment}"; @@ -392,13 +392,15 @@ internal static void CheckSegment(string segment, string segmentType) segmentType = patternSegment ? KeySegmentType.DirectoryPattern : KeySegmentType.Directory; details = matchedDirectories.Values.ToArray(); resolvedSegments.AddRange(matchedDirectories.Values); + + // TODO: Is it the right behavior to throw an exception if a pathsegment isn't resolveable? if (!matchedDirectories.Any()) { - var message = patternSegment ? $"No matching directories found for pattern: {displaySegment}" : $"Directory not found: {displaySegment}"; + var message = patternSegment ? $"No matching directories found for pattern: {displaySegment}" : $"Directory not found: {displaySegment}"; exceptions.Add(new DirectoryNotFoundException(message)); } } - + LogSegment( context, segments, diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index da1ae222ad..bd38e5f3d5 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -26,7 +26,7 @@ public class PipelineCacheServer AgentTaskPluginExecutionContext context, Fingerprint keyFingerprint, string[] pathSegments, - string workingDirectory, + string workspaceRoot, CancellationToken cancellationToken, ContentFormat contentFormat) { @@ -51,10 +51,10 @@ public class PipelineCacheServer } context.Output("Resolving path:"); - Fingerprint pathFp = FingerprintCreator.EvaluateToFingerprint(context, workingDirectory, pathSegments, FingerprintType.Path); + Fingerprint pathFp = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, pathSegments, FingerprintType.Path); context.Output($"Resolved to: {pathFp}"); - string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, workingDirectory, cancellationToken); + string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, workspaceRoot, cancellationToken); //Upload the pipeline artifact. 
PipelineCacheActionRecord uploadRecord = clientTelemetry.CreateRecord((level, uri, type) => @@ -105,7 +105,7 @@ public class PipelineCacheServer Fingerprint[] fingerprints, string[] pathSegments, string cacheHitVariable, - string workingDirectory, + string workspaceRoot, CancellationToken cancellationToken) { VssConnection connection = context.VssConnection; @@ -132,7 +132,7 @@ public class PipelineCacheServer record: downloadRecord, actionAsync: async () => { - await this.DownloadPipelineCacheAsync(context, dedupManifestClient, result.ManifestId, pathSegments, workingDirectory, Enum.Parse(result.ContentFormat), cancellationToken); + await this.DownloadPipelineCacheAsync(context, dedupManifestClient, result.ManifestId, pathSegments, workspaceRoot, Enum.Parse(result.ContentFormat), cancellationToken); }); // Send results to CustomerIntelligence @@ -183,7 +183,7 @@ public class PipelineCacheServer return pipelineCacheClient; } - private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workingDirectory, CancellationToken cancellationToken) + private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workspaceRoot, CancellationToken cancellationToken) { if (pathFingerprint.Segments.Length == 1) { @@ -191,7 +191,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent } if (contentFormat == ContentFormat.SingleTar) { - return await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workingDirectory, cancellationToken); + return await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workspaceRoot, cancellationToken); } // TODO: what is the right way to handle !ContentFormat.SingleTar return pathFingerprint.Segments[0]; @@ -202,7 +202,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent DedupManifestArtifactClient dedupManifestClient, DedupIdentifier manifestId, string[] pathSegments, - string workingDirectory, + string workspaceRoot, ContentFormat contentFormat, CancellationToken cancellationToken) { @@ -211,7 +211,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent string manifestPath = Path.Combine(Path.GetTempPath(), $"{nameof(DedupManifestArtifactClient)}.{Path.GetRandomFileName()}.manifest"); await dedupManifestClient.DownloadFileToPathAsync(manifestId, manifestPath, proxyUri: null, cancellationToken: cancellationToken); Manifest manifest = JsonSerializer.Deserialize(File.ReadAllText(manifestPath)); - await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, workingDirectory, cancellationToken); + await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, workspaceRoot, cancellationToken); try { if (File.Exists(manifestPath)) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index 4e88aa47ff..697f27d07f 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -124,14 +124,12 @@ public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, Canc }).ToArray(); - var workingDirectory = context.Variables.GetValueOrDefault("system.defaultworkingdirectory")?.Value ?? 
workspaceRoot; - await ProcessCommandInternalAsync( context, keyFp, restoreKeysGenerator, pathSegments, - workingDirectory, + workspaceRoot, token); } @@ -141,7 +139,7 @@ public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, Canc Fingerprint fingerprint, Func restoreKeysGenerator, string[] pathSegments, - string workingDirectory, + string workspaceRoot, CancellationToken token); // Properties set by tasks diff --git a/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs index 421022fb9e..003fb1eea1 100644 --- a/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/RestorePipelineCacheV0.cs @@ -19,7 +19,7 @@ public class RestorePipelineCacheV0 : PipelineCacheTaskPluginBase Fingerprint fingerprint, Func restoreKeysGenerator, string[] pathSegments, - string workingDirectory, + string workspaceRoot, CancellationToken token) { context.SetTaskVariable(RestoreStepRanVariableName, RestoreStepRanVariableValue); @@ -32,7 +32,7 @@ public class RestorePipelineCacheV0 : PipelineCacheTaskPluginBase (new[] { fingerprint }).Concat(restoreFingerprints).ToArray(), pathSegments, context.GetInput(PipelineCacheTaskPluginConstants.CacheHitVariable, required: false), - workingDirectory, + workspaceRoot, token); } } diff --git a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs index 2371a2c448..9397a8e333 100644 --- a/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs +++ b/src/Agent.Plugins/PipelineCache/SavePipelineCacheV0.cs @@ -60,7 +60,7 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can Fingerprint keyFingerprint, Func restoreKeysGenerator, string[] pathSegments, - string workingDirectory, + string workspaceRoot, CancellationToken token) { string contentFormatValue = context.Variables.GetValueOrDefault(ContentFormatVariableName)?.Value ?? 
string.Empty; @@ -88,7 +88,7 @@ public override async Task RunAsync(AgentTaskPluginExecutionContext context, Can context, keyFingerprint, pathSegments, - workingDirectory, + workspaceRoot, token, contentFormat); } diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index fff97c6230..47751a64b0 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -31,7 +31,7 @@ public static class TarUtils public static async Task ArchiveFilesToTarAsync( AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, - string workingDirectory, + string workspaceRoot, CancellationToken cancellationToken) { foreach (var inputPath in pathFingerprint.Segments) @@ -45,7 +45,7 @@ public static class TarUtils var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, workingDirectory); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, workspaceRoot); Action actionOnFailure = () => { @@ -102,7 +102,7 @@ public static class TarUtils AgentTaskPluginExecutionContext context, Manifest manifest, DedupManifestArtifactClient dedupManifestClient, - string workingDirectory, + string workspaceRoot, CancellationToken cancellationToken) { ValidateTarManifest(manifest); @@ -110,7 +110,7 @@ public static class TarUtils DedupIdentifier dedupId = DedupIdentifier.Create(manifest.Items.Single(i => i.Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)).Blob.Id); // We now can simply specify the working directory as the tarball will contain paths relative to it - ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, workingDirectory); + ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, workspaceRoot); Func downloadTaskFunc = (process, ct) => @@ -197,15 +197,15 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string workingDirectory) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string workspaceRoot) { var processFileName = GetTar(context); - workingDirectory = workingDirectory.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + workspaceRoot = workspaceRoot.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. // The list of input files is piped in through the 'additionalTaskToExecuteWhilstRunningProcess' parameter - var processArguments = $"-cf \"{archiveFileName}\" -C \"{workingDirectory}\" -T -"; + var processArguments = $"-cf \"{archiveFileName}\" -C \"{workspaceRoot}\" -T -"; if (IsSystemDebugTrue(context)) { @@ -228,14 +228,14 @@ private static string GetTar(AgentTaskPluginExecutionContext context) return String.IsNullOrWhiteSpace(location) ? 
"tar" : location; } - private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string workingDirectory) + private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string workspaceRoot) { string processFileName, processArguments; if (isWindows && CheckIf7ZExists()) { // TODO: This doesn't support backtracing paths (e.g. ..\foo), we need a mechanism to either force 7z to do so or to use tar if backtracing exists processFileName = "7z"; - processArguments = $"x -si -aoa -o\"{workingDirectory}\" -ttar"; + processArguments = $"x -si -aoa -o\"{workspaceRoot}\" -ttar"; if (IsSystemDebugTrue(context)) { processArguments = "-bb1 " + processArguments; @@ -255,7 +255,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut ProcessStartInfo processStartInfo = new ProcessStartInfo(); // Tar is started in the working directory because the tarball contains paths relative to it - CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workingDirectory); + CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workspaceRoot); return processStartInfo; } From 71ed8317d93a386b41bc6a7e88d1761c8ca53fa6 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Thu, 27 Feb 2020 11:18:39 -0800 Subject: [PATCH 18/27] Clean up code comments --- src/Agent.Plugins/PipelineCache/TarUtils.cs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 47751a64b0..bd6ea19a87 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -62,6 +62,7 @@ public static class TarUtils var inputPaths = pathFingerprint.Segments .Select(i => i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)); + // Stream input paths to tar to avoid command length limitations foreach (var inputPath in inputPaths) { await process.StandardInput.WriteLineAsync(inputPath); @@ -233,7 +234,6 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut string processFileName, processArguments; if (isWindows && CheckIf7ZExists()) { - // TODO: This doesn't support backtracing paths (e.g. ..\foo), we need a mechanism to either force 7z to do so or to use tar if backtracing exists processFileName = "7z"; processArguments = $"x -si -aoa -o\"{workspaceRoot}\" -ttar"; if (IsSystemDebugTrue(context)) @@ -244,9 +244,8 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut else { processFileName = GetTar(context); - // Instead of targetDirectory, we are providing . to tar, because the tar process is being started from targetDirectory. - // -P is added so that relative paths and backtracing (e.g. ../foo) are accepted by tar - processArguments = $"-xf - -P -C ."; + // Instead of targetDirectory, we are providing . to tar, because the tar process is being started from workspaceRoot. 
+ processArguments = $"-xf - -C ."; if (IsSystemDebugTrue(context)) { processArguments = "-v " + processArguments; From 3981899e4e84c62c65b281b83629d7521c953e65 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Thu, 27 Feb 2020 12:30:04 -0800 Subject: [PATCH 19/27] Enforce that cache paths are within Pipeline.Workspace --- .../PipelineCache/FingerprintCreator.cs | 6 ++++++ src/Test/L0/Plugin/FingerprintCreatorTests.cs | 20 +++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index cdc4e33da2..5ef820283d 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -393,6 +393,12 @@ internal static void CheckSegment(string segment, string segmentType) details = matchedDirectories.Values.ToArray(); resolvedSegments.AddRange(matchedDirectories.Values); + // Fail if paths our outside of Pipeline.Workspace. This limitation is b/c 7z doesn't extract backtraced paths + foreach (var backtracedPath in matchedDirectories.Where(x => x.Value.StartsWith(".."))) + { + exceptions.Add(new ArgumentException($"Specified path is not within `Pipeline.Workspace`: {backtracedPath.Value}")); + } + // TODO: Is it the right behavior to throw an exception if a pathsegment isn't resolveable? if (!matchedDirectories.Any()) { diff --git a/src/Test/L0/Plugin/FingerprintCreatorTests.cs b/src/Test/L0/Plugin/FingerprintCreatorTests.cs index 1cd28c2636..a90422c556 100644 --- a/src/Test/L0/Plugin/FingerprintCreatorTests.cs +++ b/src/Test/L0/Plugin/FingerprintCreatorTests.cs @@ -233,6 +233,26 @@ public void Fingerprint_Path_NoMatches() } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_NoOutsidePipelineWorkspace() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var directoryInfo = new DirectoryInfo(directory); + var segments = new[] + { + directoryInfo.Parent.FullName, + }; + + Assert.Throws( + () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + ); + } + } + [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] From 2262e712380341d40ae70b38a14ec1513c170368 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 2 Mar 2020 11:27:11 -0800 Subject: [PATCH 20/27] Allow for single path segments to be outside of Pipeline.Workspace --- .../PipelineCache/FingerprintCreator.cs | 39 +++++++++++------- .../PipelineCache/PipelineCacheServer.cs | 12 ++---- src/Agent.Plugins/PipelineCache/TarUtils.cs | 40 ++++++++++++++----- 3 files changed, 58 insertions(+), 33 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index 5ef820283d..e340d5b466 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -282,7 +282,7 @@ internal static void CheckSegment(string segment, string segmentType) // Quickly validate all segments foreach (string segment in segments) { - CheckSegment(segment, "key"); + CheckSegment(segment, fingerprintType.ToString()); } string defaultWorkingDirectory = context.Variables.GetValueOrDefault( @@ -291,17 +291,18 @@ internal static void CheckSegment(string segment, string segmentType) var resolvedSegments = new List(); var exceptions = new List(); + var hasPatternSegments = false; - foreach 
(string keySegment in segments) + foreach (string segment in segments) { - if (!IsPathySegment(keySegment)) + if (!IsPathySegment(segment)) { - LogSegment(context, segments, keySegment, KeySegmentType.String, null); - resolvedSegments.Add(keySegment); + LogSegment(context, segments, segment, KeySegmentType.String, null); + resolvedSegments.Add(segment); } else { - string[] pathRules = keySegment.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).ToArray(); + string[] pathRules = segment.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(s => s.Trim()).ToArray(); string[] includeRules = pathRules.Where(p => !p.StartsWith('!')).ToArray(); if (!includeRules.Any()) @@ -364,9 +365,10 @@ internal static void CheckSegment(string segment, string segmentType) } } - var patternSegment = keySegment.IndexOfAny(GlobChars) >= 0 || matchedFiles.Count > 1; + var patternSegment = segment.IndexOfAny(GlobChars) >= 0 || matchedFiles.Count > 1; + hasPatternSegments |= patternSegment; - var displaySegment = keySegment; + var displaySegment = segment; if (context.Container != null) { @@ -393,13 +395,7 @@ internal static void CheckSegment(string segment, string segmentType) details = matchedDirectories.Values.ToArray(); resolvedSegments.AddRange(matchedDirectories.Values); - // Fail if paths our outside of Pipeline.Workspace. This limitation is b/c 7z doesn't extract backtraced paths - foreach (var backtracedPath in matchedDirectories.Where(x => x.Value.StartsWith(".."))) - { - exceptions.Add(new ArgumentException($"Specified path is not within `Pipeline.Workspace`: {backtracedPath.Value}")); - } - - // TODO: Is it the right behavior to throw an exception if a pathsegment isn't resolveable? + // TODO: Is it the right behavior to throw an exception if a path segment isn't resolveable? if (!matchedDirectories.Any()) { var message = patternSegment ? $"No matching directories found for pattern: {displaySegment}" : $"Directory not found: {displaySegment}"; @@ -417,6 +413,19 @@ internal static void CheckSegment(string segment, string segmentType) } } + if (fingerprintType == FingerprintType.Path) + { + // If there are segments or contains a glob pattern, all resolved segments must be rooted within filePathRoot (e.g. 
Pipeline.Workspace) + // This limitation is mainly due to 7z not extracting backtraced paths + if (resolvedSegments.Count() > 1 || hasPatternSegments) + { + foreach (var backtracedPath in resolvedSegments.Where(x => x.StartsWith(".."))) + { + exceptions.Add(new ArgumentException($"Resolved path is not within `Pipeline.Workspace`: {backtracedPath}")); + } + } + } + if (exceptions.Any()) { throw new AggregateException(exceptions); diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index bd38e5f3d5..7f0daf2f33 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -183,18 +183,14 @@ public class PipelineCacheServer return pipelineCacheClient; } - private async Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workspaceRoot, CancellationToken cancellationToken) + private Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workspaceRoot, CancellationToken cancellationToken) { - if (pathFingerprint.Segments.Length == 1) - { - return pathFingerprint.Segments[0]; - } if (contentFormat == ContentFormat.SingleTar) { - return await TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workspaceRoot, cancellationToken); + return TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workspaceRoot, cancellationToken); } // TODO: what is the right way to handle !ContentFormat.SingleTar - return pathFingerprint.Segments[0]; + return Task.FromResult(pathFingerprint.Segments[0]); } private async Task DownloadPipelineCacheAsync( @@ -211,7 +207,7 @@ private async Task GetUploadPathAsync(ContentFormat contentFormat, Agent string manifestPath = Path.Combine(Path.GetTempPath(), $"{nameof(DedupManifestArtifactClient)}.{Path.GetRandomFileName()}.manifest"); await dedupManifestClient.DownloadFileToPathAsync(manifestId, manifestPath, proxyUri: null, cancellationToken: cancellationToken); Manifest manifest = JsonSerializer.Deserialize(File.ReadAllText(manifestPath)); - await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, workspaceRoot, cancellationToken); + await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, pathSegments, workspaceRoot, cancellationToken); try { if (File.Exists(manifestPath)) diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index bd6ea19a87..2a4721b920 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -45,7 +45,7 @@ public static class TarUtils var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, workspaceRoot); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, pathFingerprint, workspaceRoot); Action actionOnFailure = () => { @@ -103,6 +103,7 @@ public static class TarUtils AgentTaskPluginExecutionContext context, Manifest manifest, DedupManifestArtifactClient dedupManifestClient, + string[] pathSegments, string workspaceRoot, CancellationToken cancellationToken) { @@ -111,7 +112,7 @@ public static class TarUtils DedupIdentifier dedupId = DedupIdentifier.Create(manifest.Items.Single(i => i.Path.EndsWith(archive, 
StringComparison.OrdinalIgnoreCase)).Blob.Id); // We now can simply specify the working directory as the tarball will contain paths relative to it - ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, workspaceRoot); + ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, pathSegments, workspaceRoot); Func downloadTaskFunc = (process, ct) => @@ -198,15 +199,15 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string workspaceRoot) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, Fingerprint pathFingerprint, string workspaceRoot) { var processFileName = GetTar(context); - workspaceRoot = workspaceRoot.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + string workingDirectory = GetTarWorkingDirectory(pathFingerprint.Segments, workspaceRoot); // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. // The list of input files is piped in through the 'additionalTaskToExecuteWhilstRunningProcess' parameter - var processArguments = $"-cf \"{archiveFileName}\" -C \"{workspaceRoot}\" -T -"; + var processArguments = $"-cf \"{archiveFileName}\" -C \"{workingDirectory}\" -T -"; if (IsSystemDebugTrue(context)) { @@ -218,7 +219,8 @@ private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecution } ProcessStartInfo processStartInfo = new ProcessStartInfo(); - CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: Path.GetTempPath()); // We want to create the archiveFile in temp folder, and hence starting the tar process from TEMP to avoid absolute paths in tar cmd line. + // We want to create the archiveFile in temp folder, and hence starting the tar process from TEMP to avoid absolute paths in tar cmd line. + CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: Path.GetTempPath()); return processStartInfo; } @@ -229,13 +231,15 @@ private static string GetTar(AgentTaskPluginExecutionContext context) return String.IsNullOrWhiteSpace(location) ? "tar" : location; } - private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string workspaceRoot) + private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string[] pathSegments, string workspaceRoot) { string processFileName, processArguments; + string workingDirectory = GetTarWorkingDirectory(pathSegments, workspaceRoot); + if (isWindows && CheckIf7ZExists()) { processFileName = "7z"; - processArguments = $"x -si -aoa -o\"{workspaceRoot}\" -ttar"; + processArguments = $"x -si -aoa -o\"{workingDirectory}\" -ttar"; if (IsSystemDebugTrue(context)) { processArguments = "-bb1 " + processArguments; @@ -244,7 +248,7 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut else { processFileName = GetTar(context); - // Instead of targetDirectory, we are providing . to tar, because the tar process is being started from workspaceRoot. + // Instead of targetDirectory, we are providing . to tar, because the tar process is being started from workingDirectory. 
processArguments = $"-xf - -C ."; if (IsSystemDebugTrue(context)) { @@ -254,10 +258,26 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut ProcessStartInfo processStartInfo = new ProcessStartInfo(); // Tar is started in the working directory because the tarball contains paths relative to it - CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workspaceRoot); + CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workingDirectory); return processStartInfo; } + private static string GetTarWorkingDirectory(string[] segments, string workspaceRoot) + { + // If path segment is single directory outside of Pipeline.Workspace extract tarball directly to this path + if (segments.Count() == 1) + { + var workingDirectory = segments[0]; + if (workingDirectory.StartsWith("..")) + { + return Path.GetFullPath(workingDirectory, workspaceRoot); + } + } + + // If path segment contains multiple directories, extract relative to Pipeline.Workspace + return workspaceRoot.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + } + private static void ValidateTarManifest(Manifest manifest) { if (manifest == null || manifest.Items.Count() != 1 || !manifest.Items.Single().Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)) From 20f32b0cbbc988d1dce170f3070b192e7b791794 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 2 Mar 2020 14:32:39 -0800 Subject: [PATCH 21/27] Allow for single, absolute paths outside of workspace --- .../PipelineCache/PipelineCacheServer.cs | 33 ++++++++++-- src/Agent.Plugins/PipelineCache/TarUtils.cs | 50 +++++++------------ 2 files changed, 48 insertions(+), 35 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index 7f0daf2f33..efb536e07e 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -54,7 +54,7 @@ public class PipelineCacheServer Fingerprint pathFp = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, pathSegments, FingerprintType.Path); context.Output($"Resolved to: {pathFp}"); - string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, workspaceRoot, cancellationToken); + string uploadPath = await this.GetUploadPathAsync(contentFormat, context, pathFp, pathSegments, workspaceRoot, cancellationToken); //Upload the pipeline artifact. 
PipelineCacheActionRecord uploadRecord = clientTelemetry.CreateRecord((level, uri, type) => @@ -183,11 +183,19 @@ public class PipelineCacheServer return pipelineCacheClient; } - private Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string workspaceRoot, CancellationToken cancellationToken) + private Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, string[] pathSegments, string workspaceRoot, CancellationToken cancellationToken) { if (contentFormat == ContentFormat.SingleTar) { - return TarUtils.ArchiveFilesToTarAsync(context, pathFingerprint, workspaceRoot, cancellationToken); + var (tarWorkingDirectory, isWorkspaceContained) = GetTarWorkingDirectory(pathSegments, workspaceRoot); + + return TarUtils.ArchiveFilesToTarAsync( + context, + pathFingerprint, + tarWorkingDirectory, + isWorkspaceContained, + cancellationToken + ); } // TODO: what is the right way to handle !ContentFormat.SingleTar return Task.FromResult(pathFingerprint.Segments[0]); @@ -207,7 +215,8 @@ private Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPl string manifestPath = Path.Combine(Path.GetTempPath(), $"{nameof(DedupManifestArtifactClient)}.{Path.GetRandomFileName()}.manifest"); await dedupManifestClient.DownloadFileToPathAsync(manifestId, manifestPath, proxyUri: null, cancellationToken: cancellationToken); Manifest manifest = JsonSerializer.Deserialize(File.ReadAllText(manifestPath)); - await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, pathSegments, workspaceRoot, cancellationToken); + var (tarWorkingDirectory, _) = GetTarWorkingDirectory(pathSegments, workspaceRoot); + await TarUtils.DownloadAndExtractTarAsync(context, manifest, dedupManifestClient, tarWorkingDirectory, cancellationToken); try { if (File.Exists(manifestPath)) @@ -226,7 +235,23 @@ private Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPl minimatchPatterns: null); await dedupManifestClient.DownloadAsync(options, cancellationToken); } + } + + private (string workingDirectory, bool isWorkspaceContained) GetTarWorkingDirectory(string[] segments, string workspaceRoot) + { + // If path segment is single directory outside of Pipeline.Workspace extract tarball directly to this path + if (segments.Count() == 1) + { + var workingDirectory = segments[0]; + // TODO: verify this works with src/foo/ (e.g. 
relative to defaultWorkingDirectory) + if (FingerprintCreator.IsPathySegment(workingDirectory) && !workingDirectory.StartsWith(workspaceRoot)) + { + return (workingDirectory, false); + } + } + // All other scenarios means that paths must within and relative to Pipeline.Workspace + return (workspaceRoot.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), true); } } } \ No newline at end of file diff --git a/src/Agent.Plugins/PipelineCache/TarUtils.cs b/src/Agent.Plugins/PipelineCache/TarUtils.cs index 2a4721b920..27a3280bc5 100644 --- a/src/Agent.Plugins/PipelineCache/TarUtils.cs +++ b/src/Agent.Plugins/PipelineCache/TarUtils.cs @@ -31,7 +31,8 @@ public static class TarUtils public static async Task ArchiveFilesToTarAsync( AgentTaskPluginExecutionContext context, Fingerprint pathFingerprint, - string workspaceRoot, + string tarWorkingDirectory, + bool isWorkspaceContained, CancellationToken cancellationToken) { foreach (var inputPath in pathFingerprint.Segments) @@ -45,7 +46,7 @@ public static class TarUtils var archiveFileName = CreateArchiveFileName(); var archiveFile = Path.Combine(Path.GetTempPath(), archiveFileName); - ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, pathFingerprint, workspaceRoot); + ProcessStartInfo processStartInfo = GetCreateTarProcessInfo(context, archiveFileName, tarWorkingDirectory); Action actionOnFailure = () => { @@ -59,8 +60,10 @@ public static class TarUtils { try { - var inputPaths = pathFingerprint.Segments - .Select(i => i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)); + // If path segment is single directory outside of Pipeline.Workspace, inputPaths is simply `.` + var inputPaths = isWorkspaceContained ? + pathFingerprint.Segments.Select(i => i.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)) + : new[]{ "." 
}; // Stream input paths to tar to avoid command length limitations foreach (var inputPath in inputPaths) @@ -103,8 +106,7 @@ public static class TarUtils AgentTaskPluginExecutionContext context, Manifest manifest, DedupManifestArtifactClient dedupManifestClient, - string[] pathSegments, - string workspaceRoot, + string tarWorkingDirectory, CancellationToken cancellationToken) { ValidateTarManifest(manifest); @@ -112,7 +114,12 @@ public static class TarUtils DedupIdentifier dedupId = DedupIdentifier.Create(manifest.Items.Single(i => i.Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)).Blob.Id); // We now can simply specify the working directory as the tarball will contain paths relative to it - ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, pathSegments, workspaceRoot); + ProcessStartInfo processStartInfo = GetExtractStartProcessInfo(context, tarWorkingDirectory); + + if (!Directory.Exists(tarWorkingDirectory)) + { + Directory.CreateDirectory(tarWorkingDirectory); + } Func downloadTaskFunc = (process, ct) => @@ -199,15 +206,13 @@ private static void CreateProcessStartInfo(ProcessStartInfo processStartInfo, st processStartInfo.WorkingDirectory = processWorkingDirectory; } - private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, Fingerprint pathFingerprint, string workspaceRoot) + private static ProcessStartInfo GetCreateTarProcessInfo(AgentTaskPluginExecutionContext context, string archiveFileName, string tarWorkingDirectory) { var processFileName = GetTar(context); - string workingDirectory = GetTarWorkingDirectory(pathFingerprint.Segments, workspaceRoot); - // If given the absolute path for the '-cf' option, the GNU tar fails. The workaround is to start the tarring process in the temp directory, and simply speficy 'archive.tar' for that option. // The list of input files is piped in through the 'additionalTaskToExecuteWhilstRunningProcess' parameter - var processArguments = $"-cf \"{archiveFileName}\" -C \"{workingDirectory}\" -T -"; + var processArguments = $"-cf \"{archiveFileName}\" -C \"{tarWorkingDirectory}\" -T -"; if (IsSystemDebugTrue(context)) { @@ -231,15 +236,14 @@ private static string GetTar(AgentTaskPluginExecutionContext context) return String.IsNullOrWhiteSpace(location) ? 
"tar" : location; } - private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string[] pathSegments, string workspaceRoot) + private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecutionContext context, string tarWorkingDirectory) { string processFileName, processArguments; - string workingDirectory = GetTarWorkingDirectory(pathSegments, workspaceRoot); if (isWindows && CheckIf7ZExists()) { processFileName = "7z"; - processArguments = $"x -si -aoa -o\"{workingDirectory}\" -ttar"; + processArguments = $"x -si -aoa -o\"{tarWorkingDirectory}\" -ttar"; if (IsSystemDebugTrue(context)) { processArguments = "-bb1 " + processArguments; @@ -258,26 +262,10 @@ private static ProcessStartInfo GetExtractStartProcessInfo(AgentTaskPluginExecut ProcessStartInfo processStartInfo = new ProcessStartInfo(); // Tar is started in the working directory because the tarball contains paths relative to it - CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: workingDirectory); + CreateProcessStartInfo(processStartInfo, processFileName, processArguments, processWorkingDirectory: tarWorkingDirectory); return processStartInfo; } - private static string GetTarWorkingDirectory(string[] segments, string workspaceRoot) - { - // If path segment is single directory outside of Pipeline.Workspace extract tarball directly to this path - if (segments.Count() == 1) - { - var workingDirectory = segments[0]; - if (workingDirectory.StartsWith("..")) - { - return Path.GetFullPath(workingDirectory, workspaceRoot); - } - } - - // If path segment contains multiple directories, extract relative to Pipeline.Workspace - return workspaceRoot.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); - } - private static void ValidateTarManifest(Manifest manifest) { if (manifest == null || manifest.Items.Count() != 1 || !manifest.Items.Single().Path.EndsWith(archive, StringComparison.OrdinalIgnoreCase)) From 9f02f369479322c3747cfb8900fbcd71fe044046 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 2 Mar 2020 14:55:34 -0800 Subject: [PATCH 22/27] Add some unit test coverage --- src/Test/L0/Plugin/FingerprintCreatorTests.cs | 47 ++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/src/Test/L0/Plugin/FingerprintCreatorTests.cs b/src/Test/L0/Plugin/FingerprintCreatorTests.cs index a90422c556..8b0ba4ef3f 100644 --- a/src/Test/L0/Plugin/FingerprintCreatorTests.cs +++ b/src/Test/L0/Plugin/FingerprintCreatorTests.cs @@ -236,7 +236,7 @@ public void Fingerprint_Path_NoMatches() [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] - public void Fingerprint_Path_NoOutsidePipelineWorkspace() + public void Fingerprint_Path_SinglePathOutsidePipelineWorkspace() { using(var hostContext = new TestHostContext(this)) { @@ -246,6 +246,51 @@ public void Fingerprint_Path_NoOutsidePipelineWorkspace() { directoryInfo.Parent.FullName, }; + + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + + Assert.Equal(1, f.Segments.Count()); + Assert.Equal( + new [] { Path.GetRelativePath(directory, directoryInfo.Parent.FullName) }, + f.Segments + ); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_MultiplePathOutsidePipelineWorkspace() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var directoryInfo = new 
DirectoryInfo(directory); + var segments = new[] + { + directoryInfo.Parent.FullName, + directoryInfo.Parent.Parent.FullName, + }; + + Assert.Throws( + () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + ); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Plugin")] + public void Fingerprint_Path_BacktracedGlobPattern() + { + using(var hostContext = new TestHostContext(this)) + { + var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); + var directoryInfo = new DirectoryInfo(directory); + var segments = new[] + { + $"{directoryInfo.Parent.FullName}/*", + }; Assert.Throws( () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) From 4f965de3e6624a2eca22eca3a69e2c554a24fee8 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 2 Mar 2020 15:01:16 -0800 Subject: [PATCH 23/27] Remove legacy comment --- src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs index efb536e07e..ed51c943c2 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheServer.cs @@ -243,7 +243,6 @@ private Task GetUploadPathAsync(ContentFormat contentFormat, AgentTaskPl if (segments.Count() == 1) { var workingDirectory = segments[0]; - // TODO: verify this works with src/foo/ (e.g. relative to defaultWorkingDirectory) if (FingerprintCreator.IsPathySegment(workingDirectory) && !workingDirectory.StartsWith(workspaceRoot)) { return (workingDirectory, false); From c289135eb8a54efeb19a9ff59b115c18cff11b37 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 2 Mar 2020 15:06:25 -0800 Subject: [PATCH 24/27] Remove extra whitespace --- src/Test/L0/Plugin/TarUtilsL0.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Test/L0/Plugin/TarUtilsL0.cs b/src/Test/L0/Plugin/TarUtilsL0.cs index 52213ee369..7ffac3e9a6 100644 --- a/src/Test/L0/Plugin/TarUtilsL0.cs +++ b/src/Test/L0/Plugin/TarUtilsL0.cs @@ -8,7 +8,7 @@ namespace Microsoft.VisualStudio.Services.Agent.Tests.PipelineCaching { - public class TarUtilsL0 { + public class TarUtilsL0 { [Fact] [Trait("Level", "L0")] [Trait("Category", "Plugin")] From dc42bbe07758edcc49aa8c87ff53e75c51298f83 Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Mon, 2 Mar 2020 16:39:17 -0800 Subject: [PATCH 25/27] Remove debugging code --- .../PipelineCacheTaskPluginBase.cs | 22 ------------------- 1 file changed, 22 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs index 697f27d07f..0be8ab55d1 100644 --- a/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs +++ b/src/Agent.Plugins/PipelineCache/PipelineCacheTaskPluginBase.cs @@ -87,8 +87,6 @@ internal static (bool isOldFormat, string[] keySegments, IEnumerable r public async virtual Task RunAsync(AgentTaskPluginExecutionContext context, CancellationToken token) { - WaitForDebuggerAttach(); - ArgUtil.NotNull(context, nameof(context)); VariableValue saltValue = context.Variables.GetValueOrDefault(SaltVariableName); @@ -152,25 +150,5 @@ protected static class PipelineCacheTaskPluginConstants public static readonly string CacheHitVariable = "cacheHitVar"; public static readonly string Salt = "salt"; } - - protected void WaitForDebuggerAttach() - { - var process = 
System.Diagnostics.Process.GetCurrentProcess(); - System.Diagnostics.Process.EnterDebugMode(); - - Console.WriteLine($"PID: {process.Id}"); - var timeout = 60; - for (int i = 0; i < timeout; i++) - { - Console.WriteLine($"Attach debugger in {timeout - i} seconds"); - Thread.Sleep(1000); - - if (System.Diagnostics.Debugger.IsAttached) - { - Console.WriteLine("Debugger attached!"); - break; - } - } - } } } \ No newline at end of file From b39125607fd85c90a2b7ca7ebd44d24eb146da9f Mon Sep 17 00:00:00 2001 From: Ethan Dennis Date: Tue, 3 Mar 2020 14:24:19 -0800 Subject: [PATCH 26/27] Place test paths in isolated directory --- src/Test/L0/Plugin/FingerprintCreatorTests.cs | 44 ++++++++++--------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/src/Test/L0/Plugin/FingerprintCreatorTests.cs b/src/Test/L0/Plugin/FingerprintCreatorTests.cs index 8b0ba4ef3f..1229a4c436 100644 --- a/src/Test/L0/Plugin/FingerprintCreatorTests.cs +++ b/src/Test/L0/Plugin/FingerprintCreatorTests.cs @@ -21,11 +21,11 @@ public class FingerprintCreatorTests private static readonly byte[] hash1; private static readonly byte[] hash2; - private static readonly string directory; private static readonly string path1; private static readonly string path2; + private static readonly string workspaceRoot; private static readonly string directory1; private static readonly string directory2; @@ -44,12 +44,16 @@ static FingerprintCreatorTests() directory = Path.GetDirectoryName(path1); Assert.Equal(directory, Path.GetDirectoryName(path2)); - var path3 = Path.Combine(directory, Guid.NewGuid().ToString(), Guid.NewGuid().ToString()); - var path4 = Path.Combine(directory, Guid.NewGuid().ToString(), Guid.NewGuid().ToString()); + var workspace = Guid.NewGuid().ToString(); + + var path3 = Path.Combine(directory, workspace, Guid.NewGuid().ToString(), Guid.NewGuid().ToString()); + var path4 = Path.Combine(directory, workspace, Guid.NewGuid().ToString(), Guid.NewGuid().ToString()); directory1 = Path.GetDirectoryName(path3); directory2 = Path.GetDirectoryName(path4); + workspaceRoot = Path.GetDirectoryName(directory1); + directory1Name = Path.GetFileName(directory1); directory2Name = Path.GetFileName(directory2); @@ -122,7 +126,7 @@ public void Fingerprint_Path_IncludeFullPathMatches() directory2 }; - Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path); Assert.Equal( new [] { directory1Name, directory2Name }.OrderBy(x => x), f.Segments.OrderBy(x => x) @@ -140,11 +144,11 @@ public void Fingerprint_Path_IncludeRelativePathMatches() var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); var segments = new[] { - $"{directory1Name}", - $"{directory2Name}" + directory1Name, + directory2Name }; - Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path); Assert.Equal( new [] { directory1Name, directory2Name }.OrderBy(x => x), f.Segments.OrderBy(x => x) @@ -165,7 +169,7 @@ public void Fingerprint_Path_IncludeGlobMatches() $"**/{directory1Name},**/{directory2Name}", }; - Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, 
FingerprintType.Path); Assert.Equal( new [] { directory1Name, directory2Name }.OrderBy(x => x), f.Segments.OrderBy(x => x) @@ -186,7 +190,7 @@ public void Fingerprint_Path_ExcludeGlobMatches() $"**/{directory1Name},!**/{directory2Name}", }; - Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path); Assert.Equal( new [] { directory1Name }, f.Segments @@ -208,7 +212,7 @@ public void Fingerprint_Path_NoIncludePatterns() }; Assert.Throws( - () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + () => FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path) ); } } @@ -223,12 +227,12 @@ public void Fingerprint_Path_NoMatches() var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); var segments = new[] { - $"**/{Guid.NewGuid().ToString()},!**/{directory2Name}", + $"**/{Guid.NewGuid().ToString()},!**/{directory2Name}" }; // TODO: Should this really be throwing an exception? Assert.Throws( - () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + () => FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path) ); } } @@ -241,17 +245,17 @@ public void Fingerprint_Path_SinglePathOutsidePipelineWorkspace() using(var hostContext = new TestHostContext(this)) { var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); - var directoryInfo = new DirectoryInfo(directory); + var directoryInfo = new DirectoryInfo(workspaceRoot); var segments = new[] { directoryInfo.Parent.FullName, }; - Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path); + Fingerprint f = FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path); Assert.Equal(1, f.Segments.Count()); Assert.Equal( - new [] { Path.GetRelativePath(directory, directoryInfo.Parent.FullName) }, + new [] { Path.GetRelativePath(workspaceRoot, directoryInfo.Parent.FullName) }, f.Segments ); } @@ -265,15 +269,15 @@ public void Fingerprint_Path_MultiplePathOutsidePipelineWorkspace() using(var hostContext = new TestHostContext(this)) { var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); - var directoryInfo = new DirectoryInfo(directory); + var directoryInfo = new DirectoryInfo(workspaceRoot); var segments = new[] { directoryInfo.Parent.FullName, - directoryInfo.Parent.Parent.FullName, + directory1Name, }; Assert.Throws( - () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + () => FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path) ); } } @@ -286,14 +290,14 @@ public void Fingerprint_Path_BacktracedGlobPattern() using(var hostContext = new TestHostContext(this)) { var context = new AgentTaskPluginExecutionContext(hostContext.GetTrace()); - var directoryInfo = new DirectoryInfo(directory); + var directoryInfo = new DirectoryInfo(workspaceRoot); var segments = new[] { $"{directoryInfo.Parent.FullName}/*", }; Assert.Throws( - () => FingerprintCreator.EvaluateToFingerprint(context, directory, segments, FingerprintType.Path) + () => FingerprintCreator.EvaluateToFingerprint(context, workspaceRoot, segments, FingerprintType.Path) ); } } From 3bb578a31417afe4d51a5d8a8effda238bc485d9 Mon Sep 17 00:00:00 
2001 From: Ethan Dennis Date: Wed, 25 Mar 2020 10:08:56 -0700 Subject: [PATCH 27/27] Refactor LogSegment method --- .../PipelineCache/FingerprintCreator.cs | 74 +++++++++---------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs index e340d5b466..7c05bc07c4 100644 --- a/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs +++ b/src/Agent.Plugins/PipelineCache/FingerprintCreator.cs @@ -216,7 +216,7 @@ internal static void CheckSegment(string segment, string segmentType) KeySegmentType type, Object details) { - Func FormatForDisplay = (value, displayLength) => + string FormatForDisplay(string value, int displayLength) { if (value.Length > displayLength) { @@ -227,49 +227,49 @@ internal static void CheckSegment(string segment, string segmentType) }; string formattedSegment = FormatForDisplay(segment, Math.Min(segments.Select(s => s.Length).Max(), 50)); - - if (type == KeySegmentType.String) - { - context.Output($" - {formattedSegment} [string]"); - } - else if (type == KeySegmentType.Directory) - { - context.Output($" - {formattedSegment} [directory]"); - } - else if (type == KeySegmentType.DirectoryPattern) + + void LogPatternSegment(object value, string title, Func getText, Func getSuffix = null) { - var directories = (details as string[]) ?? new string[0]; - context.Output($" - {formattedSegment} [directory pattern; matches: {directories.Length}]"); - if (directories.Any()) + var matches = (value as T[]) ?? Array.Empty(); + context.Output($" - {formattedSegment} [{title} pattern; matches: {matches.Length}]"); + if (matches.Any()) { - int filePathDisplayLength = Math.Min(directories.Select(d => d.Length).Max(), 70); - foreach (var directory in directories) + int displayLength = Math.Min(matches.Select(d => getText(d).Length).Max(), 70); + foreach (var match in matches) { - context.Output($" - {FormatForDisplay(directory, filePathDisplayLength)}"); + context.Output($" - {FormatForDisplay(getText(match), displayLength)}{(getSuffix != null ? getSuffix(match) : "")}"); } } - } - else - { - var matches = (details as MatchedFile[]) ?? new MatchedFile[0]; + }; - if (type == KeySegmentType.FilePath) - { - string fileHash = matches.Length > 0 ? matches[0].Hash : null; + switch (type) + { + case KeySegmentType.String: + context.Output($" - {formattedSegment} [string]"); + break; + case KeySegmentType.Directory: + context.Output($" - {formattedSegment} [directory]"); + break; + case KeySegmentType.DirectoryPattern: + LogPatternSegment( + details, + "directory", + s => s + ); + break; + case KeySegmentType.FilePath: + var files = (details as MatchedFile[]) ?? new MatchedFile[0]; + string fileHash = files.Length > 0 ? files[0].Hash : null; context.Output($" - {formattedSegment} [file] {(!string.IsNullOrWhiteSpace(fileHash) ? $"--> {fileHash}" : "(not found)")}"); - } - else if (type == KeySegmentType.FilePattern) - { - context.Output($" - {formattedSegment} [file pattern; matches: {matches.Length}]"); - if (matches.Any()) - { - int filePathDisplayLength = Math.Min(matches.Select(mf => mf.DisplayPath.Length).Max(), 70); - foreach (var match in matches) - { - context.Output($" - {FormatForDisplay(match.DisplayPath, filePathDisplayLength)} --> {match.Hash}"); - } - } - } + break; + case KeySegmentType.FilePattern: + LogPatternSegment( + details, + "file", + match => match.DisplayPath, + match => $" --> {match.Hash}" + ); + break; } }
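
For reviewers, a minimal standalone sketch of the behavior patches 20 and 21 converge on: when the cache path resolves to a single path segment outside Pipeline.Workspace, tar is run directly in that directory and archives ".", while every other case is archived and extracted relative to the workspace root. The helper names below (PipelineCacheTarSketch, ChooseTarRoot, BuildCreateArguments, BuildExtractArguments) are invented for illustration and are not part of the agent code; the real logic lives in PipelineCacheServer.GetTarWorkingDirectory and TarUtils.

using System;
using System.IO;

static class PipelineCacheTarSketch  // hypothetical helper, for illustration only
{
    // Mirrors the intent of GetTarWorkingDirectory: a lone path-like segment that
    // does not start with the workspace root is tarred/extracted in place; all
    // other shapes (multiple segments, globs, workspace-relative paths) use the
    // workspace root as tar's working directory.
    public static (string tarRoot, bool isWorkspaceContained) ChooseTarRoot(string[] segments, string workspaceRoot)
    {
        if (segments.Length == 1)
        {
            string only = segments[0];
            bool looksLikePath = Path.IsPathRooted(only) || only.IndexOfAny(new[] { '/', '\\' }) >= 0;
            if (looksLikePath && !only.StartsWith(workspaceRoot, StringComparison.Ordinal))
            {
                return (only, false);
            }
        }
        return (workspaceRoot.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), true);
    }

    // Create-side arguments handed to GNU tar; the file list (or "." for the
    // outside-the-workspace case) is piped in on stdin via "-T -".
    public static string BuildCreateArguments(string archiveFileName, string tarRoot) =>
        $"-cf \"{archiveFileName}\" -C \"{tarRoot}\" -T -";

    // Extract-side arguments; tar is started inside tarRoot, so "." is the target.
    public static string BuildExtractArguments() => "-xf - -C .";
}

Under this sketch, a multi-segment input such as ".m2/repository | src/bin" is archived relative to Pipeline.Workspace, while a single absolute path outside the workspace is archived from, and restored into, that directory itself (the example paths here are hypothetical).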