Fix HttpClient timeout on Azure metadata retrieval (#4249)
* Decreased the HTTP client timeout and moved the metadata check to task execution

* Fixed the unit test by adding an async command mock

---------

Co-authored-by: Kirill Ivlev <102740624+kirill-ivlev@users.noreply.github.com>
ismayilov-ismayil and kirill-ivlev committed May 4, 2023
1 parent e0dd7e0 commit 8d4e3f3
Showing 4 changed files with 37 additions and 15 deletions.
1 change: 1 addition & 0 deletions src/Agent.Sdk/Util/AzureInstanceMetadataProvider.cs
@@ -16,6 +16,7 @@ class AzureInstanceMetadataProvider : IDisposable
public AzureInstanceMetadataProvider()
{
_client = new HttpClient();
_client.Timeout = TimeSpan.FromSeconds(5);
}

public void Dispose()
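Setting Timeout on the shared HttpClient bounds the Azure Instance Metadata Service probe: HttpClient's default timeout is 100 seconds, so on a non-Azure host where the metadata address never answers, the old behavior could stall job initialization for well over a minute. Below is a minimal sketch of the effect; the endpoint URL, API version, and Metadata header are standard Azure IMDS values and are not part of this diff.

using System;
using System.Net.Http;
using System.Threading.Tasks;

class ImdsProbeSketch
{
    static async Task Main()
    {
        // Default HttpClient.Timeout is 100 seconds; the commit caps it at 5.
        using var client = new HttpClient { Timeout = TimeSpan.FromSeconds(5) };
        client.DefaultRequestHeaders.Add("Metadata", "true");
        try
        {
            // Standard Azure IMDS endpoint; it only answers from inside an Azure VM.
            var response = await client.GetAsync(
                "http://169.254.169.254/metadata/instance?api-version=2021-02-01");
            Console.WriteLine($"IMDS responded: {(int)response.StatusCode}");
        }
        catch (Exception ex) when (ex is TaskCanceledException or HttpRequestException)
        {
            // Off Azure the probe now gives up after at most ~5 seconds
            // instead of hanging toward the 100-second default.
            Console.WriteLine($"Not an Azure VM (or IMDS unreachable): {ex.GetType().Name}");
        }
    }
}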
20 changes: 9 additions & 11 deletions src/Agent.Worker/JobRunner.cs
@@ -164,17 +164,15 @@ public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message,
jobContext.SetVariable(Constants.Variables.Agent.WorkFolder, HostContext.GetDirectory(WellKnownDirectory.Work), isFilePath: true);
jobContext.SetVariable(Constants.Variables.System.WorkFolder, HostContext.GetDirectory(WellKnownDirectory.Work), isFilePath: true);

try
{
jobContext.SetVariable(Constants.Variables.System.IsAzureVM, PlatformUtil.DetectAzureVM() ? "1" : "0");
jobContext.SetVariable(Constants.Variables.System.IsDockerContainer, PlatformUtil.DetectDockerContainer() ? "1" : "0");
}
catch (Exception ex)
{
// Error with telemetry shouldn't affect job run
Trace.Info($"Couldn't retrieve telemetry information");
Trace.Info(ex);
}
var azureVmCheckCommand = jobContext.GetHostContext().GetService<IAsyncCommandContext>();
azureVmCheckCommand.InitializeCommandContext(jobContext,"GetAzureVMMetada");
azureVmCheckCommand.Task = Task.Run(() => jobContext.SetVariable(Constants.Variables.System.IsAzureVM, PlatformUtil.DetectAzureVM() ? "1" : "0"));
jobContext.AsyncCommands.Add(azureVmCheckCommand);

var dockerDetectCommand = jobContext.GetHostContext().GetService<IAsyncCommandContext>();
dockerDetectCommand.InitializeCommandContext(jobContext,"DetectDockerContainer");
dockerDetectCommand.Task = Task.Run(() => jobContext.SetVariable(Constants.Variables.System.IsDockerContainer, PlatformUtil.DetectDockerContainer() ? "1" : "0"));
jobContext.AsyncCommands.Add(dockerDetectCommand);

string toolsDirectory = HostContext.GetDirectory(WellKnownDirectory.Tools);
Directory.CreateDirectory(toolsDirectory);
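Rather than running the Azure VM and Docker checks inline (the removed try/catch), JobRunner now queues each probe as an async command: the work starts on the thread pool immediately and job setup carries on, while StepsRunner later awaits the registered commands and logs, but does not rethrow, any failure (see the StepsRunner.cs hunk below). The following is a simplified, self-contained sketch of that pattern, with the agent's IAsyncCommandContext reduced to a named task and the platform probes stubbed out; the command names are taken from this diff.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

class DeferredProbesSketch
{
    static async Task Main()
    {
        var variables = new ConcurrentDictionary<string, string>();

        // Start each probe on the thread pool and remember it, instead of
        // blocking job setup while it runs (mirrors jobContext.AsyncCommands).
        var asyncCommands = new List<(string Name, Task Work)>
        {
            ("GetAzureVMMetada", Task.Run(() => variables["IsAzureVM"] = ProbeAzureVm() ? "1" : "0")),
            ("DetectDockerContainer", Task.Run(() => variables["IsDockerContainer"] = ProbeDocker() ? "1" : "0")),
        };

        // ... the rest of job setup continues here while the probes run ...

        // Later (as StepsRunner does), wait for the probes; a failure is logged, not fatal.
        foreach (var (name, work) in asyncCommands)
        {
            try { await work; }
            catch (Exception ex) { Console.WriteLine($"Caught exception from async command {name}: {ex.Message}"); }
        }
        Console.WriteLine(string.Join(", ", variables));
    }

    static bool ProbeAzureVm() => false;  // placeholder for PlatformUtil.DetectAzureVM()
    static bool ProbeDocker() => false;   // placeholder for PlatformUtil.DetectDockerContainer()
}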
25 changes: 21 additions & 4 deletions src/Agent.Worker/StepsRunner.cs
@@ -1,16 +1,18 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using Agent.Sdk;
using Microsoft.TeamFoundation.DistributedTask.WebApi;
using Microsoft.VisualStudio.Services.Agent.Util;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

using Agent.Sdk;

using Microsoft.TeamFoundation.DistributedTask.Expressions;
using Microsoft.TeamFoundation.DistributedTask.WebApi;
using Microsoft.VisualStudio.Services.Agent.Util;

using Pipelines = Microsoft.TeamFoundation.DistributedTask.Pipelines;
using Agent.Sdk.Knob;

namespace Microsoft.VisualStudio.Services.Agent.Worker
{
@@ -50,6 +52,21 @@ public async Task RunAsync(IExecutionContext jobContext, IList<IStep> steps)
CancellationTokenRegistration? jobCancelRegister = null;
int stepIndex = 0;
jobContext.Variables.Agent_JobStatus = jobContext.Result ?? TaskResult.Succeeded;
// Wait till all async commands finish.
foreach (var command in jobContext.AsyncCommands ?? new List<IAsyncCommandContext>())
{
try
{
// wait async command to finish.
await command.WaitAsync();
}

catch (Exception ex)
{
// Log the error
Trace.Info($"Caught exception from async command {command.Name}: {ex}");
}
}
foreach (IStep step in steps)
{
Trace.Info($"Processing step: DisplayName='{step.DisplayName}', ContinueOnError={step.ContinueOnError}, Enabled={step.Enabled}");
6 changes: 6 additions & 0 deletions src/Test/L0/Worker/JobRunnerL0.cs
@@ -35,6 +35,7 @@ public sealed class JobRunnerL0
private Mock<IPagingLogger> _logger;
private Mock<ITempDirectoryManager> _temp;
private Mock<IDiagnosticLogManager> _diagnosticLogManager;
private Mock<IAsyncCommandContext> _asyncCommandContext;

private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{
@@ -53,6 +54,7 @@ private TestHostContext CreateTestContext([CallerMemberName] String testName = "
_logger = new Mock<IPagingLogger>();
_temp = new Mock<ITempDirectoryManager>();
_diagnosticLogManager = new Mock<IDiagnosticLogManager>();
_asyncCommandContext = new Mock<IAsyncCommandContext>();

var expressionManager = new ExpressionManager();
expressionManager.Initialize(hc);
@@ -118,6 +120,7 @@ private TestHostContext CreateTestContext([CallerMemberName] String testName = "
hc.SetSingleton(_diagnosticLogManager.Object);
hc.EnqueueInstance<IExecutionContext>(_jobEc);
hc.EnqueueInstance<IPagingLogger>(_logger.Object);
hc.SetSingleton(_asyncCommandContext.Object);
return hc;
}

@@ -137,6 +140,8 @@ private TestHostContext CreateMSITestContext([CallerMemberName] String testName
_logger = new Mock<IPagingLogger>();
_temp = new Mock<ITempDirectoryManager>();
_diagnosticLogManager = new Mock<IDiagnosticLogManager>();
_asyncCommandContext = new Mock<IAsyncCommandContext>();


var expressionManager = new ExpressionManager();
expressionManager.Initialize(hc);
@@ -202,6 +207,7 @@ private TestHostContext CreateMSITestContext([CallerMemberName] String testName
hc.SetSingleton(_diagnosticLogManager.Object);
hc.EnqueueInstance<IExecutionContext>(_jobEc);
hc.EnqueueInstance<IPagingLogger>(_logger.Object);
hc.SetSingleton(_asyncCommandContext.Object);
return hc;
}

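Because JobRunner now resolves IAsyncCommandContext from the host context, the L0 tests register a Moq mock as a singleton so the GetService call does not fail. Below is a hedged sketch of how such a mock can be wired so the awaited command completes immediately; the interface is reduced here to the two members this commit touches, and the explicit WaitAsync setup is an assumption rather than part of the committed test, which only creates the mock and registers it with hc.SetSingleton.

using System.Threading.Tasks;
using Moq;

// Reduced stand-in for the agent's IAsyncCommandContext, limited to the
// members used by this commit (StepsRunner awaits WaitAsync and logs Name).
public interface IAsyncCommandContext
{
    string Name { get; }
    Task WaitAsync();
}

static class AsyncCommandMockSketch
{
    public static Mock<IAsyncCommandContext> Create()
    {
        var asyncCommandContext = new Mock<IAsyncCommandContext>();
        // Let `await command.WaitAsync()` in StepsRunner complete right away.
        asyncCommandContext.Setup(c => c.WaitAsync()).Returns(Task.CompletedTask);
        asyncCommandContext.SetupGet(c => c.Name).Returns("GetAzureVMMetada");
        // In JobRunnerL0 the equivalent mock is handed to hc.SetSingleton(...) so
        // GetService<IAsyncCommandContext>() in JobRunner resolves to it.
        return asyncCommandContext;
    }
}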
