Enable VSTHRD200 (Use "Async" suffix for async methods) (#4794)
This rule remains disabled in test code to avoid requiring test methods
to be renamed.
sharwell committed Mar 2, 2020
1 parent 2bcb481 commit d26b896
Showing 16 changed files with 54 additions and 53 deletions.
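For orientation, VSTHRD200 flags awaitable-returning methods whose names lack the "Async" suffix; the fix applied throughout this commit is a rename. A minimal, hedged sketch of the before/after shape (the class and method body are illustrative, not taken from the repository):

```csharp
using System.Threading.Tasks;

public class Example
{
    // Before: an awaitable-returning method without the suffix triggers
    // VSTHRD200 ("Use 'Async' suffix for async methods").
    //     internal async Task<bool> LoadModel() { ... }

    // After: the rename this commit applies across the product code.
    internal async Task<bool> LoadModelAsync()
    {
        await Task.Delay(10); // stand-in for real I/O
        return true;
    }
}
```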
7 changes: 4 additions & 3 deletions .editorconfig
@@ -7,9 +7,6 @@ dotnet_sort_system_directives_first = true
# VSTHRD002: Avoid problematic synchronous waits
dotnet_diagnostic.VSTHRD002.severity = none

-# VSTHRD200: Use "Async" suffix for async methods
-dotnet_diagnostic.VSTHRD200.severity = none

[test/**/*.cs]

# MSML_GeneralName: This name should be PascalCased
@@ -25,3 +22,7 @@ dotnet_diagnostic.MSML_NoInstanceInitializers.severity = none
# BaseTestClass does not apply for analyzer testing.
# MSML_ExtendBaseTestClass: Test classes should be derived from BaseTestClass
dotnet_diagnostic.MSML_ExtendBaseTestClass.severity = none

+# The MSML_RelaxTestNaming suppressor for VSTHRD200 is not active for CodeAnalyzer.Tests, so we disable it altogether.
+# VSTHRD200: Use "Async" suffix for async methods
+dotnet_diagnostic.VSTHRD200.severity = none
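The comment above refers to a Roslyn diagnostic suppressor that relaxes VSTHRD200 for test methods. As a hedged illustration only (the class name, ID, and body below sketch the general DiagnosticSuppressor shape and are not the repository's actual MSML_RelaxTestNaming implementation):

```csharp
using System.Collections.Immutable;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;

// Sketch of a suppressor that hides VSTHRD200 reports in test code.
[DiagnosticAnalyzer(LanguageNames.CSharp)]
internal sealed class RelaxTestNamingSuppressorSketch : DiagnosticSuppressor
{
    private static readonly SuppressionDescriptor Descriptor = new SuppressionDescriptor(
        id: "MSML_RelaxTestNaming",            // illustrative ID
        suppressedDiagnosticId: "VSTHRD200",
        justification: "Test methods are not required to use the Async suffix.");

    public override ImmutableArray<SuppressionDescriptor> SupportedSuppressions =>
        ImmutableArray.Create(Descriptor);

    public override void ReportSuppressions(SuppressionAnalysisContext context)
    {
        foreach (var diagnostic in context.ReportedDiagnostics)
        {
            // A real suppressor would first verify the flagged method is a test
            // (for example, marked with [Fact] or [Theory]) before suppressing.
            context.ReportSuppression(Suppression.Create(Descriptor, diagnostic));
        }
    }
}
```

Because that suppressor is not loaded for CodeAnalyzer.Tests, the rule's severity is instead set to none for the whole test tree.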
10 changes: 5 additions & 5 deletions src/Microsoft.Extensions.ML/ModelLoaders/UriModelLoader.cs
@@ -46,7 +46,7 @@ internal void Start(Uri uri, TimeSpan period)
{
_timerPeriod = period;
_uri = uri;
-if (LoadModel().ConfigureAwait(false).GetAwaiter().GetResult())
+if (LoadModelAsync().ConfigureAwait(false).GetAwaiter().GetResult())
{
StartReloadTimer();
}
@@ -78,10 +78,10 @@ internal async Task RunAsync()
cancellation.CancelAfter(TimeoutMilliseconds);
Logger.UriReloadBegin(_logger, _uri);

-var eTagMatches = await MatchEtag(_uri, _eTag);
+var eTagMatches = await MatchEtagAsync(_uri, _eTag);
if (!eTagMatches)
{
-await LoadModel();
+await LoadModelAsync();
var previousToken = Interlocked.Exchange(ref _reloadToken, new ModelReloadToken());
previousToken.OnReload();
}
@@ -102,7 +102,7 @@ internal async Task RunAsync()
}
}

-internal virtual async Task<bool> MatchEtag(Uri uri, string eTag)
+internal virtual async Task<bool> MatchEtagAsync(Uri uri, string eTag)
{
using (var client = new HttpClient())
{
@@ -133,7 +133,7 @@ internal void StopReloadTimer()
}
}

-internal virtual async Task<bool> LoadModel()
+internal virtual async Task<bool> LoadModelAsync()
{
//TODO: We probably need some sort of retry policy for this.
try
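The Start hunk above blocks synchronously on the renamed LoadModelAsync. A minimal sketch of that sync-over-async call shape, using illustrative members rather than the loader's real ones:

```csharp
using System;
using System.Threading.Tasks;

public class StartExample
{
    // Illustrative stand-in; the real LoadModelAsync fetches and loads a model.
    internal virtual async Task<bool> LoadModelAsync()
    {
        await Task.Delay(10);
        return true;
    }

    internal void Start()
    {
        // Same shape as the diff: a synchronous entry point blocking on the async call.
        // GetAwaiter().GetResult() rethrows the task's original exception,
        // unlike .Result, which wraps it in an AggregateException.
        if (LoadModelAsync().ConfigureAwait(false).GetAwaiter().GetResult())
        {
            Console.WriteLine("model loaded; a reload timer would start here");
        }
    }
}
```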
6 changes: 3 additions & 3 deletions src/Microsoft.ML.Core/Utilities/ResourceManagerUtils.cs
@@ -98,7 +98,7 @@ public static string GetUrl(string suffix)
/// <param name="timeout">An integer indicating the number of milliseconds to wait before timing out while downloading a resource.</param>
/// <returns>The download results, containing the file path where the resources was (or should have been) downloaded to, and an error message
/// (or null if there was no error).</returns>
-public async Task<ResourceDownloadResults> EnsureResource(IHostEnvironment env, IChannel ch, string relativeUrl, string fileName, string dir, int timeout)
+public async Task<ResourceDownloadResults> EnsureResourceAsync(IHostEnvironment env, IChannel ch, string relativeUrl, string fileName, string dir, int timeout)
{
var filePath = GetFilePath(ch, fileName, dir, out var error);
if (File.Exists(filePath) || !string.IsNullOrEmpty(error))
@@ -120,7 +120,7 @@ private async Task<string> DownloadFromUrlWithRetryAsync(IHostEnvironment env, I

for (int i = 0; i < retryTimes; ++i)
{
-var thisDownloadResult = await DownloadFromUrl(env, ch, url, fileName, timeout, filePath);
+var thisDownloadResult = await DownloadFromUrlAsync(env, ch, url, fileName, timeout, filePath);

if (string.IsNullOrEmpty(thisDownloadResult))
return thisDownloadResult;
@@ -134,7 +134,7 @@ private async Task<string> DownloadFromUrlWithRetryAsync(IHostEnvironment env, I
}

/// <returns>Returns the error message if an error occurred, null if download was successful.</returns>
-private async Task<string> DownloadFromUrl(IHostEnvironment env, IChannel ch, string url, string fileName, int timeout, string filePath)
+private async Task<string> DownloadFromUrlAsync(IHostEnvironment env, IChannel ch, string url, string fileName, int timeout, string filePath)
{
using (var webClient = new WebClient())
using (var downloadCancel = new CancellationTokenSource())
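The DownloadFromUrlWithRetryAsync hunks above loop over the renamed DownloadFromUrlAsync and treat a null or empty return value as success. A hedged sketch of that retry convention (the method names and the fake download are illustrative):

```csharp
using System.Threading.Tasks;

public static class DownloadRetryExample
{
    // Illustrative stand-in: returns null on success, otherwise an error message.
    private static async Task<string> DownloadOnceAsync(string url)
    {
        await Task.Delay(10);
        return null;
    }

    public static async Task<string> DownloadWithRetryAsync(string url, int retryTimes = 5)
    {
        string lastError = null;
        for (int i = 0; i < retryTimes; ++i)
        {
            lastError = await DownloadOnceAsync(url);
            if (string.IsNullOrEmpty(lastError))
                return lastError; // success: no error message to report
        }
        return lastError; // every attempt failed; surface the last error
    }
}
```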
14 changes: 7 additions & 7 deletions src/Microsoft.ML.Core/Utilities/ThreadUtils.cs
@@ -12,11 +12,11 @@ namespace Microsoft.ML.Internal.Utilities
{
internal static partial class Utils
{
-public static Task RunOnBackgroundThread(Action start) =>
-    ImmediateBackgroundThreadPool.Queue(start);
+public static Task RunOnBackgroundThreadAsync(Action start) =>
+    ImmediateBackgroundThreadPool.QueueAsync(start);

-public static Task RunOnBackgroundThread(Action<object> start, object obj) =>
-    ImmediateBackgroundThreadPool.Queue(start, obj);
+public static Task RunOnBackgroundThreadAsync(Action<object> start, object obj) =>
+    ImmediateBackgroundThreadPool.QueueAsync(start, obj);

public static Thread RunOnForegroundThread(ParameterizedThreadStart start) =>
new Thread(start) { IsBackground = false };
@@ -42,17 +42,17 @@ private static class ImmediateBackgroundThreadPool
/// always end in the <see cref="TaskStatus.RanToCompletion"/> state; if the delegate throws
/// an exception, it'll be allowed to propagate on the thread, crashing the process.
/// </summary>
-public static Task Queue(Action threadStart) => Queue((Delegate)threadStart, null);
+public static Task QueueAsync(Action threadStart) => QueueAsync((Delegate)threadStart, null);

/// <summary>
/// Queues an <see cref="Action{Object}"/> delegate and associated state to be executed immediately on another thread,
/// and returns a <see cref="Task"/> that represents its eventual completion. The task will
/// always end in the <see cref="TaskStatus.RanToCompletion"/> state; if the delegate throws
/// an exception, it'll be allowed to propagate on the thread, crashing the process.
/// </summary>
-public static Task Queue(Action<object> threadStart, object state) => Queue((Delegate)threadStart, state);
+public static Task QueueAsync(Action<object> threadStart, object state) => QueueAsync((Delegate)threadStart, state);

-private static Task Queue(Delegate threadStart, object state)
+private static Task QueueAsync(Delegate threadStart, object state)
{
// Create the TaskCompletionSource used to represent this work.
// Call sites only care about completion, not about the distinction between
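RunOnBackgroundThreadAsync forwards to ImmediateBackgroundThreadPool.QueueAsync, which hands the delegate to a dedicated background thread and surfaces completion through a TaskCompletionSource (the hunk's trailing comment mentions one). A minimal sketch of that general pattern, assuming a fresh thread per call rather than the pool's actual thread-reuse logic:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

internal static class BackgroundRunSketch
{
    // Run a delegate on a dedicated background thread and return a Task for its completion.
    public static Task QueueAsync(Action threadStart)
    {
        var tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
        var thread = new Thread(() =>
        {
            // Exceptions are deliberately not caught: they propagate on the thread,
            // matching the behavior documented in the hunk above.
            threadStart();
            tcs.SetResult(true); // the returned task ends in RanToCompletion
        })
        { IsBackground = true };
        thread.Start();
        return tcs.Task;
    }
}
```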
4 changes: 2 additions & 2 deletions src/Microsoft.ML.Data/Data/DataViewUtils.cs
@@ -367,7 +367,7 @@ private static DataViewRowCursor ConsolidateCore(IChannelProvider provider, Data
ch.Assert(localCursor.Position < 0);
// Note that these all take ownership of their respective cursors,
// so they all handle their disposal internal to the thread.
-workers[t] = Utils.RunOnBackgroundThread(() =>
+workers[t] = Utils.RunOnBackgroundThreadAsync(() =>
{
// This will be the last batch sent in the finally. If iteration proceeds without
// error, it will remain null, and be sent as a sentinel. If iteration results in
@@ -557,7 +557,7 @@ private DataViewRowCursor[] SplitCore(IChannelProvider ch, DataViewRowCursor inp
// Set up and start the thread that consumes the input, and utilizes the InPipe
// instances to compose the Batch objects. The thread takes ownership of the
// cursor, and so handles its disposal.
-Task thread = Utils.RunOnBackgroundThread(
+Task thread = Utils.RunOnBackgroundThreadAsync(
() =>
{
Batch lastBatch = null;
8 changes: 4 additions & 4 deletions src/Microsoft.ML.Data/DataLoadSave/Binary/BinaryLoader.cs
@@ -1333,9 +1333,9 @@ public Cursor(BinaryLoader parent, IEnumerable<DataViewSchema.Column> columnsNee
_pipeGetters[c] = _pipes[c].GetGetter();
}
// The data structures are initialized. Now set up the workers.
-_readerThread = Utils.RunOnBackgroundThread(ReaderWorker);
+_readerThread = Utils.RunOnBackgroundThreadAsync(ReaderWorker);

-_pipeTask = SetupDecompressTask();
+_pipeTask = DecompressAsync();
}

protected override void Dispose(bool disposing)
@@ -1405,14 +1405,14 @@ protected override void Dispose(bool disposing)
base.Dispose(disposing);
}

-private Task SetupDecompressTask()
+private Task DecompressAsync()
{
Task[] pipeWorkers = new Task[_parent._threads];
long decompressSequence = -1;
long decompressSequenceLim = (long)_numBlocks * _actives.Length;
for (int w = 0; w < pipeWorkers.Length; ++w)
{
-pipeWorkers[w] = Utils.RunOnBackgroundThread(() =>
+pipeWorkers[w] = Utils.RunOnBackgroundThreadAsync(() =>
{
try
{
4 changes: 2 additions & 2 deletions src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs
@@ -664,15 +664,15 @@ public void SaveData(Stream stream, IDataView data, params int[] colIndices)
Task[] compressionThreads = new Task[Environment.ProcessorCount];
for (int i = 0; i < compressionThreads.Length; ++i)
{
-compressionThreads[i] = Utils.RunOnBackgroundThread(
+compressionThreads[i] = Utils.RunOnBackgroundThreadAsync(
() => CompressionWorker(toCompress, toWrite, activeColumns.Length, waiter, exMarshaller));
}
compressionTask = Task.WhenAll(compressionThreads);
}

// While there is an advantage to putting the IO into a separate thread, there is not an
// advantage to having more than one worker.
-Task writeThread = Utils.RunOnBackgroundThread(
+Task writeThread = Utils.RunOnBackgroundThreadAsync(
() => WriteWorker(stream, toWrite, activeColumns, data.Schema, rowsPerBlock, _host, exMarshaller));
sw.Start();

4 changes: 2 additions & 2 deletions src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderCursor.cs
@@ -427,7 +427,7 @@ public LineReader(IMultiStreamSource files, int batchSize, int bufSize, bool has
_cref = cref;

_queue = new BlockingQueue<LineBatch>(bufSize);
-_thdRead = Utils.RunOnBackgroundThread(ThreadProc);
+_thdRead = Utils.RunOnBackgroundThreadAsync(ThreadProc);
}

public void Release()
@@ -691,7 +691,7 @@ public ParallelState(Cursor curs, out RowSet rows, int cthd)

for (int tid = 0; tid < _threads.Length; tid++)
{
-_threads[tid] = Utils.RunOnBackgroundThread(ThreadProc, tid);
+_threads[tid] = Utils.RunOnBackgroundThreadAsync(ThreadProc, tid);
}
}

2 changes: 1 addition & 1 deletion src/Microsoft.ML.Data/DataView/CacheDataView.cs
@@ -364,7 +364,7 @@ private void KickoffFiller(int[] columns)
// They will not be caught by the big catch in the main thread, as filler is not running
// in the main thread. Some sort of scheme by which these exceptions could be
// cleanly handled would be more appropriate. See task 3740.
-var fillerThread = Utils.RunOnBackgroundThread(() => Filler(cursor, caches, waiter));
+var fillerThread = Utils.RunOnBackgroundThreadAsync(() => Filler(cursor, caches, waiter));
_cacheFillerThreads.Add(fillerThread);
}

4 changes: 2 additions & 2 deletions src/Microsoft.ML.Data/Transforms/RowShufflingTransformer.cs
@@ -546,7 +546,7 @@ public Cursor(IChannelProvider provider, int poolRows, DataViewRowCursor input,
for (int i = 1; i < _bufferDepth; ++i)
PostAssert(_toProduce, _blockSize);

-_producerTask = LoopProducerWorker();
+_producerTask = ProduceAsync();
}

protected override void Dispose(bool disposing)
@@ -586,7 +586,7 @@ public override ValueGetter<DataViewRowId> GetIdGetter()
return _idGetter;
}

-private async Task LoopProducerWorker()
+private async Task ProduceAsync()
{
try
{
6 changes: 3 additions & 3 deletions src/Microsoft.ML.Sweeper/AsyncSweeper.cs
@@ -47,7 +47,7 @@ public interface IAsyncSweeper
/// Propose a <see cref="ParameterSet"/>.
/// </summary>
/// <returns>A future <see cref="ParameterSet"/> and its id. Null if unavailable or cancelled.</returns>
-Task<ParameterSetWithId> Propose();
+Task<ParameterSetWithId> ProposeAsync();

/// <summary>
/// Notify the sweeper of a finished run.
@@ -108,7 +108,7 @@ public void Update(int id, IRunResult result)
}
}

-public Task<ParameterSetWithId> Propose()
+public Task<ParameterSetWithId> ProposeAsync()
{
if (_canceled)
return Task.FromResult<ParameterSetWithId>(null);
Expand Down Expand Up @@ -272,7 +272,7 @@ private void UpdateBarrierStatus(int id)
}
}

-public async Task<ParameterSetWithId> Propose()
+public async Task<ParameterSetWithId> ProposeAsync()
{
if (_cts.IsCancellationRequested)
return null;
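The first ProposeAsync implementation above stays synchronous and returns already-completed tasks, using a null result to signal cancellation. A hedged sketch of that shape with simplified types (the real sweepers return ParameterSetWithId):

```csharp
using System.Threading;
using System.Threading.Tasks;

public class SweeperSketch
{
    private int _counter;
    private volatile bool _canceled;

    public void Cancel() => _canceled = true;

    // Completed-task convention from the diff: null once cancelled, otherwise
    // an already-available proposal wrapped with Task.FromResult.
    public Task<string> ProposeAsync()
    {
        if (_canceled)
            return Task.FromResult<string>(null);
        return Task.FromResult("proposal " + Interlocked.Increment(ref _counter));
    }
}
```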
2 changes: 1 addition & 1 deletion src/Microsoft.ML.TensorFlow/TensorflowUtils.cs
@@ -180,7 +180,7 @@ internal static void DownloadIfNeeded(IHostEnvironment env, string url, string d
{
using (var ch = env.Start("Ensuring meta files are present."))
{
-var ensureModel = ResourceManagerUtils.Instance.EnsureResource(env, ch, url, fileName, dir, timeout);
+var ensureModel = ResourceManagerUtils.Instance.EnsureResourceAsync(env, ch, url, fileName, dir, timeout);
ensureModel.Wait();
var errorResult = ResourceManagerUtils.GetErrorMessage(out var errorMessage, ensureModel.Result);
if (errorResult != null)
@@ -628,7 +628,7 @@ private string EnsureModelFile(IHostEnvironment env, out int linesToSkip, WordEm
{
string dir = kind == WordEmbeddingEstimator.PretrainedModelKind.SentimentSpecificWordEmbedding ? Path.Combine("Text", "Sswe") : "WordVectors";
var url = $"{dir}/{modelFileName}";
-var ensureModel = ResourceManagerUtils.Instance.EnsureResource(Host, ch, url, modelFileName, dir, Timeout);
+var ensureModel = ResourceManagerUtils.Instance.EnsureResourceAsync(Host, ch, url, modelFileName, dir, Timeout);
ensureModel.Wait();
var errorResult = ResourceManagerUtils.GetErrorMessage(out var errorMessage, ensureModel.Result);
if (errorResult != null)
4 changes: 2 additions & 2 deletions test/Microsoft.Extensions.ML.Tests/UriLoaderTests.cs
@@ -92,12 +92,12 @@ public override ITransformer GetModel()
return null;
}

-internal override Task<bool> LoadModel()
+internal override Task<bool> LoadModelAsync()
{
return Task.FromResult(true);
}

-internal override Task<bool> MatchEtag(Uri uri, string eTag)
+internal override Task<bool> MatchEtagAsync(Uri uri, string eTag)
{
return Task.FromResult(ETagMatches(uri, eTag));
}
16 changes: 8 additions & 8 deletions test/Microsoft.ML.Sweeper.Tests/TestSweeper.cs
@@ -145,7 +145,7 @@ public void TestSimpleSweeperAsync()
var paramSets = new List<ParameterSet>();
for (int i = 0; i < sweeps; i++)
{
-var task = sweeper.Propose();
+var task = sweeper.ProposeAsync();
Assert.True(task.IsCompleted);
paramSets.Add(task.Result.ParameterSet);
var result = new RunResult(task.Result.ParameterSet, random.NextDouble(), true);
@@ -166,7 +166,7 @@ public void TestSimpleSweeperAsync()
paramSets.Clear();
for (int i = 0; i < sweeps; i++)
{
-var task = gridSweeper.Propose();
+var task = gridSweeper.ProposeAsync();
Assert.True(task.IsCompleted);
paramSets.Add(task.Result.ParameterSet);
}
@@ -202,7 +202,7 @@ public async Task TestDeterministicSweeperAsyncCancellation()
int numCompleted = 0;
for (int i = 0; i < sweeps; i++)
{
-var task = sweeper.Propose();
+var task = sweeper.ProposeAsync();
if (i < args.BatchSize - args.Relaxation)
{
Assert.True(task.IsCompleted);
@@ -252,7 +252,7 @@ public async Task TestDeterministicSweeperAsync()
var paramSets = new List<ParameterSet>();
for (int i = 0; i < sweeps; i++)
{
-var task = sweeper.Propose();
+var task = sweeper.ProposeAsync();
Assert.True(task.IsCompleted);
paramSets.Add(task.CompletedResult().ParameterSet);
var result = new RunResult(task.CompletedResult().ParameterSet, random.NextDouble(), true);
@@ -270,7 +270,7 @@ public async Task TestDeterministicSweeperAsync()
var results = new List<KeyValuePair<int, IRunResult>>();
for (int i = 0; i < args.BatchSize; i++)
{
-var task = sweeper.Propose();
+var task = sweeper.ProposeAsync();
Assert.True(task.IsCompleted);
tasks[i] = task;
if (task.CompletedResult() == null)
@@ -281,7 +281,7 @@ public async Task TestDeterministicSweeperAsync()
// in the previous batch has been posted to the sweeper.
for (int i = args.BatchSize; i < 2 * args.BatchSize; i++)
{
-var task = sweeper.Propose();
+var task = sweeper.ProposeAsync();
Assert.False(task.IsCompleted);
tasks[i] = task;
}
@@ -328,7 +328,7 @@ public void TestDeterministicSweeperAsyncParallel()
sleeps[i] = random.Next(10, 100);
var r = Task.Run(() => Parallel.For(0, sweeps, options, (int i) =>
{
-var task = sweeper.Propose();
+var task = sweeper.ProposeAsync();
task.Wait();
Assert.Equal(TaskStatus.RanToCompletion, task.Status);
var paramWithId = task.Result;
@@ -386,7 +386,7 @@ public async Task TestNelderMeadSweeperAsync()

for (int i = 0; i < sweeps; i++)
{
-var paramWithId = await sweeper.Propose();
+var paramWithId = await sweeper.ProposeAsync();
if (paramWithId == null)
return;
var result = new RunResult(paramWithId.ParameterSet, metrics[i], true);