This repository was archived by the owner on Jan 23, 2023. It is now read-only.
Merged
13 changes: 12 additions & 1 deletion dependencies.props
@@ -15,6 +15,9 @@
<PropertyGroup>
<CoreClrPackageVersion>2.1.0-preview1-25327-01</CoreClrPackageVersion>
<XunitPackageVersion>2.2.0-beta2-build3300</XunitPackageVersion>
<XUnitConsoleNetCoreVersion>1.0.2-prerelease-00177</XUnitConsoleNetCoreVersion>
<XUnitPerformanceApiVersion>1.0.0-beta-build0007</XUnitPerformanceApiVersion>
<MicrosoftDiagnosticsTracingLibraryVersion>1.0.3-alpha-experimental</MicrosoftDiagnosticsTracingLibraryVersion>
</PropertyGroup>

<!-- Package dependency verification/auto-upgrade configuration. -->
@@ -78,8 +81,16 @@
<Version>1.0.0-alpha-build0040</Version>
</StaticDependency>

<XUnitPerformanceApiDependency Include="xunit.performance.api" />
<XUnitPerformanceApiDependency Include="xunit.performance.core" />
<XUnitPerformanceApiDependency Include="xunit.performance.execution" />
<XUnitPerformanceApiDependency Include="xunit.performance.metrics" />
<StaticDependency Include="@(XUnitPerformanceApiDependency)">
<Version>$(XUnitPerformanceApiVersion)</Version>
</StaticDependency>

<StaticDependency Include="xunit.console.netcore">
<Version>1.0.2-prerelease-00177</Version>
<Version>$(XUnitConsoleNetCoreVersion)</Version>
</StaticDependency>

<DependencyBuildInfo Include="@(StaticDependency)">
6 changes: 4 additions & 2 deletions perf.groovy
@@ -105,12 +105,12 @@ def static getOSGroup(def os) {

batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")

// Run with just stopwatch
// Run with just stopwatch: Profile=Off
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\linkbench\\linkbench -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -nowarmup -runtype ${runType} -scenarioTest -group ILLink")

// Run with the full set of counters enabled
// Run with the full set of counters enabled: Profile=On
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
}
@@ -124,6 +124,7 @@ def static getOSGroup(def os) {
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('Perf-*.xml')
archiveSettings.addFiles('Perf-*.etl')
archiveSettings.addFiles('Perf-*.log')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)

@@ -288,6 +289,7 @@ def static getOSGroup(def os) {

// Save machinedata.json to /artifact/bin/ Jenkins dir
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('Perf-*.log')
archiveSettings.addFiles('Perf-*.xml')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
74 changes: 37 additions & 37 deletions tests/scripts/run-xunit-perf.cmd
@@ -6,7 +6,6 @@
@if defined _echo echo on

setlocal ENABLEDELAYEDEXPANSION
setlocal
set ERRORLEVEL=
set BENCHVIEW_RUN_TYPE=local
set CORECLR_REPO=%CD%
@@ -45,7 +44,6 @@ setlocal
rem find and stage the tests
set /A "LV_FAILURES=0"
for /R %CORECLR_PERF% %%T in (*.%TEST_FILE_EXT%) do (
rem Skip known failures
call :run_benchmark %%T || (
set /A "LV_FAILURES+=1"
)
@@ -73,27 +71,38 @@ setlocal
set BENCHDIR=%~p1

rem copy benchmark and any input files
call :run_cmd xcopy /s %~1 . >> %RUNLOG% || exit /b 1
call :run_cmd xcopy /sy %~1 . >> %RUNLOG% || exit /b 1
if exist "%BENCHDIR%*.txt" (
call :run_cmd xcopy /s %BENCHDIR%*.txt . >> %RUNLOG% || exit /b 1
call :run_cmd xcopy /sy %BENCHDIR%*.txt . >> %RUNLOG% || exit /b 1
)

set CORE_ROOT=%CORECLR_REPO%\sandbox

rem setup additional environment variables
if DEFINED TEST_ENV (
if EXIST "%TEST_ENV%" (
call "%TEST_ENV%"
)
)

set BENCHNAME_LOG_FILE_NAME=%BENCHNAME%.log
echo/
echo/ ----------
echo/ Running %BENCHNAME%
echo/ ----------

rem CORE_ROOT environment variable is used by some benchmarks such as Roslyn / CscBench.
set CORE_ROOT=%CORECLR_REPO%\sandbox

set LV_RUNID=Perf-%ETW_COLLECTION%
set BENCHNAME_LOG_FILE_NAME=%LV_RUNID%-%BENCHNAME%.log
set LV_CMD=
if defined IS_SCENARIO_TEST (
call :run_cmd corerun.exe "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid Perf 1>"%BENCHNAME_LOG_FILE_NAME%" 2>&1
set "LV_CMD=corerun.exe "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid "%LV_RUNID%""
) else (
call :run_cmd %STABILITY_PREFIX% corerun.exe PerfHarness.dll "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid Perf --perf:collect %COLLECTION_FLAGS% 1>"%BENCHNAME_LOG_FILE_NAME%" 2>&1
set "LV_CMD=%STABILITY_PREFIX% corerun.exe PerfHarness.dll "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid "%LV_RUNID%" --perf:collect %COLLECTION_FLAGS%"
)

call :print_to_console $ !LV_CMD!
call :run_cmd !LV_CMD! 1>"%BENCHNAME_LOG_FILE_NAME%" 2>&1

IF %ERRORLEVEL% NEQ 0 (
call :print_error corerun.exe exited with %ERRORLEVEL% code.
if exist "%BENCHNAME_LOG_FILE_NAME%" type "%BENCHNAME_LOG_FILE_NAME%"
@@ -103,14 +112,13 @@
rem optionally generate results for benchview
if exist "%BENCHVIEW_PATH%" (
call :generate_results_for_benchview || exit /b 1
)
)

rem Save off the results to the root directory for recovery later in Jenkins
IF EXIST "Perf-*%BENCHNAME%.xml" (
call :run_cmd copy "Perf-*%BENCHNAME%.xml" "%CORECLR_REPO%\Perf-%BENCHNAME%-%ETW_COLLECTION%.xml" || exit /b 1
)
IF EXIST "Perf-*%BENCHNAME%.etl" (
call :run_cmd copy "Perf-*%BENCHNAME%.etl" "%CORECLR_REPO%\Perf-%BENCHNAME%-%ETW_COLLECTION%.etl" || exit /b 1
for %%e in (xml etl log) do (
IF EXIST ".\%LV_RUNID%-%BENCHNAME%.%%e" (
call :run_cmd xcopy /vy ".\%LV_RUNID%-%BENCHNAME%.%%e" .. || exit /b 1
)
)

exit /b 0
@@ -219,7 +227,7 @@ rem Sets the test architecture.
rem ****************************************************************************
set TEST_ARCH=%TEST_ARCHITECTURE%
exit /b 0

:verify_benchview_tools
rem ****************************************************************************
rem Verifies that the path to the benchview tools is correct.
@@ -231,7 +239,7 @@
)
)
exit /b 0

:verify_core_overlay
rem ****************************************************************************
rem Verify that the Core_Root folder exists.
@@ -243,7 +251,7 @@
)
exit /b 0

:set_collection_config
:set_collection_config
rem ****************************************************************************
rem Sets the config based on the providers used for collection
rem ****************************************************************************
@@ -254,7 +262,7 @@
)
exit /b 0


:set_perf_run_log
rem ****************************************************************************
rem Sets the script's output log file.
@@ -271,7 +279,7 @@
rem Creates the sandbox folder used by the script to copy binaries locally,
rem and execute benchmarks.
rem ****************************************************************************
if exist sandbox rd /s /q sandbox
if exist sandbox rmdir /s /q sandbox
if exist sandbox call :print_error Failed to remove the sandbox folder& exit /b 1
if not exist sandbox mkdir sandbox
if not exist sandbox call :print_error Failed to create the sandbox folder& exit /b 1
@@ -293,7 +301,7 @@
set LV_MEASUREMENT_ARGS=%LV_MEASUREMENT_ARGS% --append

for /f %%f in ('dir /b Perf-*%BENCHNAME%.xml 2^>nul') do (
call :run_cmd py.exe "%BENCHVIEW_PATH%\measurement.py" %LV_MEASUREMENT_ARGS% %%f
call :run_cmd py.exe "%BENCHVIEW_PATH%\measurement.py" %LV_MEASUREMENT_ARGS% %%f

IF !ERRORLEVEL! NEQ 0 (
call :print_error Failed to generate BenchView measurement data.
@@ -322,7 +330,7 @@ setlocal
set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --machinepool "PerfSnake"

call :run_cmd py.exe "%BENCHVIEW_PATH%\submission.py" measurement.json %LV_SUBMISSION_ARGS%

IF %ERRORLEVEL% NEQ 0 (
call :print_error Creating BenchView submission data failed.
exit /b 1
@@ -360,27 +368,25 @@ rem ****************************************************************************
echo -collectionFlags This is used to specify what collection flags get passed to the performance
echo harness that is doing the test running. If this is not specified we only use stopwatch.
echo Other flags are "default", which is whatever the test being run specified, "CacheMisses",
echo "BranchMispredictions", and "InstructionsRetired".
echo "BranchMispredictions", and "InstructionsRetired".
exit /b %ERRORLEVEL%

:print_error
rem ****************************************************************************
rem Function wrapper that unifies how errors are output by the script.
rem Functions output to the standard error.
rem ****************************************************************************
echo [%DATE%][%TIME:~0,-3%][ERROR] %* 1>&2
call :print_to_console [ERROR] %* 1>&2
exit /b %ERRORLEVEL%

:print_to_console
rem ****************************************************************************
rem Sends text to the console screen, no matter what (even when the script's
rem output is redirected). This can be useful to provide information on where
rem the script is executing.
rem Sends text to the console screen. This can be useful to provide
rem information on where the script is executing.
rem ****************************************************************************
if defined _debug (
echo [%DATE%][%TIME:~0,-3%] %* >CON
)
echo [%DATE%][%TIME:~0,-3%] %*
echo/
echo/%USERNAME%@%COMPUTERNAME% "%CD%"
echo/[%DATE%][%TIME:~0,-3%] %*
exit /b %ERRORLEVEL%

:run_cmd
@@ -396,9 +402,3 @@
call :print_to_console $ %*
call %*
exit /b %ERRORLEVEL%

:skip_failures
rem ****************************************************************************
rem Skip known failures
rem ****************************************************************************
exit /b 0
6 changes: 5 additions & 1 deletion tests/scripts/run-xunit-perf.sh
@@ -340,7 +340,7 @@ for testcase in ${tests[@]}; do
echo "----------"
echo " Running $testname"
echo "----------"
run_command $stabilityPrefix ./corerun PerfHarness.dll $test --perf:runid Perf --perf:collect $collectionflags || exit 1
run_command $stabilityPrefix ./corerun PerfHarness.dll $test --perf:runid Perf --perf:collect $collectionflags 1>"Perf-$filename.log" 2>&1 || exit 1
if [ -d "$BENCHVIEW_TOOLS_PATH" ]; then
run_command python3.5 "$BENCHVIEW_TOOLS_PATH/measurement.py" xunit "Perf-$filename.xml" --better desc $hasWarmupRun --append || {
echo [ERROR] Failed to generate BenchView data;
@@ -349,6 +349,10 @@
fi

# Rename file to be archived by Jenkins.
mv -f "Perf-$filename.log" "$CORECLR_REPO/Perf-$filename-$perfCollection.log" || {
echo [ERROR] Failed to move "Perf-$filename.log" to "$CORECLR_REPO".
exit 1;
}
mv -f "Perf-$filename.xml" "$CORECLR_REPO/Perf-$filename-$perfCollection.xml" || {
echo [ERROR] Failed to move "Perf-$filename.xml" to "$CORECLR_REPO".
exit 1;
4 changes: 1 addition & 3 deletions tests/src/Common/PerfHarness/PerfHarness.csproj
@@ -6,9 +6,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="xunit.performance.api">
<Version>1.0.0-beta-build0006</Version>
</PackageReference>
<PackageReference Include="xunit.performance.api" Version="1.0.0-beta-build0007" />
Member: I don't think this works with all versions of msbuild - @weshaggard can you comment?

Member: Correct, this will only work if you load this project with the new MSBuild 15 or the MSBuild Core that comes with the SDK. If someone tries to load this project in VS2015 or MSBuild < 15, it will blow up. It might not be an issue for this particular project, but it is something to keep in mind.
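For context, these are the two forms under discussion, mirroring the PerfHarness.csproj change above (versions taken from this diff):

    <!-- nested metadata element: the form being replaced -->
    <PackageReference Include="xunit.performance.api">
      <Version>1.0.0-beta-build0006</Version>
    </PackageReference>

    <!-- attribute form: per the comment above, this only loads under MSBuild 15+ / the SDK's MSBuild -->
    <PackageReference Include="xunit.performance.api" Version="1.0.0-beta-build0007" />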

Author: @weshaggard Is there a way to import dir.props and dir.targets in the new csproj file? I would like to have the version of the package in a central place. Is that a supported scenario?

Member: Yes, you can still do an import the same way as you used to be able to: <Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), dir.props))\dir.props" />
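A rough sketch of what that suggestion could look like in PerfHarness.csproj, assuming the repo's dir.props ultimately exposes the XUnitPerformanceApiVersion property added to dependencies.props in this PR (the SDK-style project shape and the property flow are assumptions, not part of this diff):

    <Project Sdk="Microsoft.NET.Sdk">
      <!-- hypothetical: import the repo-level props so shared version properties are visible here -->
      <Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), dir.props))\dir.props" />

      <ItemGroup>
        <!-- assumes XUnitPerformanceApiVersion reaches this project through the import -->
        <PackageReference Include="xunit.performance.api" Version="$(XUnitPerformanceApiVersion)" />
      </ItemGroup>
    </Project>

As the follow-up below shows, restore succeeds with such an import but publish does not, so this is only an illustration of the syntax.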

Author: If I add <Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), dir.props))\dir.props" /> to the PerfHarness.csproj, then I can restore, but I cannot publish. What am I missing?

$ "%CORECLR_ROOT%Tools\dotnetcli\dotnet.exe" --info
.NET Command Line Tools (2.0.0-preview1-005724)

Product Information:
 Version:            2.0.0-preview1-005724
 Commit SHA-1 hash:  e391b5d5f3

Runtime Environment:
 OS Name:     Windows
 OS Version:  10.0.15063
 OS Platform: Windows
 RID:         win10-x64
 Base Path:   %CORECLR_ROOT%\Tools\dotnetcli\sdk\2.0.0-preview1-005724\

$ "%CORECLR_ROOT%\Tools\dotnetcli\dotnet.exe" restore "%CORECLR_ROOT%\tests\src\Common\PerfHarness\PerfHarness.csproj"
%CORECLR_ROOT%\tests\src\Common\PerfHarness\PerfHarness.csproj
  Restoring packages for %CORECLR_ROOT%\tests\src\Common\PerfHarness\PerfHarness.csproj...
  Lock file has not changed. Skipping lock file write. Path: %CORECLR_ROOT%\tests\src\Common\PerfHarness\obj\project.assets.json
  Restore completed in 280.64 ms for %CORECLR_ROOT%\tests\src\Common\PerfHarness\PerfHarness.csproj.

  NuGet Config files used:
      %CORECLR_ROOT%\tests\src\NuGet.Config
      %APPDATA%\NuGet\NuGet.Config
      C:\Program Files (x86)\NuGet\Config\Microsoft.VisualStudio.Offline.config

  Feeds used:
      https://dotnet.myget.org/F/dotnet-core/api/v3/index.json
      https://www.myget.org/F/nugetbuild/api/v3/index.json
      https://api.nuget.org/v3/index.json

$ "%CORECLR_ROOT%\Tools\dotnetcli\dotnet.exe" publish "%CORECLR_ROOT%\tests\src\Common\PerfHarness\PerfHarness.csproj" -c Release -o "%CORECLR_ROOT%\sandbox"

Microsoft (R) Build Engine version 15.2.93.5465
Copyright (C) Microsoft Corporation. All rights reserved.

%CORECLR_ROOT%\Tools\Microsoft.CSharp.Core.targets(106,11): error MSB4064: The "OverrideToolHost" parameter is not supported by the "Csc" task. Verify the parameter exists on the task, and it is a settable public instance property. [%CORECLR_ROOT%\tests\src\Common\PerfHarness\PerfHarness.csproj]
%CORECLR_ROOT%\Tools\Microsoft.CSharp.Core.targets(67,5): error MSB4063: The "Csc" task could not be initialized with its input parameters.  [%CORECLR_ROOT%\tests\src\Common\PerfHarness\PerfHarness.csproj]

</ItemGroup>

</Project>
12 changes: 6 additions & 6 deletions tests/src/Common/external/external.depproj
@@ -22,19 +22,19 @@
<Version>1.1.1</Version>
</PackageReference>
<PackageReference Include="xunit.performance.api">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
<Version>1.0.3-alpha-experimental</Version>
<Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Newtonsoft.Json">
<Version>9.0.1</Version>
@@ -43,7 +43,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
<Version>1.0.2-prerelease-00177</Version>
<Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>
12 changes: 6 additions & 6 deletions tests/src/JIT/config/benchmark+roslyn/benchmark+roslyn.csproj
@@ -11,19 +11,19 @@
<Version>1.1.1</Version>
</PackageReference>
<PackageReference Include="xunit.performance.api">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
<Version>1.0.3-alpha-experimental</Version>
<Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.NETCore.Platforms">
<Version>2.0.0-preview2-25302-03</Version>
@@ -80,7 +80,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
<Version>1.0.2-prerelease-00177</Version>
<Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>
Original file line number Diff line number Diff line change
@@ -8,19 +8,19 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.performance.api">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
<Version>1.0.0-beta-build0006</Version>
<Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
<Version>1.0.3-alpha-experimental</Version>
<Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.NETCore.Platforms">
<Version>2.0.0-preview2-25302-03</Version>
@@ -80,7 +80,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
<Version>1.0.2-prerelease-00177</Version>
<Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>