Tests - Refine test cases for microbenchmark #268

Merged (6 commits) on Dec 16, 2021
@@ -3,29 +3,20 @@
 
 """Tests for cpu-memory-bw-latency benchmark."""
 
-from pathlib import Path
-import os
 import unittest
 
+from tests.helper.testcase import BenchmarkTestCase
 from superbench.benchmarks import BenchmarkRegistry, BenchmarkType, ReturnCode, Platform
 
 
-class CpuMemBwLatencyBenchmarkTest(unittest.TestCase):
+class CpuMemBwLatencyBenchmarkTest(BenchmarkTestCase, unittest.TestCase):
     """Test class for cpu-memory-bw-latency benchmark."""
-    def setUp(self):
-        """Method called to prepare the test fixture."""
-        # Create fake binary file just for testing.
-        self.__curr_micro_path = os.environ.get('SB_MICRO_PATH', '')
-        os.environ['SB_MICRO_PATH'] = '/tmp/superbench/'
-        binary_path = Path(os.getenv('SB_MICRO_PATH'), 'bin')
-        binary_path.mkdir(parents=True, exist_ok=True)
-        self.__binary_file = binary_path / 'mlc'
-        self.__binary_file.touch(mode=0o755, exist_ok=True)
-
-    def tearDown(self):
-        """Method called after the test method has been called and the result recorded."""
-        self.__binary_file.unlink()
-        os.environ['SB_MICRO_PATH'] = self.__curr_micro_path
+    @classmethod
+    def setUpClass(cls):
+        """Hook method for setting up class fixture before running tests in the class."""
+        super().setUpClass()
+        cls.createMockEnvs(cls)
+        cls.createMockFiles(cls, ['bin/mlc'])
 
     def test_cpu_mem_bw_latency_benchmark_empty_param(self):
         """Test cpu-memory-bw-latency benchmark command generation with empty parameter."""
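
The per-test fixture work moves into the shared BenchmarkTestCase in tests/helper/testcase.py, which is not part of this diff. A minimal sketch of what such a base class could look like, assuming a temporary directory stands in for the previously hard-coded /tmp/superbench/; the createMockEnvs and createMockFiles names come from the diff, but their bodies below are guesses:

import os
import tempfile
import unittest
from pathlib import Path


class BenchmarkTestCase(unittest.TestCase):
    """Shared fixture that owns a fake SB_MICRO_PATH tree for micro-benchmark tests (sketch)."""
    _UNSET = object()

    @classmethod
    def setUpClass(cls):
        """Create one temporary directory reused by every test in the class."""
        cls._tmp_dir = tempfile.mkdtemp(prefix='sb_test_')
        cls._mock_files = []
        cls._saved_micro_path = cls._UNSET

    @classmethod
    def tearDownClass(cls):
        """Delete mock files and restore the original SB_MICRO_PATH, if it was patched."""
        for path in cls._mock_files:
            if path.exists():
                path.unlink()
        if cls._saved_micro_path is not cls._UNSET:
            if cls._saved_micro_path is None:
                os.environ.pop('SB_MICRO_PATH', None)
            else:
                os.environ['SB_MICRO_PATH'] = cls._saved_micro_path

    # The subclasses call these as cls.createMockEnvs(cls), so the first argument is the class.
    def createMockEnvs(cls):
        """Point SB_MICRO_PATH at the temporary directory, remembering the previous value."""
        cls._saved_micro_path = os.environ.get('SB_MICRO_PATH')
        os.environ['SB_MICRO_PATH'] = cls._tmp_dir

    def createMockFiles(cls, files):
        """Create executable placeholder binaries (for example 'bin/mlc') under SB_MICRO_PATH."""
        for rel_path in files:
            path = Path(cls._tmp_dir) / rel_path
            path.parent.mkdir(parents=True, exist_ok=True)
            path.touch(mode=0o755, exist_ok=True)
            cls._mock_files.append(path)

Compared with the removed setUp/tearDown pairs, this creates the fake binaries once per class instead of once per test and keeps the environment restore logic in a single place.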
@@ -3,29 +3,22 @@
 
 """Tests for gemm-flops benchmark."""
 
-import os
 import unittest
-from pathlib import Path
 
 from tests.helper import decorator
+from tests.helper.testcase import BenchmarkTestCase
 from superbench.common.utils import device_manager as dm
 from superbench.benchmarks import BenchmarkRegistry, ReturnCode, Platform, BenchmarkType
 
 
-class CudaGemmFlopsBenchmarkTest(unittest.TestCase):
+class CudaGemmFlopsBenchmarkTest(BenchmarkTestCase, unittest.TestCase):
     """Tests for CudaGemmFlopsBenchmark benchmark."""
-    def setUp(self):
-        """Method called to prepare the test fixture."""
-        # Create fake binary file just for testing.
-        os.environ['SB_MICRO_PATH'] = '/tmp/superbench/'
-        binary_path = os.path.join(os.getenv('SB_MICRO_PATH'), 'bin')
-        Path(binary_path).mkdir(parents=True, exist_ok=True)
-        self.__binary_file = Path(os.path.join(binary_path, 'cutlass_profiler'))
-        self.__binary_file.touch(mode=0o755, exist_ok=True)
-
-    def tearDown(self):
-        """Method called after the test method has been called and the result recorded."""
-        self.__binary_file.unlink()
+    @classmethod
+    def setUpClass(cls):
+        """Hook method for setting up class fixture before running tests in the class."""
+        super().setUpClass()
+        cls.createMockEnvs(cls)
+        cls.createMockFiles(cls, ['bin/cutlass_profiler'])
 
     @decorator.cuda_test
     def test_flops_performance_cuda(self):
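
The @decorator.cuda_test marker comes from tests/helper/decorator, which is also outside this diff; it presumably skips GPU-dependent tests on machines without a CUDA device. A sketch of one way such a decorator could be defined (the SB_TEST_CUDA variable name is an assumption):

import os
import unittest

# Skip CUDA-only tests when the environment opts out of GPU testing (assumed mechanism).
cuda_test = unittest.skipIf(
    os.environ.get('SB_TEST_CUDA', '1') == '0',
    'Skip tests that need a CUDA device.',
)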
tests/benchmarks/micro_benchmarks/test_cuda_memory_bw_performance.py: 306 changes (15 additions, 291 deletions); large diff not rendered here.

tests/benchmarks/micro_benchmarks/test_cuda_nccl_bw_performance.py: 389 changes (36 additions, 353 deletions); large diff not rendered here.
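
For context, micro-benchmark tests in this suite typically build a benchmark context and launch it through the registry, now against the mocked binaries instead of a real installation. A rough sketch of that pattern, assuming BenchmarkRegistry exposes create_benchmark_context, is_benchmark_context_valid, and launch_benchmark as superbench's example scripts use them; the benchmark name and assertions below are illustrative, not taken from this PR:

# Illustrative only: a method body that would live inside a class such as
# CpuMemBwLatencyBenchmarkTest above, relying on the class-level mock fixture.
def test_command_generation_with_mocked_binary(self):
    """Launch the benchmark against the fake 'bin/mlc' created by the class fixture."""
    context = BenchmarkRegistry.create_benchmark_context(
        'cpu-memory-bw-latency', platform=Platform.CPU
    )
    # The context should resolve to a registered benchmark even without real hardware.
    assert (BenchmarkRegistry.is_benchmark_context_valid(context))
    benchmark = BenchmarkRegistry.launch_benchmark(context)
    assert (benchmark is not None)
    assert (benchmark.type == BenchmarkType.MICRO)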