Skip to content

Commit

Permalink
Merge pull request microsoft#4 from microsoft/master
Browse files Browse the repository at this point in the history
Merge latest changes from DeepSpeed
  • Loading branch information
sdtblck committed Jan 29, 2021
2 parents cb670be + 91b1b7f commit 9261197
Show file tree
Hide file tree
Showing 2 changed files with 55 additions and 8 deletions.
44 changes: 44 additions & 0 deletions .github/workflows/torch16.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Unit test config for manual use on torch1.6 runners

name: Torch16

# Controls when the action will run.
# NOTE: generic YAML 1.1 parsers read the bare key `on` as boolean `true`;
# GitHub's loader handles it, so this is idiomatic for Actions workflows.
on:
  # pull_request trigger kept for reference but disabled — this workflow is
  # meant to be launched by hand on the torch1.6 self-hosted runners only.
  # pull_request:
  #   paths-ignore:
  #     - 'docs/**'

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    # The type of runner that the job will run on:
    # a self-hosted runner labeled for the torch 1.6 environment.
    runs-on: [self-hosted, torch1.6]

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v2

      # Print toolchain/GPU diagnostics so failures are debuggable from the log.
      - name: environment
        run: |
          nvidia-smi
          which python
          python --version
          which nvcc
          nvcc --version
          python -c "import torch; print('torch:', torch.__version__, torch)"
          python -c "import torch; print('CUDA available:', torch.cuda.is_available())"

      # Install the package (with dev extras) and dump the DeepSpeed report.
      - name: Install deepspeed
        run: |
          pip install .[dev]
          ds_report

      # Run the unit suite with a clean per-run torch-extensions build cache,
      # so stale JIT-compiled ops from a previous run cannot leak in.
      - name: Unit tests
        run: |
          if [[ -d ./torch-extensions ]]; then rm -rf ./torch-extensions; fi
          TORCH_EXTENSIONS_DIR=./torch-extensions pytest --durations=0 --forked --verbose -x tests/unit/
19 changes: 11 additions & 8 deletions tests/unit/test_cuda_backward.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,9 @@ def check_equal(first, second, atol=1e-2, verbose=False):
diction_x = {}
diction_y = {}

for i, (x, y) in enumerate(zip(first, second)):
print(x[1], y[1])
if verbose:
for i, (x, y) in enumerate(zip(first, second)):
print(x[1], y[1])

for i, (x, y) in enumerate(zip(first, second)):
k = 0
Expand All @@ -38,18 +39,20 @@ def check_equal(first, second, atol=1e-2, verbose=False):
diction_y[k, y[1]] = y[0]
if verbose:
print()
for i, (x, y) in enumerate(zip(diction_x, diction_y)):
print(x, y)
for i, (x, y) in enumerate(zip(diction_x, diction_y)):
print(x, y)

for i, (x, y) in enumerate(zip(diction_x, diction_y)):
if (x[0] == 1): continue
print("checking ", x[1], ":")
if verbose:
print("checking ", x[1], ":")
y = diction_y[x[0], x[1]]
x = diction_x[x[0], x[1]]
x = x.cpu().detach().numpy()
y = y.cpu().detach().numpy()
print(x)
print(y)
if verbose:
print(x)
print(y)

avgx = np.sum(abs(x), dtype=float)
countx = x.shape[0]
Expand All @@ -60,8 +63,8 @@ def check_equal(first, second, atol=1e-2, verbose=False):
if avgx != float('inf') and avgx != -float('inf'):
avgx = avgx / countx
tollerance = avgx * atol
print("tollerance is ", tollerance)
if verbose:
print("tollerance is ", tollerance)
print("x = {}".format(x.flatten()))
print("y = {}".format(y.flatten()))
print('-' * 80)
Expand Down

0 comments on commit 9261197

Please sign in to comment.