lightning/.azure/ipu-tests.yml

trigger:
  tags:
    include:
      - '*'
  branches:
    include:
      - master
      - release/*
      - refs/tags/*

pr:
  - master
  - release/*

variables:
  - name: poplar_sdk
    value: "poplar_sdk-ubuntu_20_04-2.3.1+793-89796d462d"
jobs:
  - job: testing
    # how long to run the job before automatically cancelling
    timeoutInMinutes: "15"
    pool: graphcore-ipus
    workspace:
      clean: all

    steps:
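      # Each SDK-dependent step re-sources the Poplar/PopART enable.sh scripts because
      # environment changes do not persist between Azure Pipelines script steps.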
      - script: tar -xvzf /opt/poplar/${{ variables.poplar_sdk }}.tar.gz
        displayName: "Extract Poplar SDK"
      - script: |
          set -eux
          pip debug --verbose
          pip install ${{ variables.poplar_sdk }}/poptorch-*ubuntu*.whl
        displayName: "Install poptorch"
      - script: |
          set -eux
          source ${{ variables.poplar_sdk }}/poplar-ubuntu*/enable.sh
          NUM_IPUS=$(gc-info --ipu-count)
          if [[ -z "${NUM_IPUS}" ]] || [[ "${NUM_IPUS}" -eq 0 ]]; then
            echo "No IPUs found to reset. Exiting"
            exit 1
          fi
          echo "Resetting parity on ${NUM_IPUS} IPU devices"
          i=0
          while [[ "${i}" -lt "${NUM_IPUS}" ]]; do
            gc-reset -d "${i}"
            i=$((i + 1))
          done
        displayName: "Reset IPU devices"
      - bash: |
          export GIT_TERMINAL_PROMPT=1
          python ./requirements/pytorch/adjust-versions.py requirements/pytorch/extra.txt
          python ./requirements/pytorch/adjust-versions.py requirements/pytorch/examples.txt
          pip install -e . --requirement ./requirements/pytorch/devel.txt
          pip list
        env:
          PACKAGE_NAME: pytorch
          FREEZE_REQUIREMENTS: 1
        displayName: 'Install dependencies'
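      # Quick sanity check that the environment resolves and torch imports before the heavier steps.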
      - bash: |
          python requirements/collect_env_details.py
          python -c "import torch"
        displayName: 'Env details'
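      # poptorch needs both the Poplar and PopART runtimes enabled in the current shell,
      # so both enable.sh scripts are sourced before importing it.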
      - script: |
          set -eux
          source ${{ variables.poplar_sdk }}/poplar-ubuntu*/enable.sh
          source ${{ variables.poplar_sdk }}/popart-ubuntu*/enable.sh
          python -c "import poptorch; print(poptorch.__version__)"
        displayName: "Check poptorch installation"
      - bash: |
          source ${{ variables.poplar_sdk }}/poplar-ubuntu*/enable.sh
          source ${{ variables.poplar_sdk }}/popart-ubuntu*/enable.sh
          python -m coverage run --source pytorch_lightning -m pytest tests/tests_pytorch -vv --durations=50
        env:
          MKL_THREADING_LAYER: "GNU"
          POPTORCH_WAIT_FOR_IPU: 1
          PL_RUN_IPU_TESTS: 1
        displayName: 'Testing: PyTorch standard'