name: Run all unittests

on:
  workflow_call:
    inputs:
      docker-image:
        required: true
        type: string
        description: Name of the docker image to use.
      python-version:
        required: false
        type: string
        default: '3.10'

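# Example caller (illustrative sketch, not part of this workflow): assuming this file
# lives at .github/workflows/_unittest.yml in the same repository (the path is an
# assumption) and using a placeholder image name, another workflow could invoke it as:
#
#   jobs:
#     unittest:
#       uses: ./.github/workflows/_unittest.yml
#       with:
#         docker-image: executorch-ubuntu-22.04-clang12  # placeholder
#         python-version: '3.10'
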
jobs:
  linux:
    uses: pytorch/test-infra/.github/workflows/linux_job.yml@release/2.5
    with:
      runner: linux.2xlarge
      docker-image: ${{ inputs.docker-image }}
      submodules: 'true'
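      # Check out the PR head commit on pull_request events, otherwise the pushed commit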
      ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      timeout: 90
      script: |
        set -eux

        # The generic Linux job chooses to use the base env, not the one set up by the image
        CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
        conda activate "${CONDA_ENV}"

        # Set up SwiftShader and the Vulkan SDK, which are required to build the Vulkan delegate
        source .ci/scripts/setup-vulkan-linux-deps.sh

        # Set up Linux dependencies and build ExecuTorch with the Python bindings enabled
        PYTHON_EXECUTABLE=python \
        EXECUTORCH_BUILD_PYBIND=ON \
        CMAKE_ARGS="-DEXECUTORCH_BUILD_XNNPACK=ON -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON" \
        .ci/scripts/setup-linux.sh cmake

        # Install llama3_2_vision dependencies.
        PYTHON_EXECUTABLE=python ./examples/models/llama3_2_vision/install_requirements.sh

        # Run pytest with coverage
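        # -n auto parallelizes tests across available CPUs (pytest-xdist); the --cov flags require pytest-cov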
        pytest -n auto --cov=./ --cov-report=xml
        # Run gtest
        LLVM_PROFDATA=llvm-profdata-12 LLVM_COV=llvm-cov-12 \
        test/run_oss_cpp_tests.sh

  macos:
    uses: pytorch/test-infra/.github/workflows/macos_job.yml@release/2.5
    with:
      runner: macos-m1-stable
      python-version: '3.11'
      submodules: 'true'
      ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      script: |
        set -eux

        bash .ci/scripts/setup-conda.sh

        # Create temp directory for sccache shims
        export TMP_DIR=$(mktemp -d)
        export PATH="${TMP_DIR}:$PATH"
        trap 'rm -rfv ${TMP_DIR}' EXIT

        # Set up macOS dependencies as there is no Docker support on macOS at the moment
        PYTHON_EXECUTABLE=python \
        EXECUTORCH_BUILD_PYBIND=ON \
        CMAKE_ARGS="-DEXECUTORCH_BUILD_COREML=ON -DEXECUTORCH_BUILD_MPS=ON -DEXECUTORCH_BUILD_XNNPACK=ON -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON" \
        ${CONDA_RUN} --no-capture-output \
        .ci/scripts/setup-macos.sh cmake

        # Install llama3_2_vision dependencies.
        PYTHON_EXECUTABLE=python ${CONDA_RUN} --no-capture-output \
        ./examples/models/llama3_2_vision/install_requirements.sh

        # Run pytest with coverage
        ${CONDA_RUN} pytest -n auto --cov=./ --cov-report=xml
        # Run gtest
        LLVM_PROFDATA="xcrun llvm-profdata" LLVM_COV="xcrun llvm-cov" \
        ${CONDA_RUN} test/run_oss_cpp_tests.sh