---
# Reusable build workflow: builds PyTorch inside the CI docker image on a
# Linux runner and (optionally) uploads the resulting artifacts to S3.
name: linux-build

on:
  workflow_call:
    inputs:
      build-environment:
        required: true
        type: string
        description: Top-level label for what's being built/tested.
      docker-image-name:
        required: true
        type: string
        description: Name of the base docker image to build with.
      build-generates-artifacts:
        required: false
        type: boolean
        default: true
        description: If set, upload generated build artifacts.
      build-with-debug:
        required: false
        type: boolean
        default: false
        description: If set, build in debug mode.
      sync-tag:
        required: false
        type: string
        default: ""
        description: |
          If this is set, our linter will use this to make sure that every
          other job with the same `sync-tag` is identical.
      cuda-arch-list:
        required: false
        type: string
        default: "5.2"
        description: |
          List of CUDA architectures CI build should target.
      runner:
        required: false
        type: string
        default: "linux.2xlarge"
        description: |
          Label of the runner this build should run on.
      test-matrix:
        required: false
        type: string
        description: |
          An optional JSON description of what test configs to run later on.
          This is moved here from the Linux test workflow so that we can apply
          filter logic using test-config labels earlier and skip unnecessary
          builds
      selected-test-configs:
        description: |
          A comma-separated list of test configurations from the test matrix
          to keep. The empty list means we are going to keep every
          configuration by default.
        required: false
        type: string
        default: ""
      s3-bucket:
        description: S3 bucket to download artifact
        required: false
        type: string
        default: "gha-artifacts"
      aws-role-to-assume:
        description: Role to assume for downloading artifacts
        required: false
        type: string
        default: ""
    secrets:
      HUGGING_FACE_HUB_TOKEN:
        required: false
        description: |
          HF Auth token to avoid rate limits when downloading models or
          datasets from hub
    outputs:
      docker-image:
        value: ${{ jobs.build.outputs.docker-image }}
        description: The docker image containing the built PyTorch.
      test-matrix:
        value: ${{ jobs.build.outputs.test-matrix }}
        description: An optional JSON description of what test configs to run later on.

jobs:
  build:
    # Don't run on forked repos
    if: github.repository_owner == 'pytorch'
    runs-on: ${{ inputs.runner }}
    timeout-minutes: 240
    outputs:
      docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
      test-matrix: ${{ steps.filter.outputs.test-matrix }}
    steps:
      - name: Setup SSH (Click me for login details)
        uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.4
        with:
          github-secret: ${{ secrets.GITHUB_TOKEN }}

      # [pytorch repo ref]
      # Use a pytorch/pytorch reference instead of a reference to the local
      # checkout because when we run this action we don't *have* a local
      # checkout. In other cases you should prefer a local checkout.
      - name: Checkout PyTorch
        uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.4

      - name: Setup Linux
        uses: ./.github/actions/setup-linux

      - name: configure aws credentials
        uses: aws-actions/configure-aws-credentials@v3
        if: ${{ inputs.aws-role-to-assume != '' }}
        with:
          role-to-assume: ${{ inputs.aws-role-to-assume }}
          role-session-name: gha-linux-build
          aws-region: us-east-1

      - name: Calculate docker image
        id: calculate-docker-image
        uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.4
        with:
          docker-image-name: ${{ inputs.docker-image-name }}

      - name: Use following to pull public copy of the image
        id: print-ghcr-mirror
        env:
          ECR_DOCKER_IMAGE: ${{ steps.calculate-docker-image.outputs.docker-image }}
        shell: bash
        run: |
          tag=${ECR_DOCKER_IMAGE##*/}
          echo "docker pull ghcr.io/pytorch/ci-image:${tag/:/-}"

      - name: Pull docker image
        uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.4
        with:
          docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}

      - name: Parse ref
        id: parse-ref
        run: .github/scripts/parse_ref.py

      - name: Get workflow job id
        id: get-job-id
        uses: ./.github/actions/get-workflow-job-id
        if: always()
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      # Apply the filter logic to the build step too if the test-config label is
      # already there
      - name: Select all requested test configurations (if the test matrix is available)
        id: filter
        uses: ./.github/actions/filter-test-configs
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          test-matrix: ${{ inputs.test-matrix }}
          selected-test-configs: ${{ inputs.selected-test-configs }}
          job-name: ${{ steps.get-job-id.outputs.job-name }}

      - name: Download pytest cache
        uses: ./.github/actions/pytest-cache-download
        continue-on-error: true
        with:
          cache_dir: .pytest_cache
          job_identifier: ${{ github.workflow }}_${{ inputs.build-environment }}
          s3_bucket: ${{ inputs.s3-bucket }}

      - name: Build
        if: steps.filter.outputs.is-test-matrix-empty == 'False' || inputs.test-matrix == ''
        id: build
        env:
          BUILD_ENVIRONMENT: ${{ inputs.build-environment }}
          BRANCH: ${{ steps.parse-ref.outputs.branch }}
          # TODO duplicated
          AWS_DEFAULT_REGION: us-east-1
          PR_NUMBER: ${{ github.event.pull_request.number }}
          SHA1: ${{ github.event.pull_request.head.sha || github.sha }}
          SCCACHE_BUCKET: ossci-compiler-cache-circleci-v2
          SCCACHE_S3_KEY_PREFIX: ${{ github.workflow }}
          XLA_CLANG_CACHE_S3_BUCKET_NAME: ossci-compiler-clang-cache-circleci-xla
          PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
          TORCH_CUDA_ARCH_LIST: ${{ inputs.cuda-arch-list }}
          DOCKER_IMAGE: ${{ steps.calculate-docker-image.outputs.docker-image }}
          XLA_CUDA: ${{ contains(inputs.build-environment, 'xla') && '0' || '' }}
          DEBUG: ${{ inputs.build-with-debug && '1' || '0' }}
          OUR_GITHUB_JOB_ID: ${{ steps.get-job-id.outputs.job-id }}
          HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
        run: |
          # detached container should get cleaned up by teardown_ec2_linux
          container_name=$(docker run \
            -e BUILD_ENVIRONMENT \
            -e MAX_JOBS="$(nproc --ignore=2)" \
            -e AWS_DEFAULT_REGION \
            -e PR_NUMBER \
            -e SHA1 \
            -e BRANCH \
            -e SCCACHE_BUCKET \
            -e SCCACHE_S3_KEY_PREFIX \
            -e XLA_CUDA \
            -e XLA_CLANG_CACHE_S3_BUCKET_NAME \
            -e SKIP_SCCACHE_INITIALIZATION=1 \
            -e TORCH_CUDA_ARCH_LIST \
            -e PR_LABELS \
            -e OUR_GITHUB_JOB_ID \
            -e HUGGING_FACE_HUB_TOKEN \
            --env-file="/tmp/github_env_${GITHUB_RUN_ID}" \
            --security-opt seccomp=unconfined \
            --cap-add=SYS_PTRACE \
            --tty \
            --detach \
            --user jenkins \
            -v "${GITHUB_WORKSPACE}:/var/lib/jenkins/workspace" \
            -w /var/lib/jenkins/workspace \
            "${DOCKER_IMAGE}"
          )
          docker exec -t "${container_name}" sh -c '.ci/pytorch/build.sh'

      - name: Archive artifacts into zip
        if: inputs.build-generates-artifacts && steps.build.outcome != 'skipped'
        run: |
          zip -1 -r artifacts.zip dist/ build/custom_test_artifacts build/lib build/bin .additional_ci_files

      - name: Store PyTorch Build Artifacts on S3
        uses: seemethere/upload-artifact-s3@v5
        if: inputs.build-generates-artifacts && steps.build.outcome != 'skipped'
        with:
          name: ${{ inputs.build-environment }}
          retention-days: 14
          if-no-files-found: error
          path: artifacts.zip
          s3-bucket: ${{ inputs.s3-bucket }}

      - name: Upload sccache stats
        if: steps.build.outcome != 'skipped'
        uses: seemethere/upload-artifact-s3@v5
        with:
          s3-prefix: |
            ${{ github.repository }}/${{ github.run_id }}/${{ github.run_attempt }}/artifact
          retention-days: 365
          if-no-files-found: warn
          path: sccache-stats-*.json
          s3-bucket: ${{ inputs.s3-bucket }}

      - name: Teardown Linux
        uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.4
        if: always()