#!/usr/bin/env bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Usage: ci_sanity.sh [--pep8] [--incremental] [bazel flags]
#
# Options:
#  (none)         run all sanity checks, including the Python 3 pylint check
#                 and bazel nobuild
#  --pep8         run the pep8 check only
#  --incremental  perform the checks incrementally, using only the files
#                 changed in the latest commit
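#
# Example invocations (illustrative):
#   ci_sanity.sh                 # run every sanity step
#   ci_sanity.sh --pep8          # run only the pep8 check
#   ci_sanity.sh --incremental   # check only files changed in the last commit
# Any other argument is forwarded to bazel via BAZEL_FLAGS.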

# Current script directory
SCRIPT_DIR=$( cd "${0%/*}" && pwd -P )
source "${SCRIPT_DIR}/builds/builds_common.sh"

ROOT_DIR=$( cd "$SCRIPT_DIR/../../.." && pwd -P )

# Helper functions
die() {
  echo "$@"
  exit 1
}

num_cpus() {
  # Get the number of CPUs
  if [[ -f /proc/cpuinfo ]]; then
    N_CPUS=$(grep -c ^processor /proc/cpuinfo)
  else
    # Fallback method
    N_CPUS=$(getconf _NPROCESSORS_ONLN)
  fi
  if [[ -z ${N_CPUS} ]]; then
    die "ERROR: Unable to determine the number of CPUs"
  fi

  echo ${N_CPUS}
}

# Helper functions for examining changed files in the last non-merge git
# commit.

# Get the hash of the last non-merge git commit on the current branch.
# Usage: get_last_non_merge_git_commit
get_last_non_merge_git_commit() {
  git rev-list --no-merges -n 1 HEAD
}

# List files changed (i.e., added, removed or revised) in the last non-merge
# git commit.
# Usage: get_changed_files_in_last_non_merge_git_commit
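# Example output (illustrative): one repository-relative path per line, e.g.
#   tensorflow/python/ops/math_ops.py
#   tensorflow/core/BUILD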
get_changed_files_in_last_non_merge_git_commit() {
  git diff-tree --no-commit-id --name-only -r $(get_last_non_merge_git_commit)
}

# List Python files changed in the last non-merge git commit that still exist,
# i.e., not removed.
# Usage: get_py_files_to_check [--incremental]
get_py_files_to_check() {
  if [[ "$1" == "--incremental" ]]; then
    CHANGED_PY_FILES=$(get_changed_files_in_last_non_merge_git_commit | \
                       grep '.*\.py$')

    # Do not include files removed in the last non-merge commit.
    PY_FILES=""
    for PY_FILE in ${CHANGED_PY_FILES}; do
      if [[ -f "${PY_FILE}" ]]; then
        PY_FILES="${PY_FILES} ${PY_FILE}"
      fi
    done

    echo "${PY_FILES}"
  else
    find tensorflow -name '*.py'
  fi
}

# Subfunctions for substeps
# Run pylint
do_pylint() {
  # Usage: do_pylint [--incremental]
  #
  # Options:
  #   --incremental  Perform the check only on the Python files changed in
  #                  the last non-merge git commit.

  # Use this list to whitelist pylint errors
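  # Each whitelist entry is a single space-separated word: a regex of the form
  # ^<file path>.*\[<pylint code>.*<keyword> that is matched against pylint's
  # parseable output to suppress a known, accepted finding.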
  ERROR_WHITELIST="^tensorflow/python/framework/function_test\.py.*\[E1123.*noinline "\
"^tensorflow/python/platform/default/_gfile\.py.*\[E0301.*non-iterator "\
"^tensorflow/python/platform/default/_googletest\.py.*\[E0102.*function\salready\sdefined "\
"^tensorflow/python/feature_column/feature_column_test\.py.*\[E0110.*abstract-class-instantiated "\
"^tensorflow/contrib/layers/python/layers/feature_column\.py.*\[E0110.*abstract-class-instantiated "\
"^tensorflow/contrib/eager/python/evaluator\.py.*\[E0202.*method-hidden "\
"^tensorflow/contrib/eager/python/metrics_impl\.py.*\[E0202.*method-hidden "\
"^tensorflow/contrib/rate/rate\.py.*\[E0202.*method-hidden "\
"^tensorflow/python/training/tracking/tracking\.py.*\[E0202.*method-hidden "\
"^tensorflow/python/platform/gfile\.py.*\[E0301.*non-iterator "\
"^tensorflow/python/keras/callbacks\.py.*\[E1133.*not-an-iterable "\
"^tensorflow/python/keras/engine/base_layer.py.*\[E0203.*access-member-before-definition "\
"^tensorflow/python/keras/layers/recurrent\.py.*\[E0203.*access-member-before-definition "\
"^tensorflow/python/kernel_tests/constant_op_eager_test.py.*\[E0303.*invalid-length-returned "\
"^tensorflow/python/keras/utils/data_utils.py.*\[E1102.*not-callable "\
"^tensorflow/python/autograph/.*_py3_test\.py.*\[E0001.*syntax-error "\
"^tensorflow/python/keras/preprocessing/image\.py.*\[E0240.*Inconsistent method resolution "

  echo "ERROR_WHITELIST=\"${ERROR_WHITELIST}\""

  if [[ $# != "0" ]] && [[ $# != "1" ]]; then
    echo "Invalid syntax when invoking do_pylint"
    echo "Usage: do_pylint [--incremental]"
    return 1
  fi

  PYLINT_BIN="python3 -m pylint"

  if [[ "$1" == "--incremental" ]]; then
    PYTHON_SRC_FILES=$(get_py_files_to_check --incremental)

    if [[ -z "${PYTHON_SRC_FILES}" ]]; then
      echo "do_pylint will NOT run due to --incremental flag and due to the "\
"absence of Python code changes in the last commit."
      return 0
    else
      # For incremental builds, we still check all Python files in case there
      # are function signature changes that affect unchanged Python files.
      PYTHON_SRC_FILES=$(get_py_files_to_check)
    fi
  elif [[ -z "$1" ]]; then
    PYTHON_SRC_FILES=$(get_py_files_to_check)
  else
    echo "Invalid syntax for invoking do_pylint"
    echo "Usage: do_pylint [--incremental]"
    return 1
  fi

  if [[ -z ${PYTHON_SRC_FILES} ]]; then
    echo "do_pylint found no Python files to check. Returning."
    return 0
  fi

  PYLINTRC_FILE="${SCRIPT_DIR}/pylintrc"

  if [[ ! -f "${PYLINTRC_FILE}" ]]; then
    die "ERROR: Cannot find pylint rc file at ${PYLINTRC_FILE}"
  fi

  NUM_SRC_FILES=$(echo ${PYTHON_SRC_FILES} | wc -w)
  NUM_CPUS=$(num_cpus)

  echo "Running pylint on ${NUM_SRC_FILES} files with ${NUM_CPUS} "\
"parallel jobs..."
  echo ""

  PYLINT_START_TIME=$(date +'%s')
  OUTPUT_FILE="$(mktemp)_pylint_output.log"
  ERRORS_FILE="$(mktemp)_pylint_errors.log"
  NONWL_ERRORS_FILE="$(mktemp)_pylint_nonwl_errors.log"

  rm -rf ${OUTPUT_FILE}
  rm -rf ${ERRORS_FILE}
  rm -rf ${NONWL_ERRORS_FILE}
  touch ${NONWL_ERRORS_FILE}

  ${PYLINT_BIN} --rcfile="${PYLINTRC_FILE}" --output-format=parseable \
      --jobs=${NUM_CPUS} ${PYTHON_SRC_FILES} > ${OUTPUT_FILE} 2>&1
  PYLINT_END_TIME=$(date +'%s')

  echo ""
  echo "pylint took $((PYLINT_END_TIME - PYLINT_START_TIME)) s"
  echo ""

  # Report only what we care about
  # Ref https://pylint.readthedocs.io/en/latest/technical_reference/features.html
  # E: all errors
  # W0311 bad-indentation
  # W0312 mixed-indentation
  # C0330 bad-continuation
  # C0301 line-too-long
  # C0326 bad-whitespace
  # W0611 unused-import
  # W0622 redefined-builtin
  grep -E '(\[E|\[W0311|\[W0312|\[C0330|\[C0301|\[C0326|\[W0611|\[W0622)' ${OUTPUT_FILE} > ${ERRORS_FILE}

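  # Compare each remaining pylint finding against ERROR_WHITELIST; anything
  # that does not match a whitelisted regex is counted as a real error.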
  N_ERRORS=0
  while read -r LINE; do
    IS_WHITELISTED=0
    for WL_REGEX in ${ERROR_WHITELIST}; do
      if echo ${LINE} | grep -q "${WL_REGEX}"; then
        echo "Found a whitelisted error:"
        echo "  ${LINE}"
        IS_WHITELISTED=1
      fi
    done

    if [[ ${IS_WHITELISTED} == "0" ]]; then
      echo "${LINE}" >> ${NONWL_ERRORS_FILE}
      echo "" >> ${NONWL_ERRORS_FILE}
      ((N_ERRORS++))
    fi
  done <${ERRORS_FILE}

  echo ""
  if [[ ${N_ERRORS} != 0 ]]; then
    echo "FAIL: Found ${N_ERRORS} non-whitelisted pylint errors:"
    cat "${NONWL_ERRORS_FILE}"
    return 1
  else
    echo "PASS: No non-whitelisted pylint errors were found."
    return 0
  fi
}

# Run pep8 check
do_pep8() {
  # Usage: do_pep8 [--incremental]
  # Options:
  #   --incremental  Perform the check only on the Python files changed in
  #                  the last non-merge git commit.

  PEP8_BIN="/usr/local/bin/pep8"
  PEP8_CONFIG_FILE="${SCRIPT_DIR}/pep8"

  if [[ "$1" == "--incremental" ]]; then
    PYTHON_SRC_FILES=$(get_py_files_to_check --incremental)
    NUM_PYTHON_SRC_FILES=$(echo ${PYTHON_SRC_FILES} | wc -w)

    echo "do_pep8 will perform checks on only the ${NUM_PYTHON_SRC_FILES} "\
"Python file(s) changed in the last non-merge git commit due to the "\
"--incremental flag:"
    echo "${PYTHON_SRC_FILES}"
    echo ""
  else
    PYTHON_SRC_FILES=$(get_py_files_to_check)
  fi

  if [[ -z ${PYTHON_SRC_FILES} ]]; then
    echo "do_pep8 found no Python files to check. Returning."
    return 0
  fi

  if [[ ! -f "${PEP8_CONFIG_FILE}" ]]; then
    die "ERROR: Cannot find pep8 config file at ${PEP8_CONFIG_FILE}"
  fi
  echo "See \"${PEP8_CONFIG_FILE}\" for pep8 config (e.g., ignored errors)"

  NUM_SRC_FILES=$(echo ${PYTHON_SRC_FILES} | wc -w)

  echo "Running pep8 on ${NUM_SRC_FILES} files"
  echo ""

  PEP8_START_TIME=$(date +'%s')
  PEP8_OUTPUT_FILE="$(mktemp)_pep8_output.log"

  rm -rf ${PEP8_OUTPUT_FILE}

  ${PEP8_BIN} --config="${PEP8_CONFIG_FILE}" --statistics \
      ${PYTHON_SRC_FILES} 2>&1 | tee ${PEP8_OUTPUT_FILE}
  PEP8_END_TIME=$(date +'%s')

  echo ""
  echo "pep8 took $((PEP8_END_TIME - PEP8_START_TIME)) s"
  echo ""

  if [[ -s ${PEP8_OUTPUT_FILE} ]]; then
    echo "FAIL: pep8 found the above errors and/or warnings."
    return 1
  else
    echo "PASS: No pep8 errors or warnings were found"
    return 0
  fi
}


do_buildifier() {
  BUILD_FILES=$(find tensorflow -name 'BUILD*')
  NUM_BUILD_FILES=$(echo ${BUILD_FILES} | wc -w)

  echo "Running do_buildifier on ${NUM_BUILD_FILES} files"
  echo ""

  BUILDIFIER_START_TIME=$(date +'%s')
  BUILDIFIER_OUTPUT_FILE="$(mktemp)_buildifier_output.log"

  rm -rf ${BUILDIFIER_OUTPUT_FILE}

  buildifier -v -mode=check \
    ${BUILD_FILES} 2>&1 | tee ${BUILDIFIER_OUTPUT_FILE}
  BUILDIFIER_END_TIME=$(date +'%s')

  echo ""
  echo "buildifier took $((BUILDIFIER_END_TIME - BUILDIFIER_START_TIME)) s"
  echo ""

  if [[ -s ${BUILDIFIER_OUTPUT_FILE} ]]; then
    echo "FAIL: buildifier found errors and/or warnings in the above BUILD files."
    echo "buildifier suggested the following changes:"
    buildifier -v -mode=diff -diff_command=diff ${BUILD_FILES}
    echo "Please fix manually or run buildifier <file> to auto-fix."
    return 1
  else
    echo "PASS: No buildifier errors or warnings were found"
    return 0
  fi
}

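# Check that the licenses bundled by LICENSES_TARGET cover exactly the external
# dependencies of BUILD_TARGET: both targets' non-TensorFlow dependencies are
# listed via bazel query, then compared with comm(1) to surface missing and
# extra licenses (after filtering a few known-exempt packages below).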
do_external_licenses_check() {
  BUILD_TARGET="$1"
  LICENSES_TARGET="$2"

  EXTERNAL_LICENSES_CHECK_START_TIME=$(date +'%s')

  EXTERNAL_DEPENDENCIES_FILE="$(mktemp)_external_dependencies.log"
  LICENSES_FILE="$(mktemp)_licenses.log"
  MISSING_LICENSES_FILE="$(mktemp)_missing_licenses.log"
  EXTRA_LICENSES_FILE="$(mktemp)_extra_licenses.log"
  TMP_FILE="$(mktemp)_tmp.log"

  echo "Getting external dependencies for ${BUILD_TARGET}"
  bazel query "attr('licenses', 'notice', deps(${BUILD_TARGET}))" --keep_going > "${TMP_FILE}" 2>&1
  cat "${TMP_FILE}" \
    | grep -e "^\/\/" -e "^@" \
    | grep -E -v "^//tensorflow" \
    | sed -e 's|:.*||' \
    | sort \
    | uniq 2>&1 \
    | tee ${EXTERNAL_DEPENDENCIES_FILE}

  echo
  echo "Getting list of external licenses mentioned in ${LICENSES_TARGET}."
  bazel query "deps(${LICENSES_TARGET})" --keep_going > "${TMP_FILE}" 2>&1
  cat "${TMP_FILE}" \
    | grep -e "^\/\/" -e "^@" \
    | grep -E -v "^//tensorflow" \
    | sed -e 's|:.*||' \
    | sort \
    | uniq 2>&1 \
    | tee ${LICENSES_FILE}

  echo
  comm -1 -3 ${EXTERNAL_DEPENDENCIES_FILE} ${LICENSES_FILE} 2>&1 | tee ${EXTRA_LICENSES_FILE}
  echo
  comm -2 -3 ${EXTERNAL_DEPENDENCIES_FILE} ${LICENSES_FILE} 2>&1 | tee ${MISSING_LICENSES_FILE}

  EXTERNAL_LICENSES_CHECK_END_TIME=$(date +'%s')

  # Blacklist
  echo ${MISSING_LICENSES_FILE}
  grep -e "@bazel_tools//third_party/" -e "@bazel_tools//tools" -e "@local" -e "@com_google_absl//absl" -e "@org_tensorflow//" -e "@com_github_googlecloudplatform_google_cloud_cpp//google" -v ${MISSING_LICENSES_FILE} > temp.txt
  mv temp.txt ${MISSING_LICENSES_FILE}

  # Whitelist
  echo ${EXTRA_LICENSES_FILE}
  grep -e "//third_party/mkl_dnn" -e "@bazel_tools//src" -e "@bazel_tools//tools/" -e "@org_tensorflow//tensorflow" -e "@com_google_absl//" -e "//external" -e "@local" -e "@com_github_googlecloudplatform_google_cloud_cpp//" -e "@embedded_jdk//" -e "^//$" -v ${EXTRA_LICENSES_FILE} > temp.txt
  mv temp.txt ${EXTRA_LICENSES_FILE}

  echo
  echo "do_external_licenses_check took $((EXTERNAL_LICENSES_CHECK_END_TIME - EXTERNAL_LICENSES_CHECK_START_TIME)) s"
  echo

  if [[ -s ${MISSING_LICENSES_FILE} ]] || [[ -s ${EXTRA_LICENSES_FILE} ]] ; then
    echo "FAIL: mismatch in packaged licenses and external dependencies"
    if [[ -s ${MISSING_LICENSES_FILE} ]] ; then
      echo "Missing the licenses for the following external dependencies:"
      cat ${MISSING_LICENSES_FILE}
    fi
    if [[ -s ${EXTRA_LICENSES_FILE} ]] ; then
      echo "Please remove the licenses for the following external dependencies:"
      cat ${EXTRA_LICENSES_FILE}
    fi
    rm -rf ${EXTERNAL_DEPENDENCIES_FILE}
    rm -rf ${LICENSES_FILE}
    rm -rf ${MISSING_LICENSES_FILE}
    rm -rf ${EXTRA_LICENSES_FILE}
    return 1
  else
    echo "PASS: all external licenses included."
    rm -rf ${EXTERNAL_DEPENDENCIES_FILE}
    rm -rf ${LICENSES_FILE}
    rm -rf ${MISSING_LICENSES_FILE}
    rm -rf ${EXTRA_LICENSES_FILE}
    return 0
  fi
}

do_pip_package_licenses_check() {
  echo "Running do_pip_package_licenses_check"
  echo ""
  do_external_licenses_check \
    "//tensorflow/tools/pip_package:build_pip_package" \
    "//tensorflow/tools/pip_package:licenses"
}

do_lib_package_licenses_check() {
  echo "Running do_lib_package_licenses_check"
  echo ""
  do_external_licenses_check \
    "//tensorflow:libtensorflow.so" \
    "//tensorflow/tools/lib_package:clicenses_generate"
}

do_java_package_licenses_check() {
  echo "Running do_java_package_licenses_check"
  echo ""
  do_external_licenses_check \
    "//tensorflow/java:libtensorflow_jni.so" \
    "//tensorflow/tools/lib_package:jnilicenses_generate"
}

# Check the bazel command status (first arg is the error message)
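# Note: must be called immediately after the bazel command it reports on,
# since it inspects $?.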
cmd_status() {
  if [[ $? != 0 ]]; then
    echo ""
    echo "FAIL: ${BUILD_CMD}"
    echo "  $1 See lines above for details."
    return 1
  else
    echo ""
    echo "PASS: ${BUILD_CMD}"
    return 0
  fi
}

# Run bazel build --nobuild to test the validity of the BUILD files
# TODO(mikecase): Remove TF Lite exclusion from this list. Exclusion is
# necessary since the @androidsdk WORKSPACE dependency is commented
# out by default in TF WORKSPACE file.
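# ('bazel build --nobuild' stops after the loading and analysis phases, so it
# validates the BUILD files without compiling anything.)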
do_bazel_nobuild() {
  BUILD_TARGET="//tensorflow/..."
  BUILD_TARGET="${BUILD_TARGET} -//tensorflow/lite/delegates/gpu/..."
  BUILD_TARGET="${BUILD_TARGET} -//tensorflow/lite/java/demo/app/..."
  BUILD_TARGET="${BUILD_TARGET} -//tensorflow/lite/schema/..."
  BUILD_CMD="bazel build --nobuild ${BAZEL_FLAGS} -- ${BUILD_TARGET}"

  ${BUILD_CMD}

  cmd_status \
    "This is due to invalid BUILD files."
}

do_bazel_deps_query() {
  local BUILD_TARGET='//tensorflow/...'
  # Android targets tend to depend on an Android runtime being available.
  # Exclude them until the sanity test has such a runtime available.
  #
  # TODO(mikecase): Remove TF Lite exclusion from this list. Exclusion is
  # necessary since the @androidsdk WORKSPACE dependency is commented out by
  # default in TF WORKSPACE file.
  local BUILD_TARGET="${BUILD_TARGET}"' - kind("android_*", //tensorflow/...)'

  # We've set the flag noimplicit_deps as a workaround for
  # https://github.com/bazelbuild/bazel/issues/10544
  bazel query ${BAZEL_FLAGS} --noimplicit_deps -- "deps($BUILD_TARGET)" > /dev/null

  cmd_status \
    "This is due to invalid BUILD files."
}

do_pip_smoke_test() {
  cd "$ROOT_DIR/tensorflow/tools/pip_package"
  python pip_smoke_test.py
}

do_code_link_check() {
  tensorflow/tools/ci_build/code_link_check.sh
}

# List .h|.cc files changed in the last non-merge git commit that still exist,
# i.e., not removed.
# Usage: get_clang_files_to_check [--incremental]
get_clang_files_to_check() {
  if [[ "$1" == "--incremental" ]]; then
    CHANGED_CLANG_FILES=$(get_changed_files_in_last_non_merge_git_commit | \
                       grep '.*\.h$\|.*\.cc$')

    # Do not include files removed in the last non-merge commit.
    CLANG_FILES=""
    for CLANG_FILE in ${CHANGED_CLANG_FILES}; do
      if [[ -f "${CLANG_FILE}" ]]; then
        CLANG_FILES="${CLANG_FILES} ${CLANG_FILE}"
      fi
    done

    echo "${CLANG_FILES}"
  else
    find tensorflow -name '*.h' -o -name '*.cc'
  fi
}

do_clang_format_check() {
  if [[ $# != "0" ]] && [[ $# != "1" ]]; then
    echo "Invalid syntax when invoking do_clang_format_check"
    echo "Usage: do_clang_format_check [--incremental]"
    return 1
  fi

  if [[ "$1" == "--incremental" ]]; then
    CLANG_SRC_FILES=$(get_clang_files_to_check --incremental)

    if [[ -z "${CLANG_SRC_FILES}" ]]; then
      echo "do_clang_format_check will NOT run due to --incremental flag and "\
"due to the absence of .h or .cc code changes in the last commit."
      return 0
    fi
  elif [[ -z "$1" ]]; then
    # TODO (yongtang): Always pass --incremental until all files have
    # been sanitized gradually. Then this --incremental could be removed.
    CLANG_SRC_FILES=$(get_clang_files_to_check --incremental)
  else
    echo "Invalid syntax for invoking do_clang_format_check"
    echo "Usage: do_clang_format_check [--incremental]"
    return 1
  fi

  CLANG_FORMAT=${CLANG_FORMAT:-clang-format-3.8}

  success=1
  for filename in $CLANG_SRC_FILES; do
    $CLANG_FORMAT --style=google $filename | diff $filename - > /dev/null
    if [ $? -ne 0 ]; then
      success=0
      echo "File $filename is not properly formatted with clang-format "\
"--style=google"
    fi
  done

  if [ $success == 0 ]; then
    echo "Clang format check fails."
    exit 1
  fi
  echo "Clang format check success."
}

do_check_load_py_test() {
  cd "$ROOT_DIR/tensorflow/tools/pip_package"
  python check_load_py_test.py
}

do_check_futures_test() {
  cd "$ROOT_DIR/tensorflow/tools/test"
  python check_futures_test.py
}

do_check_file_name_test() {
  cd "$ROOT_DIR/tensorflow/tools/test"
  python file_name_test.py
}

# Check that TARGET does not depend on DISALLOWED_DEP.
_check_no_deps() {
  TARGET="$1"
  DISALLOWED_DEP="$2"
  EXTRA_FLAG="$3"

  TMP_FILE="$(mktemp)_tmp.log"
  echo "Checking ${TARGET} does not depend on ${DISALLOWED_DEP} ..."
  bazel cquery ${EXTRA_FLAG} "somepath(${TARGET}, ${DISALLOWED_DEP})" --keep_going > "${TMP_FILE}" 2>&1
  if cat "${TMP_FILE}" | grep "Empty query results"; then
      echo "Success."
  else
      cat "${TMP_FILE}"
      echo
      echo "ERROR: Found path from ${TARGET} to disallowed dependency ${DISALLOWED_DEP}."
      echo "See above for path."
      rm "${TMP_FILE}"
      exit 1
  fi
  rm "${TMP_FILE}"
}

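# Verify that the pip package does not (transitively) depend on any CUDA or
# TensorRT shared library; EXTRA_FLAG carries the build config under which the
# dependency graph is evaluated.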
_do_pip_no_cuda_deps_check() {
  EXTRA_FLAG="$1"
  DISALLOWED_CUDA_DEPS=("@local_config_cuda//cuda:cudart"
        "@local_config_cuda//cuda:cublas"
        "@local_config_cuda//cuda:cuda_driver"
        "@local_config_cuda//cuda:cudnn"
        "@local_config_cuda//cuda:curand"
        "@local_config_cuda//cuda:cusolver"
        "@local_config_cuda//cuda:cusparse"
        "@local_config_tensorrt//:tensorrt")
  for cuda_dep in "${DISALLOWED_CUDA_DEPS[@]}"
  do
    _check_no_deps "//tensorflow/tools/pip_package:build_pip_package" "${cuda_dep}" "${EXTRA_FLAG}"
    RESULT=$?

    if [[ ${RESULT} != "0" ]]; then
      exit 1
    fi
  done
}

do_pip_no_cuda_deps_check_ubuntu() {
  _do_pip_no_cuda_deps_check "--define using_cuda=true --define using_cuda_nvcc=true"
}

do_pip_no_cuda_deps_check_windows() {
  _do_pip_no_cuda_deps_check "--define using_cuda=true --define using_cuda_nvcc=true --define framework_shared_object=false"
}

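# Run ./configure non-interactively (answers defaulted via 'yes ""') twice,
# once with TF_NEED_CUDA=1 and once with TF_NEED_CUDA=0, failing on any
# non-zero exit status.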
do_configure_test() {
  for WITH_CUDA in 1 0
  do
    export TF_NEED_CUDA=${WITH_CUDA}
    export CUDNN_INSTALL_PATH="/usr/local/cudnn"
    export PYTHON_BIN_PATH=$(which python)
    yes "" | ./configure

    RESULT=$?
    if [[ ${RESULT} != "0" ]]; then
      exit 1
    fi
  done
}

# Supply all sanity step commands and descriptions
SANITY_STEPS=("do_configure_test" "do_pylint" "do_check_futures_test" "do_buildifier" "do_bazel_nobuild" "do_bazel_deps_query" "do_pip_package_licenses_check" "do_lib_package_licenses_check" "do_java_package_licenses_check" "do_pip_smoke_test" "do_check_load_py_test" "do_code_link_check" "do_check_file_name_test" "do_pip_no_cuda_deps_check_ubuntu" "do_pip_no_cuda_deps_check_windows")
SANITY_STEPS_DESC=("Run ./configure" "Python 3 pylint" "Check that python files have certain __future__ imports" "buildifier check" "bazel nobuild" "bazel query" "pip: license check for external dependencies" "C library: license check for external dependencies" "Java Native Library: license check for external dependencies" "Pip Smoke Test: Checking py_test dependencies exist in pip package" "Check load py_test: Check that BUILD files with py_test target properly load py_test" "Code Link Check: Check there are no broken links" "Check file names for cases" "Check that the Ubuntu GPU pip package does not depend on CUDA shared libraries" "Check that the Windows GPU pip package does not depend on CUDA shared libraries")
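# Note: SANITY_STEPS and SANITY_STEPS_DESC are parallel arrays; keep them
# index-aligned when adding or removing a step.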

INCREMENTAL_FLAG=""
DEFAULT_BAZEL_CONFIGS=""

# Parse command-line arguments
BAZEL_FLAGS=${DEFAULT_BAZEL_CONFIGS}
for arg in "$@"; do
  if [[ "${arg}" == "--pep8" ]]; then
    # Only run pep8 test if "--pep8" option supplied
    SANITY_STEPS=("do_pep8")
    SANITY_STEPS_DESC=("pep8 test")
  elif [[ "${arg}" == "--incremental" ]]; then
    INCREMENTAL_FLAG="--incremental"
  else
    BAZEL_FLAGS="${BAZEL_FLAGS} ${arg}"
  fi
done


FAIL_COUNTER=0
PASS_COUNTER=0
STEP_EXIT_CODES=()

# Execute all the sanity build steps
COUNTER=0
while [[ ${COUNTER} -lt "${#SANITY_STEPS[@]}" ]]; do
  INDEX=COUNTER
  ((INDEX++))

  echo ""
  echo "=== Sanity check step ${INDEX} of ${#SANITY_STEPS[@]}: "\
"${SANITY_STEPS[COUNTER]} (${SANITY_STEPS_DESC[COUNTER]}) ==="
  echo ""

  # subshell: don't leak variables or changes of working directory
  (
  ${SANITY_STEPS[COUNTER]} ${INCREMENTAL_FLAG}
  )
  RESULT=$?

  if [[ ${RESULT} != "0" ]]; then
    ((FAIL_COUNTER++))
  else
    ((PASS_COUNTER++))
  fi

  STEP_EXIT_CODES+=(${RESULT})

  echo ""
  ((COUNTER++))
done

# Print summary of build results
COUNTER=0
echo "==== Summary of sanity check results ===="
while [[ ${COUNTER} -lt "${#SANITY_STEPS[@]}" ]]; do
  INDEX=COUNTER
  ((INDEX++))

  echo "${INDEX}. ${SANITY_STEPS[COUNTER]}: ${SANITY_STEPS_DESC[COUNTER]}"
  if [[ ${STEP_EXIT_CODES[COUNTER]} == "0" ]]; then
    printf "  ${COLOR_GREEN}PASS${COLOR_NC}\n"
  else
    printf "  ${COLOR_RED}FAIL${COLOR_NC}\n"
  fi

  ((COUNTER++))
done

echo
echo "${FAIL_COUNTER} failed; ${PASS_COUNTER} passed."

echo
if [[ ${FAIL_COUNTER} == "0" ]]; then
  printf "Sanity checks ${COLOR_GREEN}PASSED${COLOR_NC}\n"
else
  printf "Sanity checks ${COLOR_RED}FAILED${COLOR_NC}\n"
  exit 1
fi