#!/bin/sh
# SPDX-License-Identifier: GPL-2.0-only

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
# Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#

# Print usage (optionally preceded by an error message) and exit.
# $1 - exit status, $2 - optional message printed before the usage text
usage() { # errno [message]
[ ! -z "$2" ] && echo $2
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "  -h|--help           Show help message"
echo "  -k|--keep           Keep passed test logs"
echo "  -v|--verbose        Increase verbosity of test messages"
echo "  -vv                 Alias of -v -v (Show all results in stdout)"
echo "  -vvv                Alias of -v -v -v (Show all commands immediately)"
echo "  --fail-unsupported  Treat UNSUPPORTED as a failure"
echo "  --fail-unresolved   Treat UNRESOLVED as a failure"
echo "  -d|--debug          Debug mode (trace all shell commands)"
echo "  -l|--logdir <dir>   Save logs on the <dir>"
echo "                      If <dir> is -, all logs output in console only"
exit $1
}

# default error
err_ret=1

# kselftest skip code is 4
err_skip=4

# umount required
UMOUNT_DIR=""

# cgroup RT scheduling prevents chrt commands from succeeding, which
# induces failures in test wakeup tests.  Disable for the duration of
# the tests.
readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us

sched_rt_runtime_orig=$(cat $sched_rt_runtime)

# Disable the RT-runtime limit so chrt in wakeup tests can succeed.
setup() {
  echo -1 > $sched_rt_runtime
}

# Restore the RT-runtime limit and unmount anything we mounted ourselves.
cleanup() {
  echo $sched_rt_runtime_orig > $sched_rt_runtime
  if [ -n "${UMOUNT_DIR}" ]; then
    umount ${UMOUNT_DIR} ||:
  fi
}

# Print an error to stderr, undo global state, and exit with $err_ret.
errexit() { # message
  echo "Error: $1" 1>&2
  cleanup
  exit $err_ret
}

# Ensuring user privilege
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi

setup

# Utilities

# Resolve the directory containing $1 to an absolute path.
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

# Resolve $1 to an absolute path.
abspath() {
  echo `absdir $1`/`basename $1`
}

# List all *.tc testcase files under the given directory, sorted.
find_testcases() { #directory
  echo `find $1 -name \*.tc | sort`
}

# Parse command-line options into the global option variables.
# Bare arguments are testcase files (*.tc) or testcase directories.
parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
        # Fix: usage() takes the errno first; without it the message was
        # silently dropped and exit received a non-numeric argument.
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
    ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}

# Parameters
# Locate (or mount) the tracefs/debugfs tracing directory.
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
  DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
  if [ -z "$DEBUGFS_DIR" ]; then
    # If tracefs exists, then so does /sys/kernel/tracing
    if [ -d "/sys/kernel/tracing" ]; then
      mount -t tracefs nodev /sys/kernel/tracing ||
        errexit "Failed to mount /sys/kernel/tracing"
      TRACING_DIR="/sys/kernel/tracing"
      UMOUNT_DIR=${TRACING_DIR}
    # If debugfs exists, then so does /sys/kernel/debug
    elif [ -d "/sys/kernel/debug" ]; then
      mount -t debugfs nodev /sys/kernel/debug ||
        errexit "Failed to mount /sys/kernel/debug"
      TRACING_DIR="/sys/kernel/debug/tracing"
      UMOUNT_DIR=${TRACING_DIR}
    else
      err_ret=$err_skip
      errexit "debugfs and tracefs are not configured in this kernel"
    fi
  else
    TRACING_DIR="$DEBUGFS_DIR/tracing"
  fi
fi
if [ ! -d "$TRACING_DIR" ]; then
  err_ret=$err_skip
  errexit "ftrace is not configured in this kernel"
fi

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
UNRESOLVED_RESULT=0
STOP_FAILURE=0
# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi

# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout exists and number of colors is eight or more, use them
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi

# Remove ANSI color/erase escape sequences from stdin (for clean logs).
strip_esc() {
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}

# Print a message to stdout and (escape-stripped) to $LOG_FILE if set.
# -n as the first argument suppresses the trailing newline.
prlog() { # messages
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  printf "$*$newline"
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}

# Dump a file to stdout and (escape-stripped) to $LOG_FILE if set.
catlog() { #file
  cat $1
  [ "$LOG_FILE" ] && cat $1 | strip_esc >> $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - Dejagnu extended code
PASS=0         # The test succeeded.
FAIL=1         # The test failed, but was expected to succeed.
UNRESOLVED=2   # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3     # The test was not run, currently just a placeholder.
UNSUPPORTED=4  # The test failed because of lack of feature.
XFAIL=5        # The test failed, and was expected to fail.
# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0

# Announce the start of one testcase: bump the counter and print its
# "# description:" line from the testcase file.
testcase() { # testfile
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}

# Return success if the testcase declares the "instance" flag.
test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}

# Record a testcase result code and print its colored label.
# Returns non-zero when the result counts as an overall failure.
eval_result() { # sigval
  case $1 in
    $PASS)
      prlog " [${color_green}PASS${color_reset}]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog " [${color_red}FAIL${color_reset}]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog " [${color_blue}UNRESOLVED${color_reset}]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog " [${color_blue}UNTESTED${color_reset}]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog " [${color_blue}UNSUPPORTED${color_reset}]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog " [${color_red}XFAIL${color_reset}]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog " [${color_blue}UNDEFINED${color_reset}]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
# Testcases run in a subshell; they report their result by sending a
# realtime signal (SIG_BASE + result code) back to this main process,
# whose trap records it in SIG_RESULT.
SIG_RESULT=
SIG_BASE=36 # Use realtime signals
SIG_PID=$$

exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

# Source and run one testcase in a subshell inside the tracing directory;
# a plain non-zero exit is reported to the main process as a FAIL signal.
__run_test() { # testfile
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case: set up its log file and TMPDIR, execute it with the
# output routing selected by $VERBOSE, then record and report the result.
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# load in the helper functions
. $TEST_DIR/functions

# Main loop
for t in $TEST_CASES; do
  run_test $t
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done

# Test on instance loop
# Re-run instance-flagged testcases inside a fresh ftrace instance dir.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
(cd $TRACING_DIR; initialize_ftrace) # for cleanup

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

cleanup

# if no error, return 0
exit $TOTAL_RESULT