#!/bin/sh
# SPDX-License-Identifier: GPL-2.0-only

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#

usage() { # errno [message]
[ ! -z "$2" ] && echo "$2"
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
echo "		--console  Send the tests' output directly to the console"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		--fail-unresolved Treat UNRESOLVED as a failure"
echo "		--stop-fail Stop the test run on the first failure"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs in the <dir>"
echo "		            If <dir> is -, all logs are written to the console only"
exit $1
}
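
# Example invocations (the paths are illustrative; any *.tc file or any
# directory containing testcases works):
#   ./ftracetest                           # run every testcase under ./test.d
#   ./ftracetest test.d/ftrace             # run the testcases in one directory
#   ./ftracetest -vv test.d/00basic/basic1.tc   # run a single testcase verbosely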

# default error
err_ret=1

# kselftest skip code is 4
err_skip=4

# umount required
UMOUNT_DIR=""

# cgroup RT scheduling prevents chrt commands from succeeding, which
# induces failures in the wakeup tests.  Disable it for the duration of
# the tests.
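# Writing -1 to sched_rt_runtime_us removes the RT runtime limit entirely;
# the saved value is restored by cleanup() when the run finishes.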

readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us

sched_rt_runtime_orig=$(cat $sched_rt_runtime)

setup() {
  echo -1 > $sched_rt_runtime
}

cleanup() {
  echo $sched_rt_runtime_orig > $sched_rt_runtime
  if [ -n "${UMOUNT_DIR}" ]; then
    umount ${UMOUNT_DIR} ||:
  fi
}

errexit() { # message
  echo "Error: $1" 1>&2
  cleanup
  exit $err_ret
}

# Ensure the tests are run as root
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi

setup

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() {
  echo `absdir $1`/`basename $1`
}

find_testcases() { #directory
  echo `find $1 -name \*.tc | sort`
}

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
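
# Note: testcases and directories given on the command line replace the
# default set (every *.tc found under test.d); they are not appended to it.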

# Parameters
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	else
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
if [ ! -d "$TRACING_DIR" ]; then
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi
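# At this point $TRACING_DIR points at a usable ftrace directory, found by
# trying, in order: an existing tracefs mount, an existing debugfs mount,
# mounting tracefs on /sys/kernel/tracing, or mounting debugfs on
# /sys/kernel/debug.  Anything this script mounted itself is recorded in
# UMOUNT_DIR so cleanup() can unmount it again.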

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
UNRESOLVED_RESULT=0
STOP_FAILURE=0
# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi

# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout is a terminal and it supports eight or more colors, use them
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi

strip_esc() {
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}
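# A quick illustration (assuming colors were enabled above):
#   printf "${color_green}PASS${color_reset}\n" | strip_esc    # prints "PASS"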

prlog() { # messages
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  printf "$*$newline"
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { #file
  cat $1
  [ "$LOG_FILE" ] && cat $1 | strip_esc >> $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - Dejagnu extended code
PASS=0	# The test succeeded.
FAIL=1	# The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3    # The test was not run, currently just a placeholder.
UNSUPPORTED=4 # The test failed because a required feature is missing.
XFAIL=5	# The test failed, and was expected to fail.

# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0

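# Each *.tc testcase is a plain shell script carrying a small comment header
# that this harness parses:
#   "# description: <one-line summary>"   - printed by testcase() below
#   "# requires: <features>"              - checked by checkreq()/check_requires
#   "# flags: instance"                   - also re-run inside a trace instance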
testcase() { # testfile
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2- -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}

checkreq() { # testfile
  requires=`grep "^#[ \t]*requires:" $1 | cut -f2- -d:`
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}

test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}

eval_result() { # sigval
  case $1 in
    $PASS)
      prlog "	[${color_green}PASS${color_reset}]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[${color_red}FAIL${color_reset}]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[${color_blue}UNRESOLVED${color_reset}]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog "	[${color_blue}UNTESTED${color_reset}]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[${color_green}XFAIL${color_reset}]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog "	[${color_blue}UNDEFINED${color_reset}]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$
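# Each testcase runs in a subshell (see __run_test below) and reports a
# non-PASS result by calling one of the exit_* helpers, which send the
# realtime signal SIG_BASE + <result code> to this main shell; the traps
# below record that code in SIG_RESULT.  exit_pass and exit_fail send no
# signal: a plain non-zero exit status is converted into SIG_FAIL by
# __run_test itself.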

exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

__run_test() { # testfile
  # Record the subshell's own PID; $$ is not updated inside a subshell.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
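# Each case gets a private TMPDIR under /tmp and (unless logging to the
# console only) its own log file under $LOG_DIR; the log is removed again
# when the case ends with the expected result and --keep is not given.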
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# load in the helper functions
. $TEST_DIR/functions

# Main loop
for t in $TEST_CASES; do
  run_test $t
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done

# Test on instance loop
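# Testcases flagged with "# flags: instance" are run a second time inside a
# freshly created trace instance under $TRACING_DIR/instances, with
# TRACING_DIR temporarily pointing at that instance directory.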
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done
(cd $TRACING_DIR; initialize_ftrace) # for cleanup

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

cleanup

# if no error, return 0
exit $TOTAL_RESULT