xref: /linux/tools/testing/selftests/ftrace/ftracetest (revision e997ac58ad0b47141c62c79cde8356fe5633287a)
1#!/bin/sh
2# SPDX-License-Identifier: GPL-2.0-only
3
4# ftracetest - Ftrace test shell scripts
5#
6# Copyright (C) Hitachi Ltd., 2014
7#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
8#
9
# Print an optional error message plus the usage text, then exit.
# $1 - exit status to terminate with
# $2 - optional message printed before the usage text
usage() { # errno [message]
[ ! -z "$2" ] && echo "$2"
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-K|--ktap  Output in KTAP format"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
echo "		--console  Run tests on console only (can not use with --verbose)"
echo "		--stop-fail Stop the test run on the first failure"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		--fail-unresolved Treat UNRESOLVED as a failure"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs on the <dir>"
echo "		            If <dir> is -, all logs output in console only"
echo "		--rv       Run RV selftests instead of ftrace ones"
exit $1
}
28
29# default error
30err_ret=1
31
32# kselftest skip code is 4
33err_skip=4
34
35# umount required
36UMOUNT_DIR=""
37
38# cgroup RT scheduling prevents chrt commands from succeeding, which
39# induces failures in test wakeup tests.  Disable for the duration of
40# the tests.
41
42readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us
43
44sched_rt_runtime_orig=$(cat $sched_rt_runtime)
45
# Disable RT throttling (-1 = unlimited) for the duration of the run.
setup() {
  printf '%s\n' -1 > $sched_rt_runtime
}
49
# Restore the saved RT throttling value and undo any mount we made.
cleanup() {
  printf '%s\n' $sched_rt_runtime_orig > $sched_rt_runtime
  # Unmount tracefs/debugfs only if this script mounted it (best effort).
  [ -n "${UMOUNT_DIR}" ] && umount ${UMOUNT_DIR} ||:
}
56
# Report an error on stderr, run cleanup, and exit with $err_ret
# (which callers may set to $err_skip beforehand to signal SKIP).
errexit() { # message
  printf 'Error: %s\n' "$1" 1>&2
  cleanup
  exit $err_ret
}
62
# Ensuring user privilege: the tracing control files and mount operations
# below require root access.
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi

# Disable RT throttling while the tests run (restored by cleanup()).
setup
69
70# Utilities
# Print the absolute directory of the given path.
# Runs in a subshell so the caller's working directory is untouched.
# Quoted expansions so paths containing spaces resolve correctly.
absdir() { # file_path
  (cd "$(dirname -- "$1")"; pwd)
}
74
# Print the absolute path of the given file: directory part resolved via
# absdir() plus the base name.  Quoted so paths with spaces survive.
abspath() {
  echo "$(absdir "$1")/$(basename -- "$1")"
}
78
# Print every *.tc test case file under the given directory, sorted,
# joined by echo into a single space-separated line.
find_testcases() { #directory
  echo $(find $1 -name '*.tc' | sort)
}
82
# Parse the command line and populate the global configuration:
# KEEP_LOG, KTAP, VERBOSE, DEBUG, STOP_FAILURE, UNSUPPORTED_RESULT,
# UNRESOLVED_RESULT, USER_LOG_DIR, RV_TEST, TEST_CASES, TOP_DIR, TEST_DIR.
# Non-option arguments select individual *.tc files or test directories.
parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --ktap|-K)
      KTAP=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      # --verbose and --console (VERBOSE=-1) are mutually exclusive.
      if [ $VERBOSE -eq -1 ]; then
	usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
	usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      USER_LOG_DIR=$2
      shift 2
    ;;
    --rv)
      RV_TEST=1
      shift 1
    ;;
    *.tc)
      # A single test case file.
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      # A directory of test cases.
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  # Explicitly selected cases replace the default full set.
  if [ -n "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
  # If the chosen directory carries its own helper functions, retarget
  # TOP_DIR/TEST_DIR at it (used by --rv style alternate suites).
  if [ -n "$OPT_TEST_DIR" -a -f "$OPT_TEST_DIR"/test.d/functions ]; then
    TOP_DIR=$OPT_TEST_DIR
    TEST_DIR=$TOP_DIR/test.d
  fi
}
167
# Parameters
# Locate the tracing directory: prefer an already-mounted tracefs, fall
# back to debugfs, and as a last resort mount one ourselves (recording it
# in UMOUNT_DIR so cleanup() unmounts it again).
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	else
	    # Neither filesystem is available: report SKIP, not failure.
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
# The directory may exist while ftrace itself is compiled out.
if [ ! -d "$TRACING_DIR" ]; then
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi
197
198TOP_DIR=`absdir $0`
199TEST_DIR=$TOP_DIR/test.d
200TEST_CASES=`find_testcases $TEST_DIR`
201USER_LOG_DIR=
202KEEP_LOG=0
203KTAP=0
204DEBUG=0
205VERBOSE=0
206UNSUPPORTED_RESULT=0
207UNRESOLVED_RESULT=0
208STOP_FAILURE=0
209RV_TEST=0
210# Parse command-line options
211parse_opts $*
212
# Trace every shell command when debugging was requested.
[ $DEBUG -ne 0 ] && set -x

# TOP_DIR can be changed for rv. Setting log directory.
LOG_TOP_DIR=$TOP_DIR/logs
LOG_DATE=`date +%Y%m%d-%H%M%S`
if [ -n "$USER_LOG_DIR" ]; then
  LOG_DIR=$USER_LOG_DIR
  LINK_PTR=
else
  # Default: a timestamped directory plus a "latest" symlink pointing at it.
  LOG_DIR=$LOG_TOP_DIR/$LOG_DATE/
  LINK_PTR=$LOG_TOP_DIR/latest
fi

# RV tests drive the rv/ subdirectory of tracefs instead of ftrace itself.
if [ $RV_TEST -ne 0 ]; then
	TRACING_DIR=$TRACING_DIR/rv
	if [ ! -d "$TRACING_DIR" ]; then
		err_ret=$err_skip
		errexit "rv is not configured in this kernel"
	fi
fi
233
# Preparing logs
# A log dir of "-" means console-only output: no log files at all.
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
  # Refresh the "latest" symlink when using the default log location.
  if [ "x-$LINK_PTR" != "x-" ]; then
    unlink $LINK_PTR
    ln -fs $LOG_DATE $LINK_PTR
  fi
fi
247
# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout exists and number of colors is eight or more, use them.
# These hold printf-style escapes: prlog() expands them via printf, and
# strip_esc() removes them again before appending to the log file.
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi
262
# Filter: remove ANSI color/erase escape sequences from stdin so the
# saved log files stay plain text.
strip_esc() {
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  local esc_pattern="s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
  sed -E "$esc_pattern"
}
267
# Print a message to stdout (suppressed in KTAP mode) and append a
# color-stripped copy to $LOG_FILE when logging is enabled.
# "-n" as the first argument suppresses the trailing newline.
prlog() { # messages
  case "$1" in
  -n)
    newline=
    shift
    ;;
  *)
    newline="\n"
    ;;
  esac
  if [ "$KTAP" != "1" ]; then
    printf "$*$newline"
  fi
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}
# Dump a test log file to stdout; in KTAP mode each line is prefixed
# with "# " so it becomes a TAP diagnostic.  Also appends a
# color-stripped copy to $LOG_FILE when logging is enabled.
catlog() { #file
  if [ "${KTAP}" = "1" ]; then
    while read line ; do
      echo "# $line"
    done < $1
  else
    cat $1
  fi
  [ "$LOG_FILE" ] && cat $1 | strip_esc >> $LOG_FILE
}
287prlog "=== Ftrace unit tests ==="
288
289
290# Testcase management
291# Test result codes - Dejagnu extended code
292PASS=0	# The test succeeded.
293FAIL=1	# The test failed, but was expected to succeed.
294UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
295UNTESTED=3    # The test was not run, currently just a placeholder.
296UNSUPPORTED=4 # The test failed because of lack of feature.
297XFAIL=5	# The test failed, and was expected to fail.
298
299# Accumulations
300PASSED_CASES=
301FAILED_CASES=
302UNRESOLVED_CASES=
303UNTESTED_CASES=
304UNSUPPORTED_CASES=
305XFAILED_CASES=
306UNDEFINED_CASES=
307TOTAL_RESULT=0
308
309INSTANCE=
310CASENO=0
311CASENAME=
312
# Register the next test case: bump the global counter and pull the
# human-readable name from the file's "# description:" line.
testcase() { # testfile
  CASENO=$((CASENO + 1))
  CASENAME=$(grep '^#[ \t]*description:' $1 | cut -f2- -d:)
}
317
# Extract the "# requires:" line from the test file and verify each listed
# requirement via check_requires() (provided by test.d/functions).  A failed
# requirement makes the case exit as UNSUPPORTED before it is sourced.
checkreq() { # testfile
  requires=`grep "^#[ \t]*requires:" $1 | cut -f2- -d:`
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}
323
# Succeed (status 0) when the test file's "# flags:" line requests a rerun
# inside a separate ftrace instance.
test_on_instance() { # testfile
  grep -q '^#[ \t]*flags:.*instance' $1
}
327
# Emit one KTAP result line ("ok"/"not ok") for the current case.
# $1 - 1 for ok, anything else for not ok; remaining args become a
# "# ..." comment.  No-op unless KTAP output was requested.
ktaptest() { # result comment
  [ "$KTAP" = "1" ] || return 0

  local outcome="not ok"
  if [ "$1" = "1" ]; then
    outcome="ok"
  fi
  shift

  local note=$*
  if [ "$note" != "" ]; then
    note="# $note"
  fi

  echo $outcome $CASENO $INSTANCE$CASENAME $note
}
348
# Map a test's result code to console/KTAP output and accumulate it into
# the per-state case lists.  The return status says whether the case counts
# as OK for the overall run (UNRESOLVED/UNSUPPORTED depend on the
# --fail-unresolved / --fail-unsupported options).
eval_result() { # sigval
  case $1 in
    $PASS)
      prlog "	[${color_green}PASS${color_reset}]"
      ktaptest 1
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[${color_red}FAIL${color_reset}]"
      ktaptest 0
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[${color_blue}UNRESOLVED${color_reset}]"
      ktaptest 0 UNRESOLVED
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog "	[${color_blue}UNTESTED${color_reset}]"
      ktaptest 1 SKIP
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
      ktaptest 1 SKIP
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[${color_green}XFAIL${color_reset}]"
      ktaptest 1 XFAIL
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      # Any other value means the test reported a code we do not know.
      prlog "	[${color_blue}UNDEFINED${color_reset}]"
      ktaptest 0 error
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}
395
# Signal handling for result codes
# Each test runs in a subshell, so it cannot set our variables directly.
# Instead, the exit_* helpers (called from sourced test cases) send a
# realtime signal numbered SIG_BASE + result-code to the main shell
# ($SIG_PID); the matching trap records the result in SIG_RESULT.
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

# PASS needs no signal: SIG_RESULT is preset to 0 (== $PASS) by run_test
# and only overridden when some other result is signalled.
exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
# FAIL is signalled by __run_test itself when the subshell exits non-zero.
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
438
# Execute one test file, sourced in a subshell that runs inside
# $TRACING_DIR with 'set -e' (abort on first failing command) and
# 'set -x' (trace commands into the log).  A non-zero subshell exit
# is converted into a FAIL signal for the trap machinery above.
__run_test() { # testfile
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_system; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
445
# Run one test case
# Creates a per-case log file (unless console-only) and a private TMPDIR,
# routes the test's output according to the verbosity level, evaluates the
# signalled result, and cleans up afterwards.
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  prlog -n "[$CASENO]$INSTANCE$CASENAME"
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    # Console-only mode: "log" straight to our own stdout.
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  # 0 == $PASS; the signal traps overwrite this with the real result.
  SIG_RESULT=0
  # Route output by verbosity: -1 = console, 0 = log only,
  # 1/2 = log (shown on failure), >=3 = log and stdout simultaneously.
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    # On failure, surface the log at verbosity 1-2 and mark the whole run.
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}
481
# load in the helper functions
. $TEST_DIR/functions

# KTAP header: declare the TAP version and the plan line.  Cases flagged
# "instance" run twice (once normally, once inside an ftrace instance),
# so they are counted a second time here.
if [ "$KTAP" = "1" ]; then
  echo "TAP version 13"

  casecount=`echo $TEST_CASES | wc -w`
  for t in $TEST_CASES; do
    test_on_instance $t || continue
    casecount=$((casecount+1))
  done
  echo "1..${casecount}"
fi
495
# Main loop
for t in $TEST_CASES; do
  run_test $t
  # --stop-fail aborts the whole run on the first failing case.
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done

# Test on instance loop
# Cases flagged "instance" run a second time inside a freshly created
# ftrace instance directory, with TRACING_DIR temporarily pointing at it.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
# System-wide cleanup hook provided by test.d/functions.
(cd $TRACING_DIR; finish_system) # for cleanup

# Print the per-state summary (each *_CASES variable is a space-separated
# list of case numbers, so 'wc -w' yields the count).
prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# KTAP totals line in the kselftest-expected format.
if [ "$KTAP" = "1" ]; then
  echo -n "# Totals:"
  echo -n " pass:"`echo $PASSED_CASES | wc -w`
  echo -n " fail:"`echo $FAILED_CASES | wc -w`
  echo -n " xfail:"`echo $XFAILED_CASES | wc -w`
  echo -n " xpass:0"
  echo -n " skip:"`echo $UNTESTED_CASES $UNSUPPORTED_CASES | wc -w`
  echo -n " error:"`echo $UNRESOLVED_CASES $UNDEFINED_CASES | wc -w`
  echo
fi

# Restore RT throttling and unmount anything we mounted.
cleanup

# if no error, return 0
exit $TOTAL_RESULT
545