#!/bin/sh
# SPDX-License-Identifier: GPL-2.0-only

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#

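# Typical invocations (illustrative examples, not an exhaustive list):
#   ./ftracetest                    # run all testcases under test.d/
#   ./ftracetest test.d/kprobe/     # run one testcase directory
#   ./ftracetest -vv -k foo.tc      # verbose run, keep the passed-test log
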
usage() { # errno [message]
[ ! -z "$2" ] && echo "$2"
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-K|--ktap  Output in KTAP format"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
echo "		--console  Show test output directly on the console (not with -v)"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		--fail-unresolved Treat UNRESOLVED as a failure"
echo "		--stop-fail Stop testing on the first failure"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs in <dir>"
echo "		            If <dir> is -, write logs to the console only"
echo "		--rv       Run RV selftests instead of ftrace ones"
exit $1
}

# default error
err_ret=1

# kselftest skip code is 4
err_skip=4

# umount required
UMOUNT_DIR=""

# cgroup RT scheduling prevents chrt commands from succeeding, which
# causes failures in the wakeup tests. Disable RT throttling for the
# duration of the tests.

readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us

sched_rt_runtime_orig=$(cat $sched_rt_runtime)

setup() {
  echo -1 > $sched_rt_runtime
}

cleanup() {
  echo $sched_rt_runtime_orig > $sched_rt_runtime
  if [ -n "${UMOUNT_DIR}" ]; then
    umount ${UMOUNT_DIR} ||:
  fi
}

errexit() { # message
  echo "Error: $1" 1>&2
  cleanup
  exit $err_ret
}

# Ensure root privilege
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by the root user"
fi

setup

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() {
  echo `absdir $1`/`basename $1`
}

find_testcases() { # directory
  echo `find $1 -name \*.tc | sort`
}

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --ktap|-K)
      KTAP=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      LINK_PTR=
      shift 2
    ;;
    --rv)
      RV_TEST=1
      shift 1
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
    ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ -n "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
  if [ -n "$OPT_TEST_DIR" -a -f "$OPT_TEST_DIR"/test.d/functions ]; then
    TOP_DIR=$OPT_TEST_DIR
    TEST_DIR=$TOP_DIR/test.d
  fi
}

# Parameters
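# Find the tracing directory: prefer an already-mounted tracefs, then an
# already-mounted debugfs; otherwise try to mount one ourselves (and
# remember to unmount it on exit).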
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	else
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
if [ ! -d "$TRACING_DIR" ]; then
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_TOP_DIR=$TOP_DIR/logs
LOG_DATE=`date +%Y%m%d-%H%M%S`
LOG_DIR=$LOG_TOP_DIR/$LOG_DATE/
LINK_PTR=$LOG_TOP_DIR/latest
KEEP_LOG=0
KTAP=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
UNRESOLVED_RESULT=0
STOP_FAILURE=0
RV_TEST=0
# Parse command-line options. This must come after the defaults above so
# that --logdir can override LOG_DIR and clear LINK_PTR.
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

if [ $RV_TEST -ne 0 ]; then
	TRACING_DIR=$TRACING_DIR/rv
	if [ ! -d "$TRACING_DIR" ]; then
		err_ret=$err_skip
		errexit "rv is not configured in this kernel"
	fi
fi

# Preparing logs
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
  if [ "x-$LINK_PTR" != "x-" ]; then
    unlink $LINK_PTR
    ln -fs $LOG_DATE $LINK_PTR
  fi
fi
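# Note: $LOG_TOP_DIR/latest is kept as a symlink to the newest run's log
# directory; it is skipped when a custom --logdir clears LINK_PTR.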

# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout is a terminal and it supports eight or more colors, use them
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi

strip_esc() {
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}

prlog() { # messages
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  [ "$KTAP" != "1" ] && printf "$*$newline"
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { # file
  if [ "${KTAP}" = "1" ]; then
    cat $1 | while read line ; do
      echo "# $line"
    done
  else
    cat $1
  fi
  [ "$LOG_FILE" ] && cat $1 | strip_esc >> $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - Dejagnu extended code
PASS=0	# The test succeeded.
FAIL=1	# The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3    # The test was not run, currently just a placeholder.
UNSUPPORTED=4 # The test failed because the feature is missing.
XFAIL=5	# The test failed, and was expected to fail.

# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0
CASENAME=

testcase() { # testfile
  CASENO=$((CASENO+1))
  CASENAME=`grep "^#[ \t]*description:" $1 | cut -f2- -d:`
}

checkreq() { # testfile
  requires=`grep "^#[ \t]*requires:" $1 | cut -f2- -d:`
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}

test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}
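
# For reference: each .tc testcase declares its metadata in comment
# headers that the helpers above grep for. A hypothetical header:
#
#   # description: ftrace - function trace on/off
#   # requires: function:tracer
#   # flags: instance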

ktaptest() { # result comment
  if [ "$KTAP" != "1" ]; then
    return
  fi

  local result=
  if [ "$1" = "1" ]; then
    result="ok"
  else
    result="not ok"
  fi
  shift

  local comment=$*
  if [ "$comment" != "" ]; then
    comment="# $comment"
  fi

  echo $result $CASENO $INSTANCE$CASENAME $comment
}

eval_result() { # sigval
  case $1 in
    $PASS)
      prlog "	[${color_green}PASS${color_reset}]"
      ktaptest 1
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[${color_red}FAIL${color_reset}]"
      ktaptest 0
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[${color_blue}UNRESOLVED${color_reset}]"
      ktaptest 0 UNRESOLVED
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog "	[${color_blue}UNTESTED${color_reset}]"
      ktaptest 1 SKIP
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
      ktaptest 1 SKIP
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[${color_green}XFAIL${color_reset}]"
      ktaptest 1 XFAIL
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog "	[${color_blue}UNDEFINED${color_reset}]"
      ktaptest 0 error
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
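# How results are reported: each test runs in a subshell, and the exit_*
# helpers below send SIG_BASE+<result code> (a realtime signal) to this
# main shell. The traps record the code in SIG_RESULT, which run_test
# hands to eval_result after the subshell exits.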
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

__run_test() { # testfile
  # Re-read PID from /proc/self/stat because $$ still reports the parent
  # shell inside a subshell.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_system; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  prlog -n "[$CASENO]$INSTANCE$CASENAME"
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
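  # Route the test's output according to verbosity:
  #   --console (-1): leave stdout/stderr untouched
  #   no log file:    send everything to stdout
  #   -vvv:           show output as it happens, logging both streams
  #   -vv:            show stdout on the console, log both streams
  #   default/-v:     log everything quietly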
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 2>&1 | tee -a $testlog
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# load in the helper functions
. $TEST_DIR/functions

if [ "$KTAP" = "1" ]; then
  echo "TAP version 13"

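  # The plan counts every case once, plus one extra run for each case
  # that is re-run in a separate tracing instance below.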
  casecount=`echo $TEST_CASES | wc -w`
  for t in $TEST_CASES; do
    test_on_instance $t || continue
    casecount=$((casecount+1))
  done
  echo "1..${casecount}"
fi

# Main loop
for t in $TEST_CASES; do
  run_test $t
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done

# Test on instance loop
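# Creating a directory under tracefs' instances/ creates a fresh tracing
# instance; each test flagged "instance" is re-run inside one.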
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done
(cd $TRACING_DIR; finish_system) # for cleanup

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

if [ "$KTAP" = "1" ]; then
  echo -n "# Totals:"
  echo -n " pass:"`echo $PASSED_CASES | wc -w`
  echo -n " fail:"`echo $FAILED_CASES | wc -w`
  echo -n " xfail:"`echo $XFAILED_CASES | wc -w`
  echo -n " xpass:0"
  echo -n " skip:"`echo $UNTESTED_CASES $UNSUPPORTED_CASES | wc -w`
  echo -n " error:"`echo $UNRESOLVED_CASES $UNDEFINED_CASES | wc -w`
  echo
fi

cleanup

# if no error, return 0
exit $TOTAL_RESULT