#!/bin/sh
# SPDX-License-Identifier: GPL-2.0-only
# xref: /linux/tools/testing/selftests/ftrace/ftracetest
# (revision 40d269c000bda9fcd276a0412a9cebd3f6e344c5)

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#

usage() { # errno [message]
# Print an optional error message plus the option summary, then exit
# with the given status code. "$2" is quoted so multi-word messages
# are printed verbatim instead of being word-split.
[ ! -z "$2" ] && echo "$2"
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-K|--ktap  Output in KTAP format"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
echo "		--console  Use console for test messages (conflicts with -v)"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		--fail-unresolved Treat UNRESOLVED as a failure"
echo "		--stop-fail Stop the test run on the first failure"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs on the <dir>"
echo "		            If <dir> is -, all logs output in console only"
exit $1
}
27
28# default error
29err_ret=1
30
31# kselftest skip code is 4
32err_skip=4
33
34# umount required
35UMOUNT_DIR=""
36
37# cgroup RT scheduling prevents chrt commands from succeeding, which
38# induces failures in test wakeup tests.  Disable for the duration of
39# the tests.
40
41readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us
42
43sched_rt_runtime_orig=$(cat $sched_rt_runtime)

setup() {
  # Lift the RT runtime limit ("-1" = unlimited) so the chrt calls made
  # by the wakeup tests are not blocked by cgroup RT throttling.
  echo "-1" > ${sched_rt_runtime}
}

cleanup() {
  # Put back the RT runtime limit captured at startup.
  echo ${sched_rt_runtime_orig} > ${sched_rt_runtime}
  # If we mounted tracefs/debugfs ourselves, unmount it again; ignore
  # errors so cleanup never aborts the script.
  [ -n "${UMOUNT_DIR}" ] && umount ${UMOUNT_DIR} ||:
}

errexit() { # message
  # Report the error on stderr, restore system state, then exit with
  # the current error code (err_ret; set to err_skip for skips).
  echo "Error: $1" >&2
  cleanup
  exit $err_ret
}

# Ensuring user privilege
# Root is required: the script writes /proc tunables, the tracefs knobs,
# and may mount tracefs/debugfs below.
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi

# Relax RT throttling for the whole run (restored by cleanup()).
setup

# Utilities
absdir() { # file_path
  # Print the absolute path of the directory containing $1.
  ( cd $(dirname $1); pwd )
}

abspath() {
  # Print the absolute path of $1 (directory part resolved via absdir).
  echo $(absdir $1)/$(basename $1)
}

find_testcases() { #directory
  # Print all *.tc files under $1, sorted, as one space-separated line.
  echo $(find $1 -name \*.tc | sort)
}

parse_opts() { # opts
  # Parse command line arguments: sets the global option flags and,
  # when explicit testcases or directories were given, replaces
  # TEST_CASES with them.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --ktap|-K)
      KTAP=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      # -v accumulates; --console (VERBOSE=-1) is mutually exclusive.
      # usage expects $1=errno, $2=message: pass an explicit error code
      # (previously the message landed in the errno slot, so "exit" got
      # a non-numeric argument and the message was never printed).
      if [ $VERBOSE -eq -1 ]; then
	usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
	usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      # A user-chosen logdir does not get the "latest" symlink.
      LINK_PTR=
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}

# Parameters
# Locate the tracing directory: prefer an existing tracefs mount, then
# an existing debugfs mount, and finally try mounting one ourselves.
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	    # We mounted it ourselves, so cleanup() must unmount it.
	    UMOUNT_DIR=${TRACING_DIR}
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	else
	    # Neither filesystem is configured: skip, not fail.
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
if [ ! -d "$TRACING_DIR" ]; then
    # debugfs is mounted but has no tracing dir: ftrace is disabled.
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi

# Test and log locations, plus option defaults (overridable below).
TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_TOP_DIR=$TOP_DIR/logs
LOG_DATE=`date +%Y%m%d-%H%M%S`
LOG_DIR=$LOG_TOP_DIR/$LOG_DATE/
LINK_PTR=$LOG_TOP_DIR/latest
KEEP_LOG=0
KTAP=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
UNRESOLVED_RESULT=0
STOP_FAILURE=0
# Parse command-line options
# "$@" (not $*) keeps arguments containing spaces intact.
parse_opts "$@"

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
if [ "x$LOG_DIR" = "x-" ]; then
  # "-l -": console-only mode, no log files are written.
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
  if [ "x-$LINK_PTR" != "x-" ]; then
    # Refresh the "latest" symlink; only unlink when it already exists
    # so the very first run does not print a spurious unlink error.
    [ -e $LINK_PTR -o -L $LINK_PTR ] && unlink $LINK_PTR
    ln -fs $LOG_DATE $LINK_PTR
  fi
fi

# Define text colors
# Colors are only enabled when stdout is a terminal that reports at
# least eight colors; otherwise the variables stay empty (no escapes).
ncolors=$(tput colors 2>/dev/null || echo 0)
color_reset=
color_red=
color_green=
color_blue=
if [ -t 1 ] && [ "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi

strip_esc() {
  # Filter: remove ANSI color/erase escape sequences from stdin so the
  # saved log files contain plain text.
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}

prlog() { # [-n] messages
  # Print messages to stdout (suppressed in KTAP mode) and append a
  # color-stripped copy to $LOG_FILE when logging is enabled.
  # A leading "-n" suppresses the trailing newline.
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  # %b still expands the "\033[..m" sequences stored in the color_*
  # variables (and the "\n" above), but keeps '%' characters in
  # messages literal; using the message itself as the printf format
  # string would misinterpret them.
  [ "$KTAP" != "1" ] && printf '%b' "$*$newline"
  [ "$LOG_FILE" ] && printf '%b' "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { #file
  # Show a captured test log on stdout; when logging is enabled, also
  # append a color-stripped copy to the main log file.
  cat $1
  [ "$LOG_FILE" ] && strip_esc < $1 >> $LOG_FILE
}
261prlog "=== Ftrace unit tests ==="
262
263
264# Testcase management
265# Test result codes - Dejagnu extended code
266PASS=0	# The test succeeded.
267FAIL=1	# The test failed, but was expected to succeed.
268UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
269UNTESTED=3    # The test was not run, currently just a placeholder.
270UNSUPPORTED=4 # The test failed because of lack of feature.
271XFAIL=5	# The test failed, and was expected to fail.
272
273# Accumulations
274PASSED_CASES=
275FAILED_CASES=
276UNRESOLVED_CASES=
277UNTESTED_CASES=
278UNSUPPORTED_CASES=
279XFAILED_CASES=
280UNDEFINED_CASES=
281TOTAL_RESULT=0
282
283INSTANCE=
284CASENO=0
285CASENAME=
286
testcase() { # testfile
  # Bump the global case number and read the human-readable test name
  # from the "# description:" line of the testcase file.
  CASENO=$((CASENO+1))
  CASENAME=$(grep "^#[ \t]*description:" $1 | cut -f2- -d:)
}

checkreq() { # testfile
  # Collect the "# requires:" line from the testcase and hand it to
  # check_requires (defined in test.d/functions), which decides whether
  # the test can run on this kernel.
  requires=`grep "^#[ \t]*requires:" $1 | cut -f2- -d:`
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}

test_on_instance() { # testfile
  # True if the testcase declares "# flags: instance", i.e. it should
  # also be run inside a newly-created ftrace instance directory.
  grep -q "^#[ \t]*flags:.*instance" $1
}

ktaptest() { # result comment
  # Emit one KTAP result line ("ok"/"not ok" <num> <name> [# comment])
  # for the current case; silent unless KTAP output was requested.
  [ "$KTAP" = "1" ] || return 0

  local result="not ok"
  [ "$1" = "1" ] && result="ok"
  shift

  local comment="$*"
  [ "$comment" != "" ] && comment="# $comment"

  echo $result $CASENO $INSTANCE$CASENAME $comment
}

eval_result() { # sigval
  # Report one test result: log a colored tag, emit the KTAP line and
  # record the case number in the matching accumulator. Returns
  # non-zero when the result counts as a failure (FAIL/UNDEFINED
  # always; UNRESOLVED/UNSUPPORTED depending on the --fail-* options).
  if [ "$1" = "$PASS" ]; then
    prlog "	[${color_green}PASS${color_reset}]"
    ktaptest 1
    PASSED_CASES="$PASSED_CASES $CASENO"
    return 0
  elif [ "$1" = "$FAIL" ]; then
    prlog "	[${color_red}FAIL${color_reset}]"
    ktaptest 0
    FAILED_CASES="$FAILED_CASES $CASENO"
    return 1 # this is a bug.
  elif [ "$1" = "$UNRESOLVED" ]; then
    prlog "	[${color_blue}UNRESOLVED${color_reset}]"
    ktaptest 0 UNRESOLVED
    UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
    return $UNRESOLVED_RESULT # depends on use case
  elif [ "$1" = "$UNTESTED" ]; then
    prlog "	[${color_blue}UNTESTED${color_reset}]"
    ktaptest 1 SKIP
    UNTESTED_CASES="$UNTESTED_CASES $CASENO"
    return 0
  elif [ "$1" = "$UNSUPPORTED" ]; then
    prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
    ktaptest 1 SKIP
    UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
    return $UNSUPPORTED_RESULT # depends on use case
  elif [ "$1" = "$XFAIL" ]; then
    prlog "	[${color_green}XFAIL${color_reset}]"
    ktaptest 1 XFAIL
    XFAILED_CASES="$XFAILED_CASES $CASENO"
    return 0
  else
    prlog "	[${color_blue}UNDEFINED${color_reset}]"
    ktaptest 0 error
    UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
    return 1 # this must be a test bug
  fi
}

# Signal handling for result codes
# A testcase reports its result by sending realtime signal
# SIG_BASE+<code> to the main shell ($$ of this script); the matching
# trap records the code in SIG_RESULT, which run_test() then passes to
# eval_result(). The exit_* helpers below are what the testcases call.
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

exit_pass () {
  # PASS sends no signal: SIG_RESULT stays at the 0 set by run_test().
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  # The non-zero exit is translated into SIG_FAIL by __run_test().
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

__run_test() { # testfile
  # Execute one testcase in a subshell (cwd = tracing dir) with
  # error-exit and command tracing enabled; requirements are checked
  # and ftrace state is reset before the testcase is sourced.
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_ftrace; . $1)
  # Any non-zero subshell status is reported as FAIL via the signal
  # trap machinery above.
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  # Assign the case number and read the description line.
  testcase $1
  prlog -n "[$CASENO]$INSTANCE$CASENAME"
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    # Console-only mode: send "log" output straight to stdout.
    local testlog=/proc/self/fd/1
  fi
  # Each test gets a private TMPDIR, removed at the end.
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  # Route test output according to verbosity; the signal traps update
  # SIG_RESULT while __run_test runs.
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    # NOTE(review): the "2>&1" after the pipe applies to tee, not to
    # __run_test, so the shell trace on stderr bypasses the pipe here —
    # confirm whether that is intended.
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    # On failure, show the captured log at moderate verbosity and mark
    # the whole run as failed.
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# load in the helper functions
. $TEST_DIR/functions

# KTAP header: the plan counts every testcase once, plus one more for
# each testcase that is also run on a separate ftrace instance below.
if [ "$KTAP" = "1" ]; then
  echo "TAP version 13"

  casecount=`echo $TEST_CASES | wc -w`
  for t in $TEST_CASES; do
    test_on_instance $t || continue
    casecount=$((casecount+1))
  done
  echo "1..${casecount}"
fi

# Main loop
for t in $TEST_CASES; do
  run_test $t
  # --stop-fail: abort the whole run on the first failing case.
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done

# Test on instance loop
# Re-run the "# flags: instance" testcases inside a freshly created
# ftrace instance directory, with TRACING_DIR temporarily pointed at it.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
(cd $TRACING_DIR; finish_ftrace) # for cleanup

# Final summary: per-category counts (KTAP totals printed separately).
prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

if [ "$KTAP" = "1" ]; then
  echo -n "# Totals:"
  echo -n " pass:"`echo $PASSED_CASES | wc -w`
  echo -n " fail:"`echo $FAILED_CASES | wc -w`
  echo -n " xfail:"`echo $XFAILED_CASES | wc -w`
  echo -n " xpass:0"
  echo -n " skip:"`echo $UNTESTED_CASES $UNSUPPORTED_CASES | wc -w`
  echo -n " error:"`echo $UNRESOLVED_CASES $UNDEFINED_CASES | wc -w`
  echo
fi

# Restore RT throttling and unmount anything we mounted.
cleanup

# if no error, return 0
exit $TOTAL_RESULT
