#!/bin/sh
# SPDX-License-Identifier: GPL-2.0-only

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#

usage() { # errno [message]
[ ! -z "$2" ] && echo $2
echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
echo " Options:"
echo "		-h|--help  Show help message"
echo "		-k|--keep  Keep passed test logs"
echo "		-K|--ktap  Output in KTAP format"
echo "		-v|--verbose Increase verbosity of test messages"
echo "		-vv        Alias of -v -v (Show all results in stdout)"
echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
echo "		--console  Send test output directly to the console (not usable with --verbose)"
echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
echo "		--fail-unresolved Treat UNRESOLVED as a failure"
echo "		--stop-fail Stop the test run at the first failure"
echo "		-d|--debug Debug mode (trace all shell commands)"
echo "		-l|--logdir <dir> Save logs in the given <dir>"
echo "		            If <dir> is -, all logs are written to the console only"
exit $1
}
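
# Illustrative invocations (the paths below are placeholders, not specific
# shipped testcases):
#   ./ftracetest                      # run every testcase under test.d/
#   ./ftracetest -vv test.d/<subdir>  # run all testcases in one directory
#   ./ftracetest -l - <testcase>.tc   # run a single testcase, console log only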

# default error
err_ret=1

# kselftest skip code is 4
err_skip=4

# cgroup RT scheduling prevents chrt commands from succeeding, which
# induces failures in the wakeup tracer tests.  Disable RT throttling for
# the duration of the tests.

readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us

sched_rt_runtime_orig=$(cat $sched_rt_runtime)

setup() {
  echo -1 > $sched_rt_runtime
}

cleanup() {
  echo $sched_rt_runtime_orig > $sched_rt_runtime
}

errexit() { # message
  echo "Error: $1" 1>&2
  cleanup
  exit $err_ret
}

# Ensure we are running with root privilege
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by the root user"
fi

setup

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() { # file_path
  echo `absdir $1`/`basename $1`
}

find_testcases() { # directory
  echo `find $1 -name \*.tc | sort`
}

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --ktap|-K)
      KTAP=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
	usage 1 "--console cannot be used with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}

# Parameters
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	else
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
if [ ! -d "$TRACING_DIR" ]; then
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi
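# At this point TRACING_DIR points either at a mounted tracefs (usually
# /sys/kernel/tracing) or at the tracing directory under debugfs.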

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
KTAP=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
UNRESOLVED_RESULT=0
STOP_FAILURE=0
# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi

# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout is a terminal and it supports eight or more colors, use them
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi

strip_esc() {
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}

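# prlog prints a message to the console (suppressed in KTAP mode) and, when a
# log file is in use, appends the same text to it with the color escape
# sequences stripped.  catlog does the same for the contents of a file, but
# always echoes it to the console.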
prlog() { # messages
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  [ "$KTAP" != "1" ] && printf "$*$newline"
  [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { # file
  cat $1
  [ "$LOG_FILE" ] && cat $1 | strip_esc >> $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes - DejaGnu extended codes
PASS=0	# The test succeeded.
FAIL=1	# The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3    # The test was not run, currently just a placeholder.
UNSUPPORTED=4 # The test failed because a required feature is missing.
XFAIL=5	# The test failed, and was expected to fail.

# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0
CASENAME=

testcase() { # testfile
  CASENO=$((CASENO+1))
  CASENAME=`grep "^#[ \t]*description:" $1 | cut -f2- -d:`
}

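# Each testcase declares its prerequisites in a "# requires:" header line
# (for example "# requires: kprobe_events"), which is handed to the
# check_requires helper sourced from test.d/functions.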
checkreq() { # testfile
  requires=`grep "^#[ \t]*requires:" $1 | cut -f2- -d:`
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}

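# A testcase whose header contains "# flags: instance" is additionally run
# inside a newly created tracing instance (see the instance loop at the
# bottom of this script).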
test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}

ktaptest() { # result comment
  if [ "$KTAP" != "1" ]; then
    return
  fi

  local result=
  if [ "$1" = "1" ]; then
    result="ok"
  else
    result="not ok"
  fi
  shift

  local comment=$*
  if [ "$comment" != "" ]; then
    comment="# $comment"
  fi

  echo $result $CASENO $INSTANCE$CASENAME $comment
}

eval_result() { # sigval
  case $1 in
    $PASS)
      prlog "	[${color_green}PASS${color_reset}]"
      ktaptest 1
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[${color_red}FAIL${color_reset}]"
      ktaptest 0
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[${color_blue}UNRESOLVED${color_reset}]"
      ktaptest 0 UNRESOLVED
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog "	[${color_blue}UNTESTED${color_reset}]"
      ktaptest 1 SKIP
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
      ktaptest 1 SKIP
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[${color_green}XFAIL${color_reset}]"
      ktaptest 1 XFAIL
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog "	[${color_blue}UNDEFINED${color_reset}]"
      ktaptest 0 error
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
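# Each testcase runs in a subshell (see __run_test below), so it cannot set
# variables of this shell directly.  Instead, the exit_* helpers (and
# __run_test itself, for plain failures) report non-PASS results by sending
# a realtime signal (SIG_BASE + result code) to the main shell, whose trap
# records the code in SIG_RESULT.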
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL

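# Run one testcase in a subshell: cd into the tracing directory, enable
# "exit on error" and command tracing, verify the requirements, reset ftrace
# to a known state, then source the testcase.  A non-zero exit status is
# reported back to the main shell as a FAIL signal.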
__run_test() { # testfile
  # Record the subshell's own PID; $$ is not updated inside a subshell.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  prlog -n "[$CASENO]$INSTANCE$CASENAME"
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
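  # Route the test output according to the verbosity level:
  #   --console (-1):   no redirection, everything goes straight to the console
  #   no log file:      stdout and stderr both go to the console
  #   -vvv:             stdout is tee'd to the console and the log; the set -x
  #                     trace on stderr is shown immediately on the console
  #   -vv:              stderr goes to the log, stdout is tee'd to both
  #   default and -v:   stdout and stderr are appended to the log only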
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# Load the helper functions (check_requires, initialize_ftrace, etc.)
. $TEST_DIR/functions

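# When emitting KTAP, announce the test plan up front.  Testcases flagged
# with "instance" are counted twice because they run both normally and once
# more inside a separate tracing instance (see the loops below).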
if [ "$KTAP" = "1" ]; then
  echo "TAP version 13"

  casecount=`echo $TEST_CASES | wc -w`
  for t in $TEST_CASES; do
    test_on_instance $t || continue
    casecount=$((casecount+1))
  done
  echo "1..${casecount}"
fi

# Main loop
for t in $TEST_CASES; do
  run_test $t
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done

# Test on instance loop
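# Each "instance"-flagged testcase is rerun with TRACING_DIR pointing at a
# freshly created instance directory under $TRACING_DIR/instances, which is
# removed again after the test.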
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure was detected. Stopping the tests."
    exit 1
  fi
done
(cd $TRACING_DIR; finish_ftrace) # for cleanup

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

if [ "$KTAP" = "1" ]; then
  echo -n "# Totals:"
  echo -n " pass:"`echo $PASSED_CASES | wc -w`
  echo -n " fail:"`echo $FAILED_CASES | wc -w`
  echo -n " xfail:"`echo $XFAILED_CASES | wc -w`
  echo -n " xpass:0"
  echo -n " skip:"`echo $UNTESTED_CASES $UNSUPPORTED_CASES | wc -w`
  echo -n " error:"`echo $UNRESOLVED_CASES $UNDEFINED_CASES | wc -w`
  echo
fi

cleanup

# if no error, return 0
exit $TOTAL_RESULT