#!/bin/sh
# SPDX-License-Identifier: GPL-2.0-only
# xref: /linux/tools/testing/selftests/ftrace/ftracetest (revision 5e0266f0e5f57617472d5aac4013f58a3ef264ac)

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#
usage() { # errno [message]
  # Print an optional error message followed by the usage text, then
  # exit with the given status.  The message and exit status are quoted
  # so empty/whitespace values cannot word-split or glob-expand.
  [ -n "$2" ] && echo "$2"
  echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
  echo " Options:"
  echo "		-h|--help  Show help message"
  echo "		-k|--keep  Keep passed test logs"
  echo "		-v|--verbose Increase verbosity of test messages"
  echo "		-vv        Alias of -v -v (Show all results in stdout)"
  echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
  echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
  echo "		--fail-unresolved Treat UNRESOLVED as a failure"
  echo "		-d|--debug Debug mode (trace all shell commands)"
  echo "		-l|--logdir <dir> Save logs on the <dir>"
  echo "		            If <dir> is -, all logs output in console only"
  exit "$1"
}
26
# Exit status used for fatal errors (default).
err_ret=1

# Exit status that kselftest reserves for "skipped".
err_skip=4

# cgroup RT scheduling prevents chrt commands from succeeding, which
# induces failures in test wakeup tests.  The runtime limit is lifted
# for the duration of the run and restored afterwards.
readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us

# Remember the current value so it can be restored on exit.
sched_rt_runtime_orig=$(cat "$sched_rt_runtime")
40
setup() {
  # Lift the RT runtime limit (-1 means unlimited) for the test run.
  echo -1 >"$sched_rt_runtime"
}
44
cleanup() {
  # Restore the RT runtime limit recorded at startup.
  echo "$sched_rt_runtime_orig" >"$sched_rt_runtime"
}
48
errexit() { # message
  # Report a fatal error on stderr, undo global setup and bail out with
  # the current err_ret status (1, or 4 when treated as a skip).
  printf 'Error: %s\n' "$1" >&2
  cleanup
  exit $err_ret
}
54
# Ensuring user privilege: writing tracefs and the RT-runtime knob
# requires root.
if [ "$(id -u)" -ne 0 ]; then
  errexit "this must be run by root user"
fi

setup
61
62# Utilities
absdir() { # file_path
  # Print the absolute directory containing $1.  Runs in a subshell so
  # the caller's working directory is untouched; expansions are quoted
  # so paths containing whitespace are handled correctly.
  (cd "$(dirname "$1")"; pwd)
}
66
abspath() {
  # Print the absolute path of $1: absolute directory plus base name.
  # Quoted so whitespace in the path survives word splitting.
  echo "$(absdir "$1")/$(basename "$1")"
}
70
find_testcases() { #directory
  # List every *.tc file under the given directory, sorted, joined into
  # one space-separated line.  $1 is quoted; the glob is quoted so the
  # shell does not expand it before find sees it.  The command
  # substitution is deliberately unquoted so newlines collapse to
  # spaces, as callers expect.
  echo $(find "$1" -name '*.tc' | sort)
}
74
parse_opts() { # opts
  # Parse command-line options and testcase arguments.  Option flags are
  # stored in globals; explicitly listed testcases/directories override
  # the default TEST_CASES at the end.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      # --console (VERBOSE=-1) is mutually exclusive with --verbose.
      # Pass an explicit errno to usage; previously the message was
      # consumed as the exit status and never shown.
      if [ $VERBOSE -eq -1 ]; then
	usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ "$1" = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ "$1" = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
	usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      # A single testcase file: it must exist.
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES $(abspath "$1")"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      # A directory of testcases; anything else is an error.
      if [ -d "$1" ]; then
        OPT_TEST_DIR=$(abspath "$1")
        OPT_TEST_CASES="$OPT_TEST_CASES $(find_testcases "$OPT_TEST_DIR")"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  # Explicitly listed cases replace the default full set.
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
147
# Parameters
# Locate the tracing directory: prefer an already-mounted tracefs, then
# an already-mounted debugfs, and finally try mounting one ourselves.
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	else
	    # Neither filesystem is available: treat as a skip (exit 4),
	    # not a failure.
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
# The kernel may have debugfs mounted but ftrace itself disabled; the
# tracing directory is then absent, so skip rather than fail.
if [ ! -d "$TRACING_DIR" ]; then
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi
175
# Default parameters; some may be overridden by command-line options.
TOP_DIR=$(absdir "$0")
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=$(find_testcases "$TEST_DIR")
LOG_DIR=$TOP_DIR/logs/$(date +%Y%m%d-%H%M%S)/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0
UNRESOLVED_RESULT=0
STOP_FAILURE=0
# Parse command-line options.  Use "$@" rather than $* so that testcase
# paths containing whitespace are passed through as single arguments.
parse_opts "$@"
188
# Trace every shell command when running in debug mode.
[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" ] || [ ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
if [ "x$LOG_DIR" = "x-" ]; then
  # "-l -" means console-only output: no log file at all.
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p "$LOG_DIR" || errexit "Failed to make a log directory: $LOG_DIR"
  date > "$LOG_FILE"
fi
205
# Define text colors
# Query the terminal for its color count; default to 0 when tput is
# missing or fails.
ncolors=$(tput colors 2>/dev/null || echo 0)
color_reset=
color_red=
color_green=
color_blue=
# Enable ANSI colors only when stdout is a terminal with >= 8 colors.
if [ -t 1 ] && [ "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi
220
strip_esc() {
  # Filter ANSI color/erase-line sequences out of stdin.
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E 's/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g'
}
225
prlog() { # messages
  # Print messages to stdout and, when a log file is configured, append
  # them (with ANSI escapes stripped) to $LOG_FILE.  A leading -n
  # suppresses the trailing newline, like echo -n.
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  # Use a fixed %b format so '%' characters in messages are printed
  # literally (the message is no longer the format string), while
  # backslash escapes such as the color codes are still interpreted.
  printf "%b" "$*$newline"
  [ "$LOG_FILE" ] && printf "%b" "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { #file
  # Dump a per-test log file to stdout; also append it, escape-stripped,
  # to the main log file when one is configured.
  cat "$1"
  [ "$LOG_FILE" ] && strip_esc < "$1" >> $LOG_FILE
}
prlog "=== Ftrace unit tests ==="


# Testcase management
# Test result codes, modeled on the Dejagnu extended codes.
PASS=0	# The test succeeded.
FAIL=1	# The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3    # The test was not run, currently just a placeholder.
UNSUPPORTED=4 # The test failed because of lack of feature.
XFAIL=5	# The test failed, and was expected to fail.

# Per-result lists of case numbers, accumulated by eval_result.
PASSED_CASES=
XFAILED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
UNDEFINED_CASES=
# Overall exit status: flipped to 1 on any unexpected result.
TOTAL_RESULT=0

# Label inserted into test titles when running on a tracing instance.
INSTANCE=
# Running counter of executed test cases.
CASENO=0
263
testcase() { # testfile
  # Advance the case counter and print the test header taken from the
  # "# description:" line of the testcase file (no trailing newline;
  # the result tag is appended later by eval_result).
  CASENO=$((CASENO+1))
  desc=$(grep '^#[ \t]*description:' "$1" | cut -f2- -d:)
  prlog -n "[$CASENO]$INSTANCE$desc"
}
269
checkreq() { # testfile
  # Extract the "# requires:" line from the testcase and hand it to
  # check_requires (provided by the sourced test.d/functions file).
  requires=$(grep '^#[ \t]*requires:' "$1" | cut -f2- -d:)
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}
275
test_on_instance() { # testfile
  # True when the testcase declares "# flags: instance".
  grep -q '^#[ \t]*flags:.*instance' "$1"
}
279
eval_result() { # sigval
  # Record and report one test's result code.  Returns 0 when the
  # result counts as success for the whole run, non-zero otherwise
  # (the caller then keeps the test log and sets TOTAL_RESULT).
  case $1 in
    $PASS)
      prlog "	[${color_green}PASS${color_reset}]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[${color_red}FAIL${color_reset}]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[${color_blue}UNRESOLVED${color_reset}]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      # Non-zero only when --fail-unresolved was given.
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog "	[${color_blue}UNTESTED${color_reset}]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      # Non-zero only when --fail-unsupported was given.
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[${color_green}XFAIL${color_reset}]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      # Any other value means the testcase signalled something bogus.
      prlog "	[${color_blue}UNDEFINED${color_reset}]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}
319
# Signal handling for result codes
# A testcase runs in a subshell (see __run_test), so it cannot set
# variables in this shell directly.  Instead, each exit_* helper below
# sends a realtime signal to the main shell ($SIG_PID) and the matching
# trap records the corresponding result code in SIG_RESULT before the
# helper exits its subshell.
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

# PASS needs no signal: a plain zero exit status already means PASS.
exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
# FAIL sends no signal here either: __run_test itself raises SIG_FAIL
# on any non-zero exit status from the testcase subshell.
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
# Mark the current test UNRESOLVED, then leave the subshell cleanly.
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
# Mark the current test UNTESTED, then leave the subshell cleanly.
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
# Mark the current test UNSUPPORTED, then leave the subshell cleanly.
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
# Mark the current test XFAIL (expected failure), then exit cleanly.
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
362
__run_test() { # testfile
  # Run one testcase in a subshell inside $TRACING_DIR, with error-exit
  # and command tracing enabled for the testcase body.
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_ftrace; . $1)
  # Any non-zero subshell status (set -e abort, failed requirement
  # check, explicit exit_fail) is reported via the FAIL signal so the
  # trap records it in SIG_RESULT.
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
369
# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  # Per-test log: a unique file under LOG_DIR, or stdout (via
  # /proc/self/fd/1) when logging to the console only.
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  # Private scratch directory for the testcase; removed after the run.
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  # Route test output according to verbosity:
  #   VERBOSE=-1 (--console): raw output on the console only
  #   no log file:            everything to stdout/stderr
  #   VERBOSE>=3:             output to console and log
  #   VERBOSE==2:             stderr to the log, stdout to console+log
  #   default:                everything to the log only
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  # SIG_RESULT was set by the signal traps (or stays 0, i.e. PASS).
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    # On an unexpected result, replay the log at -v/-vv and flag the
    # whole run as failed.
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}
404
# load in the helper functions
# (provides check_requires, initialize_ftrace, finish_ftrace, ...)
. $TEST_DIR/functions

# Main loop
for t in $TEST_CASES; do
  run_test $t
  # With --stop-fail, abort the whole run on the first failure.
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
416
# Test on instance loop
# Re-run every testcase flagged "# flags: instance" inside a temporary
# ftrace instance; TRACING_DIR is redirected to the instance directory
# for the duration of that run and restored afterwards.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  # Removing the instance directory tears the instance down.
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
(cd $TRACING_DIR; finish_ftrace) # for cleanup
432
# Print the per-result summary, restore system state, and exit with the
# overall status (0 only when every test ended as expected).
prlog ""
prlog "# of passed: " $(echo $PASSED_CASES | wc -w)
prlog "# of failed: " $(echo $FAILED_CASES | wc -w)
prlog "# of unresolved: " $(echo $UNRESOLVED_CASES | wc -w)
prlog "# of untested: " $(echo $UNTESTED_CASES | wc -w)
prlog "# of unsupported: " $(echo $UNSUPPORTED_CASES | wc -w)
prlog "# of xfailed: " $(echo $XFAILED_CASES | wc -w)
prlog "# of undefined(test bug): " $(echo $UNDEFINED_CASES | wc -w)

cleanup

# if no error, return 0
exit $TOTAL_RESULT
446