-e Exit on the first test failure
-H No headers - for running single test with other wrapper
-I <count> Iterate tests <count> times, exiting on failure (implies -e, -N)
+ -l <count> Use <count> daemons for local daemon integration tests
+ -L Print daemon logs on test failure (only some tests)
-N Don't print summary of tests results after running all tests
-q Quiet - don't show tests being run (still displays summary)
-S <lib> Use socket wrapper library <lib> for local integration tests
max_iterations=1
no_header=false
test_state_dir=""
-
-export TEST_VERBOSE=false
-export TEST_COMMAND_TRACE=false
-export TEST_CAT_RESULTS_OPTS=""
-export TEST_DIFF_RESULTS=false
-export TEST_LOCAL_DAEMONS
-[ -n "$TEST_LOCAL_DAEMONS" ] || TEST_LOCAL_DAEMONS=3
-export TEST_CLEANUP=false
-export TEST_TIMEOUT=3600
-export TEST_SOCKET_WRAPPER_SO_PATH=""
-
-while getopts "AcCDehHI:NqS:T:vV:xX?" opt ; do
+cleanup=false
+test_time_limit=3600
+
+# Defaults for settings exported to the test suites (CTDB_TEST_* namespace)
+export CTDB_TEST_VERBOSE=false
+export CTDB_TEST_COMMAND_TRACE=false
+export CTDB_TEST_CAT_RESULTS_OPTS=""
+export CTDB_TEST_DIFF_RESULTS=false
+export CTDB_TEST_PRINT_LOGS_ON_ERROR=false
+# 3 local daemons by default; -l <count> overrides, -c (or a
+# *run_cluster_tests* invocation, below) empties this to select a real
+# cluster.  NOTE: unlike the old TEST_LOCAL_DAEMONS, the environment no
+# longer overrides this default - use the -l option instead.
+export CTDB_TEST_LOCAL_DAEMONS=3
+export CTDB_TEST_SWRAP_SO_PATH=""
+
+while getopts "AcCDehHI:l:LNqS:T:vV:xX?" opt ; do
case "$opt" in
- A) TEST_CAT_RESULTS_OPTS="-A" ;;
- c) TEST_LOCAL_DAEMONS="" ;;
- C) TEST_CLEANUP=true ;;
- D) TEST_DIFF_RESULTS=true ;;
+ A) CTDB_TEST_CAT_RESULTS_OPTS="-A" ;;
+ c) CTDB_TEST_LOCAL_DAEMONS="" ;;
+ C) cleanup=true ;;
+ D) CTDB_TEST_DIFF_RESULTS=true ;;
e) exit_on_fail=true ;;
H) no_header=true ;;
I) max_iterations="$OPTARG" ; exit_on_fail=true ; with_summary=false ;;
+ l) CTDB_TEST_LOCAL_DAEMONS="$OPTARG" ;;
+ L) CTDB_TEST_PRINT_LOGS_ON_ERROR=true ;;
N) with_summary=false ;;
q) quiet=true ;;
- S) TEST_SOCKET_WRAPPER_SO_PATH="$OPTARG" ;;
- T) TEST_TIMEOUT="$OPTARG" ;;
- v) TEST_VERBOSE=true ;;
+ S) CTDB_TEST_SWRAP_SO_PATH="$OPTARG" ;;
+ T) test_time_limit="$OPTARG" ;;
+ v) CTDB_TEST_VERBOSE=true ;;
V) test_state_dir="$OPTARG" ;;
x) set -x ;;
- X) TEST_COMMAND_TRACE=true ;;
+ X) CTDB_TEST_COMMAND_TRACE=true ;;
\?|h) usage ;;
esac
done
case $(basename "$0") in
*run_cluster_tests*)
# Running on a cluster... same as -c
- TEST_LOCAL_DAEMONS=""
+ CTDB_TEST_LOCAL_DAEMONS=""
;;
esac
######################################################################
-ctdb_test_begin ()
+# test_header <name> - print a banner announcing <name> before it runs
+# (timing is no longer recorded here; ctdb_test_run measures duration)
+test_header ()
{
- local name="$1"
-
- teststarttime=$(date '+%s')
- testduration=0
+ local name="$1"
- echo "--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--"
- echo "Running test $name ($(date '+%T'))"
- echo "--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--"
+ echo "--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--"
+ echo "Running test $name ($(date '+%T'))"
+ echo "--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--"
}
-ctdb_test_end ()
+# test_footer <file> <status> <interp> <duration> - print the result
+# trailer.  <interp> (PASSED/SKIPPED/ERROR/TIMEDOUT/FAILED) is now
+# classified by the caller (ctdb_test_run) rather than computed here.
+test_footer ()
{
- local name="$1" ; shift
- local status="$1" ; shift
- # "$@" is command-line
+ local f="$1"
+ local status="$2"
+ local interp="$3"
+ local duration="$4"
- local interp="SKIPPED"
- local statstr=" (reason $*)"
- if [ -n "$status" ] ; then
+ # Only show the numeric status when it is interesting (non-zero)
+ local statstr=""
if [ "$status" -eq 0 ] ; then
- interp="PASSED"
- statstr=""
- echo "ALL OK: $*"
- elif [ "$status" -eq 124 ] ; then
- interp="TIMEOUT"
- statstr=" (status $status)"
+ statstr=""
else
- interp="FAILED"
- statstr=" (status $status)"
+ statstr=" (status $status)"
fi
- fi
-
- testduration=$(($(date +%s) - teststarttime))
-
- echo "=========================================================================="
- echo "TEST ${interp}: ${name}${statstr} (duration: ${testduration}s)"
- echo "=========================================================================="
+ echo "=========================================================================="
+ echo "TEST ${interp}: ${f}${statstr} (duration: ${duration}s)"
+ echo "=========================================================================="
}
ctdb_test_run ()
{
- local name="$1" ; shift
+ local f="$1"
- [ -n "$1" ] || set -- "$name"
+ $no_header || test_header "$f"
- $no_header || ctdb_test_begin "$name"
+ local status=0
+ local start_time
- local status=0
- if [ -x "$1" ] ; then
- timeout "$TEST_TIMEOUT" "$@" || status=$?
- else
- echo "TEST IS NOT EXECUTABLE"
- status=1
- fi
+ start_time=$(date '+%s')
- $no_header || ctdb_test_end "$name" "$status" "$*"
+ if [ -x "$f" ] ; then
+ # </dev/null: the test must not consume the runner's stdin
+ timeout "$test_time_limit" "$f" </dev/null | show_progress
+ # NOTE(review): $? here is the pipeline's exit status, so this
+ # relies on "set -o pipefail" being in effect - the pre-existing
+ # "ctdb_test_run | show_progress" pipeline had the same
+ # dependency, so presumably it is set elsewhere; confirm
+ status=$?
+ else
+ echo "TEST IS NOT EXECUTABLE"
+ status=99
+ fi
+
+ local duration=$(($(date +%s) - start_time))
+
+ tests_total=$((tests_total + 1))
+
+ # Classify the exit status: 77 = skipped and 99 = hard error
+ # (automake test convention), 124 = timed out (timeout(1)),
+ # any other non-zero = failure
+ local interp
+ case "$status" in
+ 0)
+ interp="PASSED"
+ tests_passed=$((tests_passed + 1))
+ ;;
+ 77)
+ interp="SKIPPED"
+ tests_skipped=$((tests_skipped + 1))
+ ;;
+ 99)
+ interp="ERROR"
+ tests_failed=$((tests_failed + 1))
+ ;;
+ 124)
+ interp="TIMEDOUT"
+ tests_failed=$((tests_failed + 1))
+ ;;
+ *)
+ interp="FAILED"
+ tests_failed=$((tests_failed + 1))
+ ;;
+ esac
- return $status
+ $no_header || test_footer "$f" "$status" "$interp" "$duration"
+
+ if $with_summary ; then
+ local t
+ if [ "$status" -eq 0 ] ; then
+ t=" ${interp}"
+ else
+ t="*${interp}*"
+ fi
+ printf '%-10s %s\n' "$t" "$f" >>"$summary_file"
+ fi
+
+ # Skipped tests should not cause failure
+ case "$status" in
+ 77)
+ status=0
+ ;;
+ esac
+
+ return "$status"
}
######################################################################
+# Counters for the end-of-run summary, updated per test by ctdb_test_run
tests_total=0
tests_passed=0
+tests_skipped=0
tests_failed=0
if ! type mktemp >/dev/null 2>&1 ; then
local test_dir test_suite_dir reldir
test_dir=$(cd "$CTDB_TEST_DIR" && pwd)
test_suite_dir=$(cd "$CTDB_TEST_SUITE_DIR" && pwd)
- reldir="${test_suite_dir#${test_dir}/}"
+ reldir="${test_suite_dir#"${test_dir}"/}"
export CTDB_TEST_TMP_DIR="${test_state_dir}/${reldir}"
rm -rf "$CTDB_TEST_TMP_DIR"
mkdir -p "$CTDB_TEST_TMP_DIR"
- tests_total=$((tests_total + 1))
-
- ctdb_test_run "$f" | show_progress
+ ctdb_test_run "$f"
status=$?
- if [ $status -eq 0 ] ; then
- tests_passed=$((tests_passed + 1))
- else
- tests_failed=$((tests_failed + 1))
- fi
- if $with_summary ; then
- local t
- if [ $status -eq 0 ] ; then
- t=" PASSED "
- else
- t="*FAILED*"
- fi
- echo "$t $f" >>"$summary_file"
- fi
}
-find_and_run_one_test ()
+# run_tests <test>... - each argument may be:
+#   * a test file - run it;
+#   * a test-suite directory (e.g. UNIT/eventscripts) - run its *.sh tests;
+#   * a collection directory (CLUSTER, INTEGRATION, UNIT) - recurse into
+#     each of its suites.
+# README files are skipped; paths nested deeper than a suite, or anything
+# unresolvable (even after retrying relative to CTDB_TEST_DIR), are fatal.
+run_tests ()
{
- local t="$1"
- local dir="$2"
+ local f
- local f="${dir}${dir:+/}${t}"
+ for f ; do
+ case "$f" in
+ */README|*/README.md)
+ continue
+ ;;
+ esac
- if [ -d "$f" ] ; then
- local i
- for i in "${f%/}/"*".sh" ; do
- # Only happens if test removed (unlikely) or empty directory
- if [ ! -f "$i" ] ; then
- break
- fi
- run_one_test "$i"
- if $exit_on_fail && [ $status -ne 0 ] ; then
- break
- fi
- done
- # No tests found? Not a tests directory! Not found...
- [ -n "$status" ] || status=127
- elif [ -f "$f" ] ; then
- run_one_test "$f"
- else
- status=127
- fi
-}
-
-run_tests ()
-{
- local tests=("$@")
-
- for f in "${tests[@]}" ; do
- find_and_run_one_test "$f"
-
- if [ $status -eq 127 ] ; then
- # Find the the top-level tests directory
- d=$(cd "$TEST_SCRIPTS_DIR" && echo "$PWD")
- if [ -z "$d" ] ; then
- local t="$TEST_SCRIPTS_DIR"
- die "Unable to find TEST_SCRIPTS_DIR=\"${t}\""
- fi
- tests_dir=$(dirname "$d")
+ if [ ! -e "$f" ] ; then
+ # Can't find it? Check relative to CTDB_TEST_DIR.
# Strip off current directory from beginning,
# if there, just to make paths more friendly.
- tests_dir="${tests_dir#${PWD}/}"
- find_and_run_one_test "$f" "$tests_dir"
+ f="${CTDB_TEST_DIR#"${PWD}"/}/${f}"
fi
- if [ $status -eq 127 ] ; then
+ if [ -d "$f" ] ; then
+ local test_dir dir reldir subtests
+
+ # Classify the directory by its depth below CTDB_TEST_DIR
+ test_dir=$(cd "$CTDB_TEST_DIR" && pwd)
+ dir=$(cd "$f" && pwd)
+ reldir="${dir#"${test_dir}"/}"
+
+ case "$reldir" in
+ */*/*)
+ die "test \"$f\" is not recognised"
+ ;;
+ */*)
+ # This is a test suite
+ subtests=$(echo "${f%/}/"*".sh")
+ if [ "$subtests" = "${f%/}/*.sh" ] ; then
+ # Probably empty directory
+ die "test \"$f\" is not recognised"
+ fi
+ ;;
+ CLUSTER|INTEGRATION|UNIT)
+ # A collection of test suites
+ subtests=$(echo "${f%/}/"*)
+ ;;
+ *)
+ die "test \"$f\" is not recognised"
+ esac
+
+ # Recurse - word-splitting wanted
+ # shellcheck disable=SC2086
+ run_tests $subtests
+ elif [ -f "$f" ] ; then
+ run_one_test "$f"
+ else
+ # Time to give up
die "test \"$f\" is not recognised"
fi
- if $exit_on_fail && [ $status -ne 0 ] ; then
- return $status
+ if $exit_on_fail && [ "$status" -ne 0 ] ; then
+ return "$status"
fi
done
}
export TEST_SCRIPTS_DIR="${CTDB_TEST_DIR}/scripts"
-unit_tests="
- UNIT/cunit
- UNIT/eventd
- UNIT/eventscripts
- UNIT/onnode
- shellcheck
- takeover
- takeover_helper
- tool
-"
-
# If no tests specified then run some defaults
if [ -z "$1" ] ; then
- if [ -n "$TEST_LOCAL_DAEMONS" ] ; then
- set -- UNIT simple
+ # Local daemons: unit + integration tests; real cluster (empty
+ # CTDB_TEST_LOCAL_DAEMONS): integration + cluster tests
+ if [ -n "$CTDB_TEST_LOCAL_DAEMONS" ] ; then
+ set -- UNIT INTEGRATION
else
- set -- simple complex
+ set -- INTEGRATION CLUSTER
fi
fi
do_cleanup ()
{
- if $TEST_CLEANUP ; then
+ if $cleanup ; then
echo "Removing test state directory: ${test_state_dir}"
rm -rf "$test_state_dir"
else
trap "do_cleanup ; exit 130" SIGINT
trap "do_cleanup ; exit 143" SIGTERM
-declare -a tests
-i=0
-for f ; do
- if [ "$f" = "UNIT" ] ; then
- for t in $unit_tests ; do
- tests[i++]="$t"
- done
- else
- tests[i++]="$f"
- fi
-done
-
iterations=0
# Special case: -I 0 means iterate forever (until failure)
while [ "$max_iterations" -eq 0 ] || [ $iterations -lt "$max_iterations" ] ; do
echo
fi
- run_tests "${tests[@]}"
+ run_tests "$@"
status=$?
if [ $status -ne 0 ] ; then
done
if $with_summary ; then
- if [ $status -eq 0 ] || ! $exit_on_fail ; then
+ if [ "$status" -eq 0 ] || ! $exit_on_fail ; then
echo
cat "$summary_file"
+
echo
- echo "${tests_passed}/${tests_total} tests passed"
+ # Skipped tests are not counted as having been run
+ tests_run=$((tests_total - tests_skipped))
+ printf '%d/%d tests passed' "$tests_passed" "$tests_run"
+ if [ "$tests_skipped" -gt 0 ] ; then
+ printf ' (%d skipped)' "$tests_skipped"
+ fi
+ printf '\n'
fi
fi
rm -f "$summary_file"
do_cleanup
if $no_header || $exit_on_fail ; then
- exit $status
+ exit "$status"
elif [ $tests_failed -gt 0 ] ; then
exit 1
else