Add quiet mode for test runner(s) (#2228)

In one of the previous PRs we encountered a problem
where Travis cut off the test execution
because the generated log file was too big.

By adding a quiet mode for the test runners, we will
only report the failing tests.

JerryScript-DCO-1.0-Signed-off-by: Peter Gal pgal.u-szeged@partner.samsung.com
This commit is contained in:
Péter Gál
2018-03-02 02:18:17 +01:00
committed by yichoi
parent 575ec7e10a
commit 55058cf151
4 changed files with 34 additions and 11 deletions
+6 -6
View File
@@ -25,18 +25,18 @@ matrix:
- env: OPTS="--check-signed-off=travis --check-cppcheck --check-doxygen --check-vera --check-license --check-magic-strings --check-pylint"
install: pip install --user pylint==1.6.5
- env: OPTS="--jerry-debugger"
- env: OPTS="--jerry-tests --jerry-test-suite"
- env: OPTS="--jerry-tests --jerry-test-suite --toolchain=cmake/toolchain_linux_armv7l.cmake" TIMEOUT=300
- env: OPTS="--quiet --jerry-tests --jerry-test-suite"
- env: OPTS="--quiet --jerry-tests --jerry-test-suite --toolchain=cmake/toolchain_linux_armv7l.cmake" TIMEOUT=300
install: tools/apt-get-install-qemu-arm.sh
- env: OPTS="--buildoption-test"
- env: OPTS="--jerry-tests --jerry-test-suite --buildoptions=--jerry-libc=off,--compile-flag=-m32,--cpointer-32bit=on"
- env: OPTS="--quiet --jerry-tests --jerry-test-suite --buildoptions=--jerry-libc=off,--compile-flag=-m32,--cpointer-32bit=on"
- env: OPTS="--unittests"
- env: OPTS="--unittests --buildoptions=--cmake-param=-DFEATURE_INIT_FINI=ON"
- env: OPTS="--test262"
install: sudo timedatectl set-timezone America/Los_Angeles
- os: osx
before_install: tools/brew-install-deps.sh
env: OPTS="--jerry-tests --jerry-test-suite --unittests"
env: OPTS="--quiet --jerry-tests --jerry-test-suite --unittests"
- install: echo -n | openssl s_client -connect scan.coverity.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' | sudo tee -a /etc/ssl/certs/ca-
env:
# Declaration of the encrypted COVERITY_SCAN_TOKEN, created via the
@@ -59,7 +59,7 @@ matrix:
packages:
- gcc-5
- gcc-5-multilib
env: OPTS="--jerry-tests --jerry-test-suite --skip-list=parser-oom.js --buildoptions=--compile-flag=-fsanitize=address,--compile-flag=-m32,--compile-flag=-fno-omit-frame-pointer,--compile-flag=-fno-common,--compile-flag=-O2,--debug,--jerry-libc=off,--static-link=off,--system-allocator=on,--linker-flag=-fuse-ld=gold" ASAN_OPTIONS=detect_stack_use_after_return=1:check_initialization_order=true:strict_init_order=true TIMEOUT=600
env: OPTS="--quiet --jerry-tests --jerry-test-suite --skip-list=parser-oom.js --buildoptions=--compile-flag=-fsanitize=address,--compile-flag=-m32,--compile-flag=-fno-omit-frame-pointer,--compile-flag=-fno-common,--compile-flag=-O2,--debug,--jerry-libc=off,--static-link=off,--system-allocator=on,--linker-flag=-fuse-ld=gold" ASAN_OPTIONS=detect_stack_use_after_return=1:check_initialization_order=true:strict_init_order=true TIMEOUT=600
- compiler: gcc-5
addons:
apt:
@@ -68,7 +68,7 @@ matrix:
packages:
- gcc-5
- gcc-5-multilib
env: OPTS="--jerry-tests --jerry-test-suite --skip-list=parser-oom.js --buildoptions=--compile-flag=-fsanitize=undefined,--compile-flag=-m32,--compile-flag=-fno-omit-frame-pointer,--compile-flag=-fno-common,--debug,--jerry-libc=off,--static-link=off,--system-allocator=on,--linker-flag=-fuse-ld=gold" UBSAN_OPTIONS=print_stacktrace=1 TIMEOUT=600
env: OPTS="--quiet --jerry-tests --jerry-test-suite --skip-list=parser-oom.js --buildoptions=--compile-flag=-fsanitize=undefined,--compile-flag=-m32,--compile-flag=-fno-omit-frame-pointer,--compile-flag=-fno-common,--debug,--jerry-libc=off,--static-link=off,--system-allocator=on,--linker-flag=-fuse-ld=gold" UBSAN_OPTIONS=print_stacktrace=1 TIMEOUT=600
- env: JOBNAME="ESP8266 Build Test"
cache: ccache
+10 -1
View File
@@ -135,6 +135,8 @@ def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--toolchain', metavar='FILE',
help='Add toolchain file')
parser.add_argument('-q', '--quiet', action='store_true',
help='Only print out failing tests')
parser.add_argument('--buildoptions', metavar='LIST',
help='Add a comma separated list of extra build options to each test')
parser.add_argument('--skip-list', metavar='LIST',
@@ -266,6 +268,9 @@ def run_jerry_tests(options):
if options.skip_list:
skip_list.append(options.skip_list)
if options.quiet:
test_cmd.append("-q")
if skip_list:
test_cmd.append("--skip-list=" + ",".join(skip_list))
@@ -292,6 +297,9 @@ def run_jerry_test_suite(options):
else:
test_cmd.append(settings.JERRY_TEST_SUITE_ES51_LIST)
if options.quiet:
test_cmd.append("-q")
if options.skip_list:
test_cmd.append("--skip-list=" + options.skip_list)
@@ -331,7 +339,8 @@ def run_unittests(options):
ret_test |= run_check([
settings.UNITTEST_RUNNER_SCRIPT,
bin_dir_path
bin_dir_path,
"-q" if options.quiet else "",
])
return ret_build | ret_test
+10 -3
View File
@@ -15,7 +15,7 @@
# limitations under the License.
# Usage:
# ./tools/runners/run-test-suite.sh ENGINE TESTS [--skip-list=item1,item2] [--snapshot] ENGINE_ARGS....
# ./tools/runners/run-test-suite.sh ENGINE TESTS [-q] [--skip-list=item1,item2] [--snapshot] ENGINE_ARGS....
TIMEOUT=${TIMEOUT:=5}
TIMEOUT_CMD=`which timeout`
@@ -37,6 +37,13 @@ TEST_FILES=$OUTPUT_DIR/$TESTS_BASENAME.files
TEST_FAILED=$OUTPUT_DIR/$TESTS_BASENAME.failed
TEST_PASSED=$OUTPUT_DIR/$TESTS_BASENAME.passed
VERBOSE=1
if [[ "$1" == "-q" ]]
then
unset VERBOSE
shift
fi
if [[ "$1" =~ ^--skip-list=.* ]]
then
SKIP_LIST=${1#--skip-list=}
@@ -147,7 +154,7 @@ do
if [ $status_code -eq 0 ]
then
echo "[$tested/$TOTAL] $cmd_line: PASS"
test $VERBOSE && echo "[$tested/$TOTAL] $cmd_line: PASS"
cmd_line="${ENGINE#$ROOT_DIR} $ENGINE_ARGS --exec-snapshot $SNAPSHOT_TEMP"
$TIMEOUT_CMD $TIMEOUT $ENGINE $ENGINE_ARGS --exec-snapshot $SNAPSHOT_TEMP &> $ENGINE_TEMP
@@ -175,7 +182,7 @@ do
failed=$((failed+1))
else
echo "[$tested/$TOTAL] $cmd_line: $PASS"
test $VERBOSE && echo "[$tested/$TOTAL] $cmd_line: $PASS"
echo "$test" >> $TEST_PASSED
+8 -1
View File
@@ -17,6 +17,13 @@
DIR="$1"
shift
VERBOSE=1
if [ "$1" == "-q" ]
then
unset VERBOSE
shift
fi
UNITTEST_ERROR=$DIR/unittests.failed
UNITTEST_OK=$DIR/unittests.passed
@@ -76,7 +83,7 @@ do
failed=$((failed+1))
else
echo "[$tested/$total] $cmd_line: PASS"
test $VERBOSE && echo "[$tested/$total] $cmd_line: PASS"
echo "$unit_test" >> $UNITTEST_OK