#
# use `run-test-local'
#
+# *** make sure you add multi-file tests to TEST_ORDERING if necessary
+
thisdir = tests
SUBDIRS =
include ../build/rules.make
-DISTFILES = README.tests $(wildcard *.cs)
+DISTFILES = README.tests harness.mk $(wildcard *.cs)
ifeq (default, $(PROFILE))
# force this, we don't care if CSC is broken. This also
USE_MCS_FLAGS :=
-#
# All new tests for 'mcs' go in here
-# These tests are not run by GMCS since the corresponding code is not yet in there.
+# These tests are not run by GMCS since the corresponding code is not yet in there.
# Martin Baulig will manually move them into TEST_SOURCES_common after merging the code into GMCS.
# He may also move some to TEST_EXCLUDE_net_2_0 if some of the merges are inappropriate for GMCS.
#
-NEW_TEST_SOURCES_common = \
- test-297 test-287 test-294 test-299 test-300 \
- test-301 test-302 test-303 test-304 test-305 test-306 test-307 test-308
+NEW_TEST_SOURCES_common = test-294 test-304 test-305 test-306 test-307 test-318 mtest-5-dll mtest-5-exe \
+ test-319-dll test-319-exe test-320 test-40
#
# Please do _not_ add any tests here - all new tests should go into NEW_TEST_SOURCES_common
test-1 test-2 test-3 test-4 test-5 test-6 test-7 test-8 test-9 test-10 \
test-11 test-12 test-13 test-14 test-15 test-16 test-17 test-18 test-19 test-20 \
test-21 test-22 test-23 test-24 test-25 test-26 test-27 test-28 test-29 test-30 \
- test-31 test-32 test-33 test-34 test-35 test-36 test-37 test-38 test-39 test-40 \
+ test-31 test-32 test-33 test-34 test-35 test-36 test-37 test-38 test-39 \
test-41 test-42 test-43 test-44 test-45 test-46 test-47 test-48 test-49 \
test-51 test-52 test-53 test-54 test-55 test-56 test-57 test-58 test-59 test-60 \
test-61 test-62 test-63 test-64 test-65 test-66 test-68 test-69 test-70 \
test-251 test-252 test-253 test-254 test-255 test-256 test-257 test-258 test-259 test-260 \
test-261 test-262 test-263 test-264 test-265 test-266 test-267 test-268 test-269 test-270 \
test-271 test-272 test-273 test-274 test-275 test-276 test-277 test-278 test-279 test-280 \
- test-281 test-282 test-283 test-284 test-285 test-286 test-288 test-289 test-290 \
- test-291 test-292 test-293 test-295 test-296 test-298 \
+ test-281 test-282 test-283 test-284 test-285 test-286 test-287 test-288 test-289 test-290 \
+ test-291 test-292 test-293 test-295 test-296 test-297 test-298 test-299 test-300 \
+ test-301 test-302 test-303 test-309 test-310 \
+ test-311 test-312 test-313 test-314 test-315 test-316 test-317 \
cls-test-0 cls-test-1 cls-test-2 cls-test-3 cls-test-5 cls-test-6 cls-test-7 cls-test-10 \
cls-test-11 cls-test-14 cls-test-15 cls-test-16 \
2test-1 2test-2 2test-3 2test-4 2test-5 2test-6 2test-7 2test-8 2test-9 2test-10 \
2test-11 2test-12 2test-13 2test-14 \
unsafe-1 unsafe-2 unsafe-3 unsafe-5 unsafe-6 unsafe-7 unsafe-8 unsafe-9 unsafe-10 \
mtest-1-dll mtest-1-exe \
- dll-1 prog-1 \
- dll-2 prog-2 \
- conv-lib conv-main \
- vararg-lib vararg-exe \
+ mtest-2-dll mtest-2-exe \
+ mtest-3-dll mtest-3-exe \
+ mtest-4-dll mtest-4-exe \
+ conv-dll conv-exe \
+ vararg-dll vararg-exe \
module-1 module-2 module-3 \
- ns0 ns
+ a-call a-capture5 a-instance a-parameter2 \
+ a-capture10 a-capture6 a-nested-anon2 \
+ a-capture1 a-capture7 a-nested-anon3 \
+ a-capture2 a-capture8 a-nested-anon4 \
+ a-capture3 a-capture9 a-nested-anon a-simple2 \
+ a-capture4 a-event a-nested a-simple
TEST_EXCLUDES_common =
TEST_SOURCES_default = $(NEW_TEST_SOURCES_common)
TEST_SOURCES_net_2_0 = \
- gen-13-dll gen-13-exe gen-17-dll gen-17-exe gen-47-dll gen-47-exe \
- gen-1 gen-2 gen-3 gen-4 gen-5 gen-6 gen-7 gen-8 gen-9 gen-10 \
- gen-11 gen-12 gen-14 gen-15 gen-16 gen-18 gen-19 gen-20 \
- gen-21 gen-22 gen-23 gen-24 gen-25 gen-26 gen-27 gen-28 gen-29 gen-30 \
- gen-32 gen-33 gen-34 gen-35 gen-36 gen-37 gen-38 gen-39 gen-40 \
- gen-41 gen-42 gen-43 gen-44 gen-45 gen-46 gen-48 gen-49 gen-50 \
- gen-51 gen-52 gen-53 gen-54 gen-55 gen-56 gen-58 gen-59 gen-60 \
- gen-62 gen-63 gen-64 gen-65 gen-66 gen-67 gen-68 gen-69 gen-70 \
- gen-71 gen-72 gen-73 gen-74 gen-75 gen-76 gen-77 gen-78
-
-TEST_EXCLUDES_net_2_0 = test-55 test-289 test-259
+ gen-13-dll gen-13-exe gen-17-dll gen-17-exe gen-31-dll gen-31-exe \
+ gen-47-dll gen-47-exe gen-98-dll gen-98-exe \
+ gen-1 gen-2 gen-3 gen-4 gen-5 gen-6 gen-7 gen-8 gen-9 gen-10 \
+ gen-11 gen-12 gen-14 gen-15 gen-16 gen-18 gen-19 gen-20 \
+ gen-21 gen-22 gen-23 gen-24 gen-25 gen-26 gen-27 gen-28 gen-29 gen-30 \
+ gen-32 gen-33 gen-34 gen-35 gen-36 gen-37 gen-38 gen-39 gen-40 \
+ gen-41 gen-42 gen-43 gen-44 gen-45 gen-46 gen-48 gen-49 gen-50 \
+ gen-51 gen-52 gen-53 gen-54 gen-55 gen-56 gen-58 gen-59 gen-60 \
+ gen-61 gen-62 gen-63 gen-64 gen-66 gen-67 gen-68 gen-69 gen-70 \
+ gen-71 gen-72 gen-73 gen-74 gen-75 gen-76 gen-77 gen-78 gen-79 gen-80 \
+ gen-81 gen-82 gen-83 gen-84 gen-85 gen-86 gen-87 gen-88 gen-89 gen-90 \
+ gen-91 gen-92 gen-93 gen-94 gen-95 gen-96 gen-97 gen-100 \
+ gen-101 gen-102 gen-103 gen-104 gen-105 gen-106 gen-107 gen-108 gen-109 \
+ gen-111 gen-112
+
+TEST_EXCLUDES_net_2_0 =
TEST_SOURCES = $(filter-out $(TEST_EXCLUDES_common) $(TEST_EXCLUDES_$(PROFILE)) $(TEST_EXCLUDES_$(PLATFORM)), \
$(TEST_SOURCES_common) $(TEST_SOURCES_$(PROFILE)) $(TEST_SOURCES_$(PLATFORM)))
# These tests load User32.dll and/or Kernel32.dll
-TEST_SOURCES_win32 = test-50 test-67
+TEST_SOURCES_win32 = test-67
## FIXME: Need to audit. Maybe move to 'TEST_EXCLUDES_linux' and 'TEST_EXCLUDES_win32' as appropriate
# A test is a 'no pass' if it fails on either windows or linux
# Test 120 does not pass because the MS.NET runtime is buggy.
-TEST_NOPASS = test-120 test-132 test-133
+
+TEST_NOPASS = test-50 test-120 test-132 test-133 a-parameter4
# test-28 test-45 test-53 test-91 test-102 test-106 test-107 test-122 test-66 test-177
+# The test harness supports running the testcases in parallel. However, we still need to
+# provide test-ordering rules to support multi-file testcases. By default, any test named
+# 'foo-exe' requires that a test named 'foo-dll' be present, and they're run in the order: foo-dll, foo-exe
+# Additional test-orderings can be listed below. Note that x:y says that 'x' should be run _after_ y
+TEST_ORDERING = module-2:module-1 module-3:module-1 module-3:module-2
+
all-local install-local uninstall-local:
# casts
test-local:
-run-test-local: multi test-compiler-jit-real test-casts
+run-test-local: multi test-harness test-casts
test-everything:
$(MAKE) PROFILE=default run-test
$(MAKE) PROFILE=net_2_0 run-test
+test-generics:
+ $(MAKE) PROFILE=net_2_0 run-test
+
+test-generics-2:
+ $(MAKE) PROFILE=net_2_0 TEST_SOURCES="$(TEST_SOURCES_net_2_0)" test-harness
+
clean-local:
- rm -f *.exe *.netmodule *.out *.pdb casts.cs
+ -rm -fr dir-*
+ -rm -f *.exe *.netmodule *.out *.pdb casts.cs
dist-local: dist-default
rm -f $(distdir)/casts.cs
-TEST_TAG = mcs
-
ifeq (net_2_0, $(PROFILE))
TEST_TAG = gmcs
+else
+TEST_TAG = mcs
endif
-test-compiler-jit-real:
- @rm -f *.exe *.dll *.netmodule $(TEST_TAG).log $(TEST_TAG)-*.log ; \
- logfile="$(TEST_TAG).log" ; \
- echo 'Running $(TEST_TAG) tests with flags "$(TEST_RUNTIME)" ... ' ; \
- for i in $(TEST_SOURCES) ; do \
- options=`sed -n 's,^// Compiler options:,,p' $$i.cs`; \
- testlogfile="$(TEST_TAG)-$$i.log" ; \
- echo -n "$$i: "; \
- echo "*** $(CSCOMPILE) $$options $$i.cs" > $$testlogfile ; \
- if $(CSCOMPILE) $$options $$i.cs >> $$testlogfile 2>&1 ; then \
- if test -f $$i.exe; then \
- echo "*** $(TEST_RUNTIME) ./$$i.exe" >> $$testlogfile ; \
- if $(TEST_RUNTIME) ./$$i.exe >> $$testlogfile 2>&1 ; then \
- echo "PASS: $$i" >> $$logfile ; \
- echo OK ; rm -f $$testlogfile ; \
- else \
- echo "Exit code: $$?" >> $$testlogfile ; \
- failed="$$failed $$i" ; \
- echo "FAIL: $$i" >> $$logfile ; \
- echo FAILED ; cat $$testlogfile ; \
- fi ; \
- else \
- echo "PASS: $$i: compilation" >> $$logfile ; \
- echo OK ; rm -f $$testlogfile ; \
- fi ; \
- else \
- echo "Exit code: $$?" >> $$testlogfile ; \
- failed="$$failed $$i" ; \
- echo "FAIL: $$i: compilation" >> $$logfile ; \
- echo FAILED COMPILATION ; cat $$testlogfile ; \
- fi ; \
- done ; \
+.PHONY: test-harness test-harness-run
+test-harness:
+ @$(MAKE) -s test-harness-run
+
+exe_tests := $(filter %-exe, $(TEST_SOURCES))
+
+test-harness-run:
+ @-rm -f $(TEST_TAG).log
+ @-rm -fr dir-$(TEST_TAG)
+ @mkdir dir-$(TEST_TAG)
+ @sed 's,@thisdir@,$(thisdir)/dir-$(TEST_TAG),' harness.mk > dir-$(TEST_TAG)/Makefile
+ @test -z "$(exe_tests)" || for i in ''$(exe_tests); do echo $$i | sed 's,\(.*\)-exe$$,\1-exe.res: \1-dll.res,' >> dir-$(TEST_TAG)/Makefile; done
+ @test -z "$(TEST_ORDERING)" || for i in ''$(TEST_ORDERING); do echo $$i.res | sed 's,:,.res: ,' >> dir-$(TEST_TAG)/Makefile; done
+ @echo 'Running $(TEST_TAG) tests with flags "$(TEST_RUNTIME)" ... '
+ @cd dir-$(TEST_TAG) ; \
+ $(MAKE) -s $(TEST_SOURCES:=.res) || failed="make " ; \
+ cat $(TEST_SOURCES:=.res) > ../$(TEST_TAG).log ; \
+ failed=$$failed`sed -n 's,^FAIL: ,,p' ../$(TEST_TAG).log` ; \
if test -z "$$failed"; then :; else echo "Failing tests: $$failed"; exit 1; fi
#