--- /dev/null
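+# Tests excluded when running the CoreFX test suites under the Mono interpreter.
+# This file is passed to the xunit runner as a response file via XUNIT_ARGS (see
+# the -interpreter option in the build script); -nomethod/-noclass/-nonamespace
+# exclude individual test methods, whole classes, and whole namespaces, and
+# entries may use trailing wildcards.
+
+# fails on Interpreter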
+-nomethod System.Runtime.CompilerServices.Tests.RuntimeFeatureTests.DynamicCode_Jit
+-nomethod System.Net.Security.Tests.SslStreamStreamToStreamTest_*
+-nomethod XmlSerializerTests.Xml_Nullables
+-nomethod XmlSerializerTests.Xml_Soap_WithNullables
+-nomethod System.IO.Compression.DeflateStreamUnitTests.*
+-nomethod System.IO.Compression.GzipStreamUnitTests.*
+-nomethod System.Collections.Concurrent.Tests.ConcurrentDictionary_Generic_Tests_enum_enum.IDictionary_Generic_Add_DuplicateValue
+-nomethod System.ComponentModel.Tests.ToolboxItemFilterAttributeTests.Equals_Object_ReturnsExpected
+-nomethod System.Net.Http.Functional.Tests.MultipartContentTest.ReadAsStreamAsync_LargeContent_AllBytesRead
+-nomethod System.Net.Tests.ServicePointManagerTest.FindServicePoint_Collectible
+-nomethod System.Reflection.Metadata.Tests.ImmutableByteArrayInteropTest.DangerousCreateFromUnderlyingArray
+-nomethod DataContractJsonSerializerTests.DCJS_Nullables
+-nomethod DataContractSerializerTests.DCS_Nullables
+-nomethod System.Security.Cryptography.Hashing.Tests.HashAlgorithmTest.VerifyComputeHashAsync
+
+# crashes on Interpreter
+-nomethod ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.statements.checked005.checked005.Test.DynamicCSharpRunTest
+-nomethod System.Linq.Expressions.Tests.ConvertCheckedTests.ConvertCheckedNullableFloatToSByteTest
+-nomethod System.Dynamic.Tests.BinaryOperationTests.ModuloDouble
+-nomethod System.Net.Http.Functional.Tests.SocketsHttpHandler*
+-nomethod System.Net.Tests.HttpRequestStreamTests.Read_LargeLengthAsynchronous_Success
+-nomethod System.Net.Tests.HttpRequestStreamTests.Read_LargeLengthSynchronous_Success
+-nomethod System.Net.Tests.FileWebRequestTest.InvalidArguments_Throws
+-nomethod System.Net.Tests.AuthorizationTest.MutuallyAuthenticated_Values_ExpectEqualValues
+-nomethod System.Text.Json.Tests.JsonEncodedTextTests.ReplacementCharacterUTF8
+-noclass System.Net.Tests.HttpWebRequestTest
+-nonamespace System.Linq.Expressions.Tests
+-nonamespace System.Runtime.InteropServices.Tests
+-nonamespace System.Net.Tests
+-nonamespace System.Net.Http.Functional.Tests
+-nonamespace System.Text.Json
+
+# BrotliStream.BaseStream returned more bytes than requested in Read on Interpreter
+-nomethod System.IO.Compression.BrotliStreamUnitTests.Read
+-nomethod System.IO.Compression.BrotliStreamUnitTests.Read_SequentialReadsOnMemoryStream_Return_SameBytes
+-nomethod System.IO.Compression.BrotliStreamUnitTests.Parallel_CompressDecompressMultipleStreamsConcurrently
+-nomethod System.IO.Compression.BrotliStreamUnitTests.Read_BaseStreamSlowly
+-nomethod System.IO.Compression.BrotliStreamUnitTests.CompressDecompress_RoundTrip
+-nomethod System.IO.Compression.BrotliStreamUnitTests.Flush_RoundTrip
+-nomethod System.IO.Compression.BrotliStreamUnitTests.Flush_BeforeFirstWrites
+-nomethod System.IO.Compression.BrotliStreamUnitTests.Flush_Consecutive
+
+# extremely slow or hangs on Interpreter
+-nomethod System.IO.Compression.BrotliStreamUnitTests.WrapStreamReturningBadReadValues
+-nomethod System.Runtime.Serialization.Formatters.Tests.BinaryFormatterTests.*
+-nomethod System.Net.Sockets.Tests.SendReceiveSyncForceNonBlocking.TcpReceiveSendGetsCanceledByDispose
+
+# https://github.com/mono/mono/issues/18061 [interpreter] Conversions.ToX(NaN) are expected to throw OverflowException
+-nomethod Microsoft.VisualBasic.Tests.ConversionsTests.*
+-nomethod Microsoft.VisualBasic.CompilerServices.Tests.IntegerTypeTests.FromString_ThrowsOverflowException
+
+# https://github.com/mono/mono/issues/18063 [interpreter] EqualityComparer<TEnum?>.Default.Equals doesn't work correctly
+-nomethod System.Collections.Generic.Tests.EqualityComparerTests.NullableEquals*
# Requires precise GC (should be ignored in dotnet/corefx for mono)
-nomethod System.Collections.Concurrent.Tests.ConcurrentQueueTests.ReferenceTypes_NulledAfterDequeue
+-nomethod System.Threading.Tests.ThreadLocalTests.RunThreadLocalTest7_WeakReference
+-nomethod System.ComponentModel.EventBasedAsync.Tests.BackgroundWorkerTests.TestFinalization
# fails with some OpenSSL error
-nomethod System.Net.Security.Tests.ServerRequireEncryptionTest.ServerRequireEncryption_ClientNoEncryption_NoConnect
-nomethod System.Net.Security.Tests.ServerAsyncAuthenticateTest.ServerAsyncAuthenticate_MismatchProtocols_Fails
-nomethod System.Net.Security.Tests.ClientAsyncAuthenticateTest.ClientAsyncAuthenticate_MismatchProtocols_Fails
-# fails on LLVM
--nomethod System.Tests.StringTests.StartsWith
+# https://github.com/mono/mono/issues/18067 LLVM: try-catch doesn't catch an exception
-nomethod System.Reflection.Tests.BindingFlagsDoNotWrapTests.*
--nomethod System.Tests.TypeTests.FilterName_Invoke_DelegateFiltersExpectedMembers
-
-# fails on Interpreter
--nomethod System.Runtime.CompilerServices.Tests.RuntimeFeatureTests.DynamicCode_Jit
counter=$$((counter+1)); \
testname=$$(basename $$testdir); \
if [ -n "$$USE_TIMEOUT" ]; then timeoutcmd="../scripts/ci/run-step.sh --label=$$testname --timeout=10m --fatal"; fi; \
- $$timeoutcmd $(MAKE) run-tests-corefx-$$testname TEST_COUNTER="($$counter / $$tests_count) " || echo $$testname >> .failures; \
+ $$timeoutcmd $(MAKE) run-tests-corefx-$$testname XUNIT_MONO_ENV_OPTIONS="$(XUNIT_MONO_ENV_OPTIONS)" XUNIT_ARGS="$(XUNIT_ARGS)" TEST_COUNTER="($$counter / $$tests_count) " || echo $$testname >> .failures; \
done; \
$(MAKE) xunit-summary; \
if [ -e ".failures" ]; then \
echo "Actions:"
echo " --pack Package build outputs into NuGet packages"
echo " --test Run all unit tests in the solution (short: -t)"
+ echo " --interpreter Run tests with interpreter"
echo " --rebuild Run ../.autogen.sh"
echo " --llvm Enable LLVM support"
echo " --skipnative Do not build runtime"
pack=false
configuration='Debug'
+test_mono_flags=''
+test_xunit_flags=''
properties=''
force_rebuild=false
test=false
-test|-t)
test=true
;;
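+	# Run the CoreFX tests under the interpreter: pass --interpreter to the runtime,
+	# feed the interpreter exclusion list to the xunit runner as a response file,
+	# and disable parallel test execution.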
+ -interpreter)
+ test_mono_flags="$test_mono_flags --interpreter"
+ test_xunit_flags="$test_xunit_flags @../../../../CoreFX.issues_interpreter.rsp -parallel none"
+ ;;
-rebuild)
force_rebuild=true
;;
;;
-llvm)
llvm=true
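+	# when tests are run, also pass --llvm to the runtime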
+ test_mono_flags="$test_mono_flags --llvm"
;;
-ci)
ci=true
if [ "$test" = "true" ]; then
make update-tests-corefx || (Write-PipelineTelemetryError -c "tests-download" -e 1 "Error downloading tests" && exit 1)
if [ "$ci" = "true" ]; then
- make run-tests-corefx USE_TIMEOUT=1 || (Write-PipelineTelemetryError -c "tests" -e 1 "Error running tests" && exit 1)
+ make run-tests-corefx XUNIT_MONO_ENV_OPTIONS="$test_mono_flags" XUNIT_ARGS="$test_xunit_flags" USE_TIMEOUT=1 || (Write-PipelineTelemetryError -c "tests" -e 1 "Error running tests" && exit 1)
else
- make run-tests-corefx
+ make run-tests-corefx XUNIT_MONO_ENV_OPTIONS="$test_mono_flags" XUNIT_ARGS="$test_xunit_flags"
fi
fi
sys.exit(1)
test_dir = sys.argv [1]
-class TestResults():
+class AssemblyTestResults():
def __init__(self, name, total, passed, failed, skipped, errors, time):
self.name = name
self.total = total
self.skipped = skipped
self.time = time
-print("")
+class TestInfo():
+ def __init__(self, name, time):
+ self.name = name
+ self.time = time
+
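+# per-assembly summaries and per-test timings parsed from the *-xunit.xml result files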
+test_assemblies = []
+test_items = []
-tests = []
for testfile in glob.glob(test_dir + "/*-xunit.xml"):
assemblies = ET.parse(testfile).getroot()
for assembly in assemblies:
if test_name is None:
print("WARNING: %s has no tests!" % ntpath.basename(testfile))
continue
- tests.append(TestResults(test_name,
+ test_assemblies.append(AssemblyTestResults(test_name,
int(assembly.attrib["total"]),
int(assembly.attrib["passed"]),
int(assembly.attrib["failed"]),
int(assembly.attrib["skipped"]),
int(assembly.attrib["errors"]),
float(assembly.attrib["time"])))
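+        # also record each individual test's name and duration for the slowest-tests report below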
+ for collection in assembly.iter("collection"):
+ for test in collection.iter("test"):
+ test_items.append(TestInfo(test.attrib["name"],
+ float(test.attrib["time"])))
-# sort by name
-tests.sort(key=lambda item: item.name)
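+# assemblies with the most failures listed first; individual tests sorted longest-running first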
+test_assemblies.sort(key=lambda item: (item.failed, item.name), reverse=True)
+test_items.sort(key=lambda item: (item.time), reverse=True)
print("")
+print("")
print("=" * 105)
-for t in tests:
+for t in test_assemblies:
    #if t.failed > 0: # uncomment to list only test suites with failures
print("{0:<60} Total:{1:<6} Failed:{2:<6} Time:{3} sec".format(t.name, t.total, t.failed, round(t.time, 1)))
print("=" * 105)
-
print("")
-print("Total test suites: %d" % len(tests))
-print("Total tests run: %d" % sum(x.total for x in tests))
-print("Total tests passed: %d" % sum(x.passed for x in tests))
-print("Total tests failed: %d" % sum(x.failed for x in tests))
-print("Total tests skipped: %d" % sum(x.skipped for x in tests))
-print("Total duration: %d min" % (sum(x.time for x in tests) / 60))
+print("Total test suites: %d" % len(test_assemblies))
+print("Total tests run: %d" % sum(x.total for x in test_assemblies))
+print("Total tests passed: %d" % sum(x.passed for x in test_assemblies))
+print("Total tests failed: %d" % sum(x.failed for x in test_assemblies))
+print("Total tests skipped: %d" % sum(x.skipped for x in test_assemblies))
+print("Total duration: %d min" % (sum(x.time for x in test_assemblies) / 60))
print("")
-
+print("")
+print("Top 20 slowest tests:")
+print("=" * 105)
+for t in test_items[:20]:
+ print("{0:<89} Time:{1} sec".format(t.name[:88], round(t.time, 1)))
+print("")
\ No newline at end of file