de810499bde095070b9d1e3e49d4c938be213def
[platform/upstream/cmake.git] / Tests / RunCMake / ctest_test / RunCMakeTest.cmake
include(RunCTest)
# Cap each case's total runtime so a hung ctest cannot stall the suite.
set(RunCMake_TEST_TIMEOUT 60)

# Clear environment settings that would alter ctest_test() behavior
# (parallelism, output-on-failure) so the cases below are deterministic.
unset(ENV{CTEST_PARALLEL_LEVEL})
unset(ENV{CTEST_OUTPUT_ON_FAILURE})

# Per-case knobs read when generating each case's test.cmake:
# extra ctest_test() arguments and a CTEST_TEST_LOAD value.
set(CASE_CTEST_TEST_ARGS "")
set(CASE_CTEST_TEST_LOAD "")
# Run one RunCTest case, forwarding any extra arguments to the
# ctest_test() call made by the generated test script.
function(run_ctest_test case)
  set(CASE_CTEST_TEST_ARGS "${ARGN}")
  run_ctest("${case}")
endfunction()
14
run_ctest_test(TestQuiet QUIET)

# Tests for the 'Test Load' feature of ctest
#
# Spoof a load average value to make these tests more reliable.
set(ENV{__CTEST_FAKE_LOAD_AVERAGE_FOR_TESTING} 5)
set(RunCTest_VERBOSE_FLAG -VV)

# Verify that new tests are started when the load average falls below
# our threshold (TEST_LOAD option form).
run_ctest_test(TestLoadPass TEST_LOAD 6)

# Verify that new tests are not started when the load average exceeds
# our threshold and that they then run once the load average drops.
run_ctest_test(TestLoadWait TEST_LOAD 2)

# Verify that when an invalid "TEST_LOAD" value is given, a warning
# message is displayed and the value is ignored.
run_ctest_test(TestLoadInvalid TEST_LOAD "ERR1")

# Verify that new tests are started when the load average falls below
# our threshold (CTEST_TEST_LOAD variable form).
set(CASE_CTEST_TEST_LOAD 7)
run_ctest_test(CTestTestLoadPass)

# Verify that new tests are not started when the load average exceeds
# our threshold and that they then run once the load average drops.
set(CASE_CTEST_TEST_LOAD 4)
run_ctest_test(CTestTestLoadWait)

# Verify that when an invalid "CTEST_TEST_LOAD" value is given,
# a warning message is displayed and the value is ignored.
set(CASE_CTEST_TEST_LOAD "ERR2")
run_ctest_test(CTestTestLoadInvalid)

# Verify that the "TEST_LOAD" option has higher precedence than
# the "CTEST_TEST_LOAD" variable.
set(CASE_CTEST_TEST_LOAD "ERR3")
run_ctest_test(TestLoadOrder TEST_LOAD "ERR4")

# Clean up the load-average spoofing and verbosity settings so they
# do not affect the cases that follow.
unset(ENV{__CTEST_FAKE_LOAD_AVERAGE_FOR_TESTING})
unset(CASE_CTEST_TEST_LOAD)
unset(RunCTest_VERBOSE_FLAG)
58
# Verify that a CTEST_CHANGE_ID containing XML-special characters ("<>")
# is handled; the prefix code is placed ahead of the generated test
# script body (see RunCTest).
function(run_TestChangeId)
  set(CASE_TEST_PREFIX_CODE [[
    set(CTEST_CHANGE_ID "<>1")
  ]])

  run_ctest(TestChangeId)
endfunction()
run_TestChangeId()
67
# Verify the CTEST_CUSTOM_MAXIMUM_{PASSED,FAILED}_TEST_OUTPUT_SIZE limits
# using deliberately tiny maxima (10 and 12 bytes).
function(run_TestOutputSize)
  set(CASE_CTEST_TEST_ARGS EXCLUDE RunCMakeVersion)
  # Prefix code runs in the generated test script before ctest_test().
  set(CASE_TEST_PREFIX_CODE [[
set(CTEST_CUSTOM_MAXIMUM_PASSED_TEST_OUTPUT_SIZE 10)
set(CTEST_CUSTOM_MAXIMUM_FAILED_TEST_OUTPUT_SIZE 12)
  ]])
  # One passing and one failing test so both limits are exercised.
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME PassingTest COMMAND ${CMAKE_COMMAND} -E echo PassingTestOutput)
add_test(NAME FailingTest COMMAND ${CMAKE_COMMAND} -E no_such_command)
  ]])

  run_ctest(TestOutputSize)
endfunction()
run_TestOutputSize()
82
# Verify that invalid REPEAT arguments (unknown mode, negative count)
# are rejected with an error.
run_ctest_test(TestRepeatBad1 REPEAT UNKNOWN:3)
run_ctest_test(TestRepeatBad2 REPEAT UNTIL_FAIL:-1)
85
# Run the TestRepeat<case> RunCTest case and verify the REPEAT behavior
# together with the final ctest_test() RETURN_VALUE.
#   case         - case-name suffix; ${RunCMake_SOURCE_DIR}/TestRepeat<case>.cmake
#                  is run as the test command with a shared COUNT_FILE.
#   return_value - expected ctest_test() result, spelled "RETURN_VALUE:<n>".
# Extra arguments (e.g. REPEAT UNTIL_FAIL:3) are forwarded to ctest_test().
function(run_TestRepeat case return_value )
  set(CASE_CTEST_TEST_ARGS RETURN_VALUE result EXCLUDE RunCMakeVersion ${ARGN})
  # CONCAT mixes bracket literals (kept verbatim for the generated
  # CMakeLists) with a quoted segment so ${case} is expanded here.
  string(CONCAT suffix_code [[
add_test(NAME testRepeat
  COMMAND ${CMAKE_COMMAND} -D COUNT_FILE=${CMAKE_CURRENT_BINARY_DIR}/count.cmake
                           -P "]] "${RunCMake_SOURCE_DIR}/TestRepeat${case}" [[.cmake")
set_property(TEST testRepeat PROPERTY TIMEOUT 5)
  ]])
  # Append so a caller-provided CASE_CMAKELISTS_SUFFIX_CODE is preserved.
  string(APPEND CASE_CMAKELISTS_SUFFIX_CODE "${suffix_code}")

  run_ctest(TestRepeat${case})

  # Append logic to the end of the generated test script to verify we get
  # the expected return code.
  # NOTE(review): this append happens after run_ctest(); confirm against
  # RunCTest.cmake that the script is (re-)executed afterwards, otherwise
  # this verification never runs.
  string(REPLACE "RETURN_VALUE:" "" return_value "${return_value}" )
  file(APPEND "${RunCMake_BINARY_DIR}/TestRepeat${case}/test.cmake"
"

  set(expected_result ${return_value})
  message(STATUS \${result})
  if(NOT result EQUAL expected_result)
    message(FATAL_ERROR \"expected a return value of: \${expected_result},
                         instead got: \${result}\")
  endif()
"
  )
endfunction()
113
# Each REPEAT mode, with the expected overall ctest_test() result.
run_TestRepeat(UntilFail RETURN_VALUE:1 REPEAT UNTIL_FAIL:3)
run_TestRepeat(UntilPass RETURN_VALUE:0 REPEAT UNTIL_PASS:3)
run_TestRepeat(AfterTimeout RETURN_VALUE:0 REPEAT AFTER_TIMEOUT:3)

# Test that REPEAT and not-run tests interact correctly: a test whose
# command does not exist can never pass, so the overall result must fail.
set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME testNotRun
  COMMAND ${CMAKE_COMMAND}/doesnt_exist)
  set_property(TEST testNotRun PROPERTY TIMEOUT 5)
  ]])
run_TestRepeat(NotRun RETURN_VALUE:1 REPEAT UNTIL_PASS:3)
unset(CASE_CMAKELISTS_SUFFIX_CODE)
126
# Test STOP_ON_FAILURE: after StoppingTest fails, NotRunTest must not run.
function(run_stop_on_failure)
  # NOTE(review): run_ctest_test() re-sets CASE_CTEST_TEST_ARGS from its own
  # arguments in its function scope, which shadows this value — confirm the
  # EXCLUDE is intentional or fold it into the run_ctest_test() call below.
  set(CASE_CTEST_TEST_ARGS EXCLUDE RunCMakeVersion)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME StoppingTest COMMAND ${CMAKE_COMMAND} -E false)
add_test(NAME NotRunTest COMMAND ${CMAKE_COMMAND} -E true)
  ]])

  run_ctest_test(stop-on-failure STOP_ON_FAILURE)
endfunction()
run_stop_on_failure()
138
# Make sure per-test ENVIRONMENT properties get logged, and that a
# pre-existing (deliberately odd) environment variable is handled.
function(run_environment)
  set(ENV{BAD_ENVIRONMENT_VARIABLE} "Bad environment variable")
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
set_property(TEST RunCMakeVersion PROPERTY ENVIRONMENT "ENV1=env1;ENV2=env2")
  ]])

  run_ctest(TestEnvironment)

  # set(ENV{...}) mutates the real process environment (not a
  # function-scope variable), so clean up here to keep the variable
  # from leaking into every case run after this one.
  unset(ENV{BAD_ENVIRONMENT_VARIABLE})
endfunction()
run_environment()
149
# Test the OUTPUT_JUNIT option, combined with REPEAT so repeated runs
# appear in the generated junit.xml.
run_ctest_test(OutputJUnit OUTPUT_JUNIT junit.xml REPEAT UNTIL_FAIL:2)
152
# Verify that extra measurements get reported.  Each test echoes a
# measurement element on stdout for CTest to pick up; the Dart* tags are
# the legacy spellings, the CTest* tags the modern equivalents.
# NOTE(review): ${IMAGE_DIR} is expanded at configure time here and is not
# set in this file — it must be provided by the surrounding harness.
function(run_measurements)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(
  NAME double_measurement
  COMMAND ${CMAKE_COMMAND} -E
  echo <DartMeasurement type="numeric/double" name="my_custom_value">1.4847</DartMeasurement>)
add_test(
  NAME double_measurement2
  COMMAND ${CMAKE_COMMAND} -E
  echo <CTestMeasurement type="numeric/double" name="another_custom_value">1.8474</CTestMeasurement>)
add_test(
  NAME img_measurement
  COMMAND ${CMAKE_COMMAND} -E
  echo <DartMeasurementFile name="TestImage" type="image/png">]] ${IMAGE_DIR}/cmake-logo-16.png [[</DartMeasurementFile>)
add_test(
  NAME img_measurement2
  COMMAND ${CMAKE_COMMAND} -E
  echo <CTestMeasurementFile name="TestImage2" type="image/png">]] ${IMAGE_DIR}/cmake-logo-16.png [[</CTestMeasurementFile>)
add_test(
  NAME file_measurement
  COMMAND ${CMAKE_COMMAND} -E
  echo <DartMeasurementFile name="my_test_input_data" type="file">]] ${IMAGE_DIR}/cmake-logo-16.png [[</DartMeasurementFile>)
add_test(
  NAME file_measurement2
  COMMAND ${CMAKE_COMMAND} -E
  echo <CTestMeasurementFile name="another_test_input_data" type="file">]] ${IMAGE_DIR}/cmake-logo-16.png [[</CTestMeasurementFile>)
  ]])
  run_ctest(TestMeasurements)
endfunction()
run_measurements()
184
# Verify that test output can override the Completion Status via a
# <CTestDetails> element embedded in the middle of the test's output.
function(run_completion_status)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(
  NAME custom_details
  COMMAND ${CMAKE_COMMAND} -E
  echo test output\n<CTestDetails>CustomDetails</CTestDetails>\nmore output)
  ]])
  run_ctest(TestCompletionStatus)
endfunction()
run_completion_status()
196
# Verify that running ctest_test() multiple times with different label
# arguments doesn't break.  Two trivially-passing tests carry distinct
# LABELS so label-filtered runs select different subsets.
function(run_changing_labels)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME a COMMAND ${CMAKE_COMMAND} -E true)
set_property(TEST a PROPERTY LABELS a)
add_test(NAME b COMMAND ${CMAKE_COMMAND} -E true)
set_property(TEST b PROPERTY LABELS b)
  ]])
  run_ctest(TestChangingLabels)
endfunction()
run_changing_labels()
209
# Verify that test output can add additional labels via <CTestLabel>
# elements.  The output repeats label2 and the LABELS property already
# holds label1 — presumably the result is de-duplicated; verify against
# this case's check script.
function(run_extra_labels)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(
  NAME custom_labels
  COMMAND ${CMAKE_COMMAND} -E
  echo before\n<CTestLabel>label2</CTestLabel>\n<CTestLabel>label1</CTestLabel>\n<CTestLabel>label3</CTestLabel>\n<CTestLabel>label2</CTestLabel>\nafter)
set_tests_properties(custom_labels PROPERTIES LABELS "label1")
  ]])
  run_ctest(TestExtraLabels)
endfunction()
run_extra_labels()