1 // Copyright 2014 The Kyua Authors. 2 // All rights reserved. 3 // 4 // Redistribution and use in source and binary forms, with or without 5 // modification, are permitted provided that the following conditions are 6 // met: 7 // 8 // * Redistributions of source code must retain the above copyright 9 // notice, this list of conditions and the following disclaimer. 10 // * Redistributions in binary form must reproduce the above copyright 11 // notice, this list of conditions and the following disclaimer in the 12 // documentation and/or other materials provided with the distribution. 13 // * Neither the name of Google Inc. nor the names of its contributors 14 // may be used to endorse or promote products derived from this software 15 // without specific prior written permission. 16 // 17 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 18 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 19 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 20 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 21 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 22 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 23 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 29 #include "drivers/report_junit.hpp" 30 31 #include <sstream> 32 #include <vector> 33 34 #include <atf-c++.hpp> 35 36 #include "drivers/scan_results.hpp" 37 #include "engine/filters.hpp" 38 #include "model/context.hpp" 39 #include "model/metadata.hpp" 40 #include "model/test_case.hpp" 41 #include "model/test_program.hpp" 42 #include "model/test_result.hpp" 43 #include "store/write_backend.hpp" 44 #include "store/write_transaction.hpp" 45 #include "utils/datetime.hpp" 46 #include "utils/format/macros.hpp" 47 #include "utils/fs/path.hpp" 48 #include "utils/optional.ipp" 49 #include "utils/units.hpp" 50 51 namespace datetime = utils::datetime; 52 namespace fs = utils::fs; 53 namespace units = utils::units; 54 55 using utils::none; 56 57 58 namespace { 59 60 61 /// Formatted metadata for a test case with defaults. 62 static const char* const default_metadata = 63 "allowed_architectures is empty\n" 64 "allowed_platforms is empty\n" 65 "description is empty\n" 66 "has_cleanup = false\n" 67 "is_exclusive = false\n" 68 "required_configs is empty\n" 69 "required_disk_space = 0\n" 70 "required_files is empty\n" 71 "required_memory = 0\n" 72 "required_programs is empty\n" 73 "required_user is empty\n" 74 "timeout = 300\n"; 75 76 77 /// Formatted metadata for a test case constructed with the "with_metadata" flag 78 /// set to true in add_tests. 79 static const char* const overriden_metadata = 80 "allowed_architectures is empty\n" 81 "allowed_platforms is empty\n" 82 "description = Textual description\n" 83 "has_cleanup = false\n" 84 "is_exclusive = false\n" 85 "required_configs is empty\n" 86 "required_disk_space = 0\n" 87 "required_files is empty\n" 88 "required_memory = 0\n" 89 "required_programs is empty\n" 90 "required_user is empty\n" 91 "timeout = 5678\n"; 92 93 94 /// Populates the context of the given database. 95 /// 96 /// \param tx Transaction to use for the writes to the database. 97 /// \param env_vars Number of environment variables to add to the context. 98 static void 99 add_context(store::write_transaction& tx, const std::size_t env_vars) 100 { 101 std::map< std::string, std::string > env; 102 for (std::size_t i = 0; i < env_vars; i++) 103 env[F("VAR%s") % i] = F("Value %s") % i; 104 const model::context context(fs::path("/root"), env); 105 (void)tx.put_context(context); 106 } 107 108 109 /// Adds a new test program with various test cases to the given database. 110 /// 111 /// \param tx Transaction to use for the writes to the database. 112 /// \param prog Test program name. 113 /// \param results Collection of results for the added test cases. The size of 114 /// this vector indicates the number of tests in the test program. 115 /// \param with_metadata Whether to add metadata overrides to the test cases. 116 /// \param with_output Whether to add stdout/stderr messages to the test cases. 117 static void 118 add_tests(store::write_transaction& tx, 119 const char* prog, 120 const std::vector< model::test_result >& results, 121 const bool with_metadata, const bool with_output) 122 { 123 model::test_program_builder test_program_builder( 124 "plain", fs::path(prog), fs::path("/root"), "suite"); 125 126 for (std::size_t j = 0; j < results.size(); j++) { 127 model::metadata_builder builder; 128 if (with_metadata) { 129 builder.set_description("Textual description"); 130 builder.set_timeout(datetime::delta(5678, 0)); 131 } 132 test_program_builder.add_test_case(F("t%s") % j, builder.build()); 133 } 134 135 const model::test_program test_program = test_program_builder.build(); 136 const int64_t tp_id = tx.put_test_program(test_program); 137 138 for (std::size_t j = 0; j < results.size(); j++) { 139 const int64_t tc_id = tx.put_test_case(test_program, F("t%s") % j, 140 tp_id); 141 const datetime::timestamp start = 142 datetime::timestamp::from_microseconds(0); 143 const datetime::timestamp end = 144 datetime::timestamp::from_microseconds(j * 1000000 + 500000); 145 tx.put_result(results[j], tc_id, start, end); 146 147 if (with_output) { 148 atf::utils::create_file("fake-out", F("stdout file %s") % j); 149 tx.put_test_case_file("__STDOUT__", fs::path("fake-out"), tc_id); 150 atf::utils::create_file("fake-err", F("stderr file %s") % j); 151 tx.put_test_case_file("__STDERR__", fs::path("fake-err"), tc_id); 152 } 153 } 154 } 155 156 157 } // anonymous namespace 158 159 160 ATF_TEST_CASE_WITHOUT_HEAD(junit_classname); 161 ATF_TEST_CASE_BODY(junit_classname) 162 { 163 const model::test_program test_program = model::test_program_builder( 164 "plain", fs::path("dir1/dir2/program"), fs::path("/root"), "suite") 165 .build(); 166 167 ATF_REQUIRE_EQ("dir1.dir2.program", drivers::junit_classname(test_program)); 168 } 169 170 171 ATF_TEST_CASE_WITHOUT_HEAD(junit_duration); 172 ATF_TEST_CASE_BODY(junit_duration) 173 { 174 ATF_REQUIRE_EQ("0.457", 175 drivers::junit_duration(datetime::delta(0, 456700))); 176 ATF_REQUIRE_EQ("3.120", 177 drivers::junit_duration(datetime::delta(3, 120000))); 178 ATF_REQUIRE_EQ("5.000", drivers::junit_duration(datetime::delta(5, 0))); 179 } 180 181 182 ATF_TEST_CASE_WITHOUT_HEAD(junit_metadata__defaults); 183 ATF_TEST_CASE_BODY(junit_metadata__defaults) 184 { 185 const model::metadata metadata = model::metadata_builder().build(); 186 187 const std::string expected = std::string() 188 + drivers::junit_metadata_header 189 + default_metadata; 190 191 ATF_REQUIRE_EQ(expected, drivers::junit_metadata(metadata)); 192 } 193 194 195 ATF_TEST_CASE_WITHOUT_HEAD(junit_metadata__overrides); 196 ATF_TEST_CASE_BODY(junit_metadata__overrides) 197 { 198 const model::metadata metadata = model::metadata_builder() 199 .add_allowed_architecture("arch1") 200 .add_allowed_platform("platform1") 201 .set_description("This is a test") 202 .set_has_cleanup(true) 203 .set_is_exclusive(true) 204 .add_required_config("config1") 205 .set_required_disk_space(units::bytes(456)) 206 .add_required_file(fs::path("file1")) 207 .set_required_memory(units::bytes(123)) 208 .add_required_program(fs::path("prog1")) 209 .set_required_user("root") 210 .set_timeout(datetime::delta(10, 0)) 211 .build(); 212 213 const std::string expected = std::string() 214 + drivers::junit_metadata_header 215 + "allowed_architectures = arch1\n" 216 + "allowed_platforms = platform1\n" 217 + "description = This is a test\n" 218 + "has_cleanup = true\n" 219 + "is_exclusive = true\n" 220 + "required_configs = config1\n" 221 + "required_disk_space = 456\n" 222 + "required_files = file1\n" 223 + "required_memory = 123\n" 224 + "required_programs = prog1\n" 225 + "required_user = root\n" 226 + "timeout = 10\n"; 227 228 ATF_REQUIRE_EQ(expected, drivers::junit_metadata(metadata)); 229 } 230 231 232 ATF_TEST_CASE_WITHOUT_HEAD(junit_timing); 233 ATF_TEST_CASE_BODY(junit_timing) 234 { 235 const std::string expected = std::string() 236 + drivers::junit_timing_header + 237 "Start time: 2015-06-12T01:02:35.123456Z\n" 238 "End time: 2016-07-13T18:47:10.000001Z\n" 239 "Duration: 34364674.877s\n"; 240 241 const datetime::timestamp start_time = 242 datetime::timestamp::from_values(2015, 6, 12, 1, 2, 35, 123456); 243 const datetime::timestamp end_time = 244 datetime::timestamp::from_values(2016, 7, 13, 18, 47, 10, 1); 245 246 ATF_REQUIRE_EQ(expected, drivers::junit_timing(start_time, end_time)); 247 } 248 249 250 ATF_TEST_CASE_WITHOUT_HEAD(report_junit_hooks__minimal); 251 ATF_TEST_CASE_BODY(report_junit_hooks__minimal) 252 { 253 store::write_backend backend = store::write_backend::open_rw( 254 fs::path("test.db")); 255 store::write_transaction tx = backend.start_write(); 256 add_context(tx, 0); 257 tx.commit(); 258 backend.close(); 259 260 std::ostringstream output; 261 262 drivers::report_junit_hooks hooks(output); 263 drivers::scan_results::drive(fs::path("test.db"), 264 std::set< engine::test_filter >(), 265 hooks); 266 267 const char* expected = 268 "<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>\n" 269 "<testsuite>\n" 270 "<properties>\n" 271 "<property name=\"cwd\" value=\"/root\"/>\n" 272 "</properties>\n" 273 "</testsuite>\n"; 274 ATF_REQUIRE_EQ(expected, output.str()); 275 } 276 277 278 ATF_TEST_CASE_WITHOUT_HEAD(report_junit_hooks__some_tests); 279 ATF_TEST_CASE_BODY(report_junit_hooks__some_tests) 280 { 281 std::vector< model::test_result > results1; 282 results1.push_back(model::test_result( 283 model::test_result_broken, "Broken")); 284 results1.push_back(model::test_result( 285 model::test_result_expected_failure, "XFail")); 286 results1.push_back(model::test_result( 287 model::test_result_failed, "Failed")); 288 std::vector< model::test_result > results2; 289 results2.push_back(model::test_result( 290 model::test_result_passed)); 291 results2.push_back(model::test_result( 292 model::test_result_skipped, "Skipped")); 293 294 store::write_backend backend = store::write_backend::open_rw( 295 fs::path("test.db")); 296 store::write_transaction tx = backend.start_write(); 297 add_context(tx, 2); 298 add_tests(tx, "dir/prog-1", results1, false, false); 299 add_tests(tx, "dir/sub/prog-2", results2, true, true); 300 tx.commit(); 301 backend.close(); 302 303 std::ostringstream output; 304 305 drivers::report_junit_hooks hooks(output); 306 drivers::scan_results::drive(fs::path("test.db"), 307 std::set< engine::test_filter >(), 308 hooks); 309 310 const std::string expected = std::string() + 311 "<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>\n" 312 "<testsuite>\n" 313 "<properties>\n" 314 "<property name=\"cwd\" value=\"/root\"/>\n" 315 "<property name=\"env.VAR0\" value=\"Value 0\"/>\n" 316 "<property name=\"env.VAR1\" value=\"Value 1\"/>\n" 317 "</properties>\n" 318 319 "<testcase classname=\"dir.prog-1\" name=\"t0\" time=\"0.500\">\n" 320 "<error message=\"Broken\"/>\n" 321 "<system-err>" 322 + drivers::junit_metadata_header + 323 default_metadata 324 + drivers::junit_timing_header + 325 "Start time: 1970-01-01T00:00:00.000000Z\n" 326 "End time: 1970-01-01T00:00:00.500000Z\n" 327 "Duration: 0.500s\n" 328 + drivers::junit_stderr_header + 329 "<EMPTY>\n" 330 "</system-err>\n" 331 "</testcase>\n" 332 333 "<testcase classname=\"dir.prog-1\" name=\"t1\" time=\"1.500\">\n" 334 "<system-err>" 335 "Expected failure result details\n" 336 "-------------------------------\n" 337 "\n" 338 "XFail\n" 339 "\n" 340 + drivers::junit_metadata_header + 341 default_metadata 342 + drivers::junit_timing_header + 343 "Start time: 1970-01-01T00:00:00.000000Z\n" 344 "End time: 1970-01-01T00:00:01.500000Z\n" 345 "Duration: 1.500s\n" 346 + drivers::junit_stderr_header + 347 "<EMPTY>\n" 348 "</system-err>\n" 349 "</testcase>\n" 350 351 "<testcase classname=\"dir.prog-1\" name=\"t2\" time=\"2.500\">\n" 352 "<failure message=\"Failed\"/>\n" 353 "<system-err>" 354 + drivers::junit_metadata_header + 355 default_metadata 356 + drivers::junit_timing_header + 357 "Start time: 1970-01-01T00:00:00.000000Z\n" 358 "End time: 1970-01-01T00:00:02.500000Z\n" 359 "Duration: 2.500s\n" 360 + drivers::junit_stderr_header + 361 "<EMPTY>\n" 362 "</system-err>\n" 363 "</testcase>\n" 364 365 "<testcase classname=\"dir.sub.prog-2\" name=\"t0\" time=\"0.500\">\n" 366 "<system-out>stdout file 0</system-out>\n" 367 "<system-err>" 368 + drivers::junit_metadata_header + 369 overriden_metadata 370 + drivers::junit_timing_header + 371 "Start time: 1970-01-01T00:00:00.000000Z\n" 372 "End time: 1970-01-01T00:00:00.500000Z\n" 373 "Duration: 0.500s\n" 374 + drivers::junit_stderr_header + 375 "stderr file 0</system-err>\n" 376 "</testcase>\n" 377 378 "<testcase classname=\"dir.sub.prog-2\" name=\"t1\" time=\"1.500\">\n" 379 "<skipped/>\n" 380 "<system-out>stdout file 1</system-out>\n" 381 "<system-err>" 382 "Skipped result details\n" 383 "----------------------\n" 384 "\n" 385 "Skipped\n" 386 "\n" 387 + drivers::junit_metadata_header + 388 overriden_metadata 389 + drivers::junit_timing_header + 390 "Start time: 1970-01-01T00:00:00.000000Z\n" 391 "End time: 1970-01-01T00:00:01.500000Z\n" 392 "Duration: 1.500s\n" 393 + drivers::junit_stderr_header + 394 "stderr file 1</system-err>\n" 395 "</testcase>\n" 396 397 "</testsuite>\n"; 398 ATF_REQUIRE_EQ(expected, output.str()); 399 } 400 401 402 ATF_INIT_TEST_CASES(tcs) 403 { 404 ATF_ADD_TEST_CASE(tcs, junit_classname); 405 406 ATF_ADD_TEST_CASE(tcs, junit_duration); 407 408 ATF_ADD_TEST_CASE(tcs, junit_metadata__defaults); 409 ATF_ADD_TEST_CASE(tcs, junit_metadata__overrides); 410 411 ATF_ADD_TEST_CASE(tcs, junit_timing); 412 413 ATF_ADD_TEST_CASE(tcs, report_junit_hooks__minimal); 414 ATF_ADD_TEST_CASE(tcs, report_junit_hooks__some_tests); 415 } 416