// Copyright 2014 The Kyua Authors.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
//   notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
//   notice, this list of conditions and the following disclaimer in the
//   documentation and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors
//   may be used to endorse or promote products derived from this software
//   without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "drivers/report_junit.hpp"

#include <cstddef>
#include <cstdint>
#include <map>
#include <set>
#include <sstream>
#include <string>
#include <vector>

#include <atf-c++.hpp>

#include "drivers/scan_results.hpp"
#include "engine/filters.hpp"
#include "model/context.hpp"
#include "model/metadata.hpp"
#include "model/test_case.hpp"
#include "model/test_program.hpp"
#include "model/test_result.hpp"
#include "store/write_backend.hpp"
#include "store/write_transaction.hpp"
#include "utils/datetime.hpp"
#include "utils/format/macros.hpp"
#include "utils/fs/path.hpp"
#include "utils/optional.ipp"
#include "utils/units.hpp"

namespace datetime = utils::datetime;
namespace fs = utils::fs;
namespace units = utils::units;

using utils::none;


namespace {


/// Formatted metadata for a test case with defaults.
static const char* const default_metadata =
    "allowed_architectures is empty\n"
    "allowed_platforms is empty\n"
    "description is empty\n"
    "execenv is empty\n"
    "execenv_jail_params is empty\n"
    "has_cleanup = false\n"
    "is_exclusive = false\n"
    "required_configs is empty\n"
    "required_disk_space = 0\n"
    "required_files is empty\n"
    "required_memory = 0\n"
    "required_programs is empty\n"
    "required_user is empty\n"
    "timeout = 300\n";


/// Formatted metadata for a test case constructed with the "with_metadata" flag
/// set to true in add_tests.
static const char* const overriden_metadata =
    "allowed_architectures is empty\n"
    "allowed_platforms is empty\n"
    "description = Textual description\n"
    "execenv is empty\n"
    "execenv_jail_params is empty\n"
    "has_cleanup = false\n"
    "is_exclusive = false\n"
    "required_configs is empty\n"
    "required_disk_space = 0\n"
    "required_files is empty\n"
    "required_memory = 0\n"
    "required_programs is empty\n"
    "required_user is empty\n"
    "timeout = 5678\n";


/// Populates the context of the given database.
///
/// \param tx Transaction to use for the writes to the database.
/// \param env_vars Number of environment variables to add to the context.
static void
add_context(store::write_transaction& tx, const std::size_t env_vars)
{
    std::map< std::string, std::string > env;
    for (std::size_t i = 0; i < env_vars; i++)
        env[F("VAR%s") % i] = F("Value %s") % i;
    const model::context context(fs::path("/root"), env);
    (void)tx.put_context(context);
}


/// Adds a new test program with various test cases to the given database.
///
/// \param tx Transaction to use for the writes to the database.
/// \param prog Test program name.
/// \param results Collection of results for the added test cases.  The size of
///     this vector indicates the number of tests in the test program.
/// \param with_metadata Whether to add metadata overrides to the test cases.
/// \param with_output Whether to add stdout/stderr messages to the test cases.
static void
add_tests(store::write_transaction& tx,
          const char* prog,
          const std::vector< model::test_result >& results,
          const bool with_metadata, const bool with_output)
{
    model::test_program_builder test_program_builder(
        "plain", fs::path(prog), fs::path("/root"), "suite");

    for (std::size_t j = 0; j < results.size(); j++) {
        model::metadata_builder builder;
        if (with_metadata) {
            builder.set_description("Textual description");
            builder.set_timeout(datetime::delta(5678, 0));
        }
        test_program_builder.add_test_case(F("t%s") % j, builder.build());
    }

    const model::test_program test_program = test_program_builder.build();
    const int64_t tp_id = tx.put_test_program(test_program);

    for (std::size_t j = 0; j < results.size(); j++) {
        const int64_t tc_id = tx.put_test_case(test_program, F("t%s") % j,
                                               tp_id);
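        // Give every test case a deterministic duration: each run starts at
        // the epoch and ends at j.5 seconds, which matches the time="..."
        // attributes expected by the report tests below.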
        const datetime::timestamp start =
            datetime::timestamp::from_microseconds(0);
        const datetime::timestamp end =
            datetime::timestamp::from_microseconds(j * 1000000 + 500000);
        tx.put_result(results[j], tc_id, start, end);

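        // Attach fake captured output under the __STDOUT__ and __STDERR__
        // names so that the generated report carries <system-out> and
        // <system-err> payloads for these test cases.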
        if (with_output) {
            atf::utils::create_file("fake-out", F("stdout file %s") % j);
            tx.put_test_case_file("__STDOUT__", fs::path("fake-out"), tc_id);
            atf::utils::create_file("fake-err", F("stderr file %s") % j);
            tx.put_test_case_file("__STDERR__", fs::path("fake-err"), tc_id);
        }
    }
}


}  // anonymous namespace


ATF_TEST_CASE_WITHOUT_HEAD(junit_classname);
ATF_TEST_CASE_BODY(junit_classname)
{
    const model::test_program test_program = model::test_program_builder(
        "plain", fs::path("dir1/dir2/program"), fs::path("/root"), "suite")
        .build();

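    // The slash-separated relative path of the test program is expected to
    // map to a dot-separated, JUnit-style class name.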
    ATF_REQUIRE_EQ("dir1.dir2.program", drivers::junit_classname(test_program));
}


ATF_TEST_CASE_WITHOUT_HEAD(junit_duration);
ATF_TEST_CASE_BODY(junit_duration)
{
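    // Durations are rendered as seconds with three decimal digits, rounding
    // the microsecond component to the nearest millisecond.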
    ATF_REQUIRE_EQ("0.457",
                   drivers::junit_duration(datetime::delta(0, 456700)));
    ATF_REQUIRE_EQ("3.120",
                   drivers::junit_duration(datetime::delta(3, 120000)));
    ATF_REQUIRE_EQ("5.000", drivers::junit_duration(datetime::delta(5, 0)));
}


ATF_TEST_CASE_WITHOUT_HEAD(junit_metadata__defaults);
ATF_TEST_CASE_BODY(junit_metadata__defaults)
{
    const model::metadata metadata = model::metadata_builder().build();

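    // Default-constructed metadata should render as the shared metadata
    // header followed by the default_metadata block defined above.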
    const std::string expected = std::string()
        + drivers::junit_metadata_header
        + default_metadata;

    ATF_REQUIRE_EQ(expected, drivers::junit_metadata(metadata));
}


ATF_TEST_CASE_WITHOUT_HEAD(junit_metadata__overrides);
ATF_TEST_CASE_BODY(junit_metadata__overrides)
{
    const model::metadata metadata = model::metadata_builder()
        .add_allowed_architecture("arch1")
        .add_allowed_platform("platform1")
        .set_description("This is a test")
        .set_execenv("jail")
        .set_execenv_jail_params("vnet")
        .set_has_cleanup(true)
        .set_is_exclusive(true)
        .add_required_config("config1")
        .set_required_disk_space(units::bytes(456))
        .add_required_file(fs::path("file1"))
        .set_required_memory(units::bytes(123))
        .add_required_program(fs::path("prog1"))
        .set_required_user("root")
        .set_timeout(datetime::delta(10, 0))
        .build();

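    // Every overridden field is expected after the shared metadata header as
    // one "key = value" line, in the same order as in default_metadata.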
    const std::string expected = std::string()
        + drivers::junit_metadata_header
        + "allowed_architectures = arch1\n"
        + "allowed_platforms = platform1\n"
        + "description = This is a test\n"
        + "execenv = jail\n"
        + "execenv_jail_params = vnet\n"
        + "has_cleanup = true\n"
        + "is_exclusive = true\n"
        + "required_configs = config1\n"
        + "required_disk_space = 456\n"
        + "required_files = file1\n"
        + "required_memory = 123\n"
        + "required_programs = prog1\n"
        + "required_user = root\n"
        + "timeout = 10\n";

    ATF_REQUIRE_EQ(expected, drivers::junit_metadata(metadata));
}


ATF_TEST_CASE_WITHOUT_HEAD(junit_timing);
ATF_TEST_CASE_BODY(junit_timing)
{
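    // The expected duration is the difference between the two timestamps
    // constructed below, formatted with millisecond precision.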
    const std::string expected = std::string()
        + drivers::junit_timing_header +
        "Start time: 2015-06-12T01:02:35.123456Z\n"
        "End time:   2016-07-13T18:47:10.000001Z\n"
        "Duration:   34364674.877s\n";

    const datetime::timestamp start_time =
        datetime::timestamp::from_values(2015, 6, 12, 1, 2, 35, 123456);
    const datetime::timestamp end_time =
        datetime::timestamp::from_values(2016, 7, 13, 18, 47, 10, 1);

    ATF_REQUIRE_EQ(expected, drivers::junit_timing(start_time, end_time));
}


ATF_TEST_CASE_WITHOUT_HEAD(report_junit_hooks__minimal);
ATF_TEST_CASE_BODY(report_junit_hooks__minimal)
{
    store::write_backend backend = store::write_backend::open_rw(
        fs::path("test.db"));
    store::write_transaction tx = backend.start_write();
    add_context(tx, 0);
    tx.commit();
    backend.close();

    std::ostringstream output;

    drivers::report_junit_hooks hooks(output);
    drivers::scan_results::drive(fs::path("test.db"),
                                 std::set< engine::test_filter >(),
                                 hooks);

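    // With no tests and no environment variables stored, the report should
    // contain only the XML prologue and a single "cwd" property.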
    const char* expected =
        "<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>\n"
        "<testsuite>\n"
        "<properties>\n"
        "<property name=\"cwd\" value=\"/root\"/>\n"
        "</properties>\n"
        "</testsuite>\n";
    ATF_REQUIRE_EQ(expected, output.str());
}


ATF_TEST_CASE_WITHOUT_HEAD(report_junit_hooks__some_tests);
ATF_TEST_CASE_BODY(report_junit_hooks__some_tests)
{
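    // Build two result sets covering the broken, expected-failure, failed,
    // passed and skipped outcomes: prog-1 gets default metadata and no
    // captured output, while prog-2 gets metadata overrides plus fake
    // stdout/stderr files.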
    std::vector< model::test_result > results1;
    results1.push_back(model::test_result(
        model::test_result_broken, "Broken"));
    results1.push_back(model::test_result(
        model::test_result_expected_failure, "XFail"));
    results1.push_back(model::test_result(
        model::test_result_failed, "Failed"));
    std::vector< model::test_result > results2;
    results2.push_back(model::test_result(
        model::test_result_passed));
    results2.push_back(model::test_result(
        model::test_result_skipped, "Skipped"));

    store::write_backend backend = store::write_backend::open_rw(
        fs::path("test.db"));
    store::write_transaction tx = backend.start_write();
    add_context(tx, 2);
    add_tests(tx, "dir/prog-1", results1, false, false);
    add_tests(tx, "dir/sub/prog-2", results2, true, true);
    tx.commit();
    backend.close();

    std::ostringstream output;

    drivers::report_junit_hooks hooks(output);
    drivers::scan_results::drive(fs::path("test.db"),
                                 std::set< engine::test_filter >(),
                                 hooks);

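    // The expected document interleaves the header constants exported by
    // report_junit.hpp with the metadata, timing and output stored above:
    // broken tests map to <error>, failed to <failure>, skipped to
    // <skipped>, and XML special characters in the output are escaped.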
    const std::string expected = std::string() +
        "<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>\n"
        "<testsuite>\n"
        "<properties>\n"
        "<property name=\"cwd\" value=\"/root\"/>\n"
        "<property name=\"env.VAR0\" value=\"Value 0\"/>\n"
        "<property name=\"env.VAR1\" value=\"Value 1\"/>\n"
        "</properties>\n"

        "<testcase classname=\"dir.prog-1\" name=\"t0\" time=\"0.500\">\n"
        "<error message=\"Broken\"/>\n"
        "<system-err>"
        + drivers::junit_metadata_header +
        default_metadata
        + drivers::junit_timing_header +
        "Start time: 1970-01-01T00:00:00.000000Z\n"
        "End time:   1970-01-01T00:00:00.500000Z\n"
        "Duration:   0.500s\n"
        + drivers::junit_stderr_header +
        "&lt;EMPTY&gt;\n"
        "</system-err>\n"
        "</testcase>\n"

        "<testcase classname=\"dir.prog-1\" name=\"t1\" time=\"1.500\">\n"
        "<system-err>"
        "Expected failure result details\n"
        "-------------------------------\n"
        "\n"
        "XFail\n"
        "\n"
        + drivers::junit_metadata_header +
        default_metadata
        + drivers::junit_timing_header +
        "Start time: 1970-01-01T00:00:00.000000Z\n"
        "End time:   1970-01-01T00:00:01.500000Z\n"
        "Duration:   1.500s\n"
        + drivers::junit_stderr_header +
        "&lt;EMPTY&gt;\n"
        "</system-err>\n"
        "</testcase>\n"

        "<testcase classname=\"dir.prog-1\" name=\"t2\" time=\"2.500\">\n"
        "<failure message=\"Failed\"/>\n"
        "<system-err>"
        + drivers::junit_metadata_header +
        default_metadata
        + drivers::junit_timing_header +
        "Start time: 1970-01-01T00:00:00.000000Z\n"
        "End time:   1970-01-01T00:00:02.500000Z\n"
        "Duration:   2.500s\n"
        + drivers::junit_stderr_header +
        "&lt;EMPTY&gt;\n"
        "</system-err>\n"
        "</testcase>\n"

        "<testcase classname=\"dir.sub.prog-2\" name=\"t0\" time=\"0.500\">\n"
        "<system-out>stdout file 0</system-out>\n"
        "<system-err>"
        + drivers::junit_metadata_header +
        overriden_metadata
        + drivers::junit_timing_header +
        "Start time: 1970-01-01T00:00:00.000000Z\n"
        "End time:   1970-01-01T00:00:00.500000Z\n"
        "Duration:   0.500s\n"
        + drivers::junit_stderr_header +
        "stderr file 0</system-err>\n"
        "</testcase>\n"

        "<testcase classname=\"dir.sub.prog-2\" name=\"t1\" time=\"1.500\">\n"
        "<skipped/>\n"
        "<system-out>stdout file 1</system-out>\n"
        "<system-err>"
        "Skipped result details\n"
        "----------------------\n"
        "\n"
        "Skipped\n"
        "\n"
        + drivers::junit_metadata_header +
        overriden_metadata
        + drivers::junit_timing_header +
        "Start time: 1970-01-01T00:00:00.000000Z\n"
        "End time:   1970-01-01T00:00:01.500000Z\n"
        "Duration:   1.500s\n"
        + drivers::junit_stderr_header +
        "stderr file 1</system-err>\n"
        "</testcase>\n"

        "</testsuite>\n";
    ATF_REQUIRE_EQ(expected, output.str());
}


ATF_INIT_TEST_CASES(tcs)
{
    ATF_ADD_TEST_CASE(tcs, junit_classname);

    ATF_ADD_TEST_CASE(tcs, junit_duration);

    ATF_ADD_TEST_CASE(tcs, junit_metadata__defaults);
    ATF_ADD_TEST_CASE(tcs, junit_metadata__overrides);

    ATF_ADD_TEST_CASE(tcs, junit_timing);

    ATF_ADD_TEST_CASE(tcs, report_junit_hooks__minimal);
    ATF_ADD_TEST_CASE(tcs, report_junit_hooks__some_tests);
}