// Copyright 2014 The Kyua Authors.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
//   notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
//   notice, this list of conditions and the following disclaimer in the
//   documentation and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors
//   may be used to endorse or promote products derived from this software
//   without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "engine/scheduler.hpp"

extern "C" {
#include <sys/types.h>

#include <signal.h>
#include <unistd.h>
}

#include <cstddef>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <map>
#include <memory>
#include <set>
#include <stdexcept>
#include <string>
#include <vector>

#include <atf-c++.hpp>

#include "engine/config.hpp"
#include "engine/exceptions.hpp"
#include "model/context.hpp"
#include "model/metadata.hpp"
#include "model/test_case.hpp"
#include "model/test_program.hpp"
#include "model/test_result.hpp"
#include "utils/config/tree.ipp"
#include "utils/datetime.hpp"
#include "utils/defs.hpp"
#include "utils/env.hpp"
#include "utils/format/containers.ipp"
#include "utils/format/macros.hpp"
#include "utils/fs/operations.hpp"
#include "utils/fs/path.hpp"
#include "utils/optional.ipp"
#include "utils/passwd.hpp"
#include "utils/process/status.hpp"
#include "utils/sanity.hpp"
#include "utils/stacktrace.hpp"
#include "utils/stream.hpp"
#include "utils/test_utils.ipp"
#include "utils/text/exceptions.hpp"
#include "utils/text/operations.ipp"

namespace config = utils::config;
namespace datetime = utils::datetime;
namespace fs = utils::fs;
namespace passwd = utils::passwd;
namespace process = utils::process;
namespace scheduler = engine::scheduler;
namespace text = utils::text;

using utils::none;
using utils::optional;


namespace {


/// Checks if a string starts with a prefix.
///
/// \param str The string to be tested.
/// \param prefix The prefix to look for.
///
/// \return True if the string is prefixed as specified.
static bool
starts_with(const std::string& str, const std::string& prefix)
{
    return (str.length() >= prefix.length() &&
            str.substr(0, prefix.length()) == prefix);
}


/// Strips a prefix from a string and converts the rest to an integer.
///
/// \param str The string to process.
/// \param prefix The prefix to strip from the string.
///
/// \return The part of the string after the prefix converted to an integer.
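/// For example, suffix_to_int("exit 42", "exit ") returns 42.  A suffix that
/// does not parse as a number aborts the process instead of propagating the
/// exception.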
static int
suffix_to_int(const std::string& str, const std::string& prefix)
{
    PRE(starts_with(str, prefix));
    try {
        return text::to_type< int >(str.substr(prefix.length()));
    } catch (const text::value_error& error) {
        std::cerr << F("Failed: %s\n") % error.what();
        std::abort();
    }
}


/// Mock interface definition for testing.
///
/// This scheduler interface does not execute external binaries.  It is
/// designed to simulate the execution of various programs with different
/// exit statuses.
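///
/// The behavior to simulate is selected by the name of the test program or
/// test case handed to each operation; see the dispatch chains in
/// exec_list() and exec_test() below.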
class mock_interface : public scheduler::interface {
    /// Executes the subprocess simulating an exec.
    ///
    /// This is just a simple wrapper over _exit(2) because we cannot use
    /// std::exit to leave this mock interface.  The reason is that we do not
    /// want to invoke any destructors, as doing so would clear up the global
    /// scheduler state by mistake.  That would not be a major problem were it
    /// not for the fact that it also deletes on-disk files, which we want to
    /// leave in place so that the parent process can test for them!
    ///
    /// \param exit_code Exit code.
    void
    do_exit(const int exit_code) const UTILS_NORETURN
    {
        std::cout.flush();
        std::cerr.flush();
        ::_exit(exit_code);
    }

    /// Executes a test case that creates various files and then fails.
    void
    exec_create_files_and_fail(void) const UTILS_NORETURN
    {
        std::cerr << "This should not be clobbered\n";
        atf::utils::create_file("first file", "");
        atf::utils::create_file("second-file", "");
        fs::mkdir_p(fs::path("dir1/dir2"), 0755);
        ::kill(::getpid(), SIGTERM);
        std::abort();
    }

    /// Executes a test case that deletes all files in the current directory.
    ///
    /// This is intended to validate that the test runs in an empty directory,
    /// separate from any control files that the scheduler may have created.
    void
    exec_delete_all(void) const UTILS_NORETURN
    {
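        // Note that this only detects a failure to invoke system(3) itself;
        // the exit status of the rm command is not inspected.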
        const int exit_code = ::system("rm *") == -1
            ? EXIT_FAILURE : EXIT_SUCCESS;

        // Recreate our own cookie.
        atf::utils::create_file("exec_test_was_called", "");

        do_exit(exit_code);
    }

    /// Executes a test case that returns a specific exit code.
    ///
    /// \param exit_code Exit status to terminate the program with.
    void
    exec_exit(const int exit_code) const UTILS_NORETURN
    {
        do_exit(exit_code);
    }

    /// Executes a test case that just fails.
    void
    exec_fail(void) const UTILS_NORETURN
    {
        std::cerr << "This should not be clobbered\n";
        ::kill(::getpid(), SIGTERM);
        std::abort();
    }

    /// Executes a test case that prints all input parameters to the functor.
    ///
    /// \param test_program The test program to execute.
    /// \param test_case_name Name of the test case to invoke.
    /// \param vars User-provided variables to pass to the test program.
    void
    exec_print_params(const model::test_program& test_program,
                      const std::string& test_case_name,
                      const config::properties_map& vars) const
        UTILS_NORETURN
    {
        std::cout << F("Test program: %s\n") % test_program.relative_path();
        std::cout << F("Test case: %s\n") % test_case_name;
        for (config::properties_map::const_iterator iter = vars.begin();
             iter != vars.end(); ++iter) {
            std::cout << F("%s=%s\n") % (*iter).first % (*iter).second;
        }

        std::cerr << F("stderr: %s\n") % test_case_name;

        do_exit(EXIT_SUCCESS);
    }

public:
    /// Executes a test program's list operation.
    ///
    /// This method is intended to be called within a subprocess and is expected
    /// to terminate execution either by exec(2)ing the test program or by
    /// exiting with a failure.
    ///
    /// \param test_program The test program to execute.
    /// \param vars User-provided variables to pass to the test program.
    void
    exec_list(const model::test_program& test_program,
              const config::properties_map& vars)
        const UTILS_NORETURN
    {
        const std::string name = test_program.absolute_path().leaf_name();

        std::cerr << name;
        std::cerr.flush();
        if (name == "check_i_exist") {
            if (fs::exists(test_program.absolute_path())) {
                std::cout << "found\n";
                do_exit(EXIT_SUCCESS);
            } else {
                std::cout << "not_found\n";
                do_exit(EXIT_FAILURE);
            }
        } else if (name == "empty") {
            do_exit(EXIT_SUCCESS);
        } else if (name == "misbehave") {
            utils::abort_without_coredump();
        } else if (name == "timeout") {
            std::cout << "sleeping\n";
            std::cout.flush();
            ::sleep(100);
            utils::abort_without_coredump();
        } else if (name == "vars") {
            for (config::properties_map::const_iterator iter = vars.begin();
                 iter != vars.end(); ++iter) {
                std::cout << F("%s_%s\n") % (*iter).first % (*iter).second;
            }
            do_exit(15);
        } else {
            std::abort();
        }
    }

    /// Computes the list of test cases of a test program.
    ///
    /// \param status The termination status of the subprocess used to execute
    ///     the exec_list() method, or none if the listing timed out.
    /// \param stdout_path Path to the file containing the stdout of the test.
    /// \param stderr_path Path to the file containing the stderr of the test.
    ///
    /// \return A list of test cases.
    model::test_cases_map
    parse_list(const optional< process::status >& status,
               const fs::path& stdout_path,
               const fs::path& stderr_path) const
    {
        const std::string name = utils::read_file(stderr_path);
        if (name == "check_i_exist") {
            ATF_REQUIRE(status.get().exited());
            ATF_REQUIRE_EQ(EXIT_SUCCESS, status.get().exitstatus());
        } else if (name == "empty") {
            ATF_REQUIRE(status.get().exited());
            ATF_REQUIRE_EQ(EXIT_SUCCESS, status.get().exitstatus());
        } else if (name == "misbehave") {
            throw std::runtime_error("misbehaved in parse_list");
        } else if (name == "timeout") {
            ATF_REQUIRE(!status);
        } else if (name == "vars") {
            ATF_REQUIRE(status.get().exited());
            ATF_REQUIRE_EQ(15, status.get().exitstatus());
        } else {
            ATF_FAIL("Invalid stderr contents; got " + name);
        }

        model::test_cases_map_builder test_cases_builder;

        std::ifstream input(stdout_path.c_str());
        ATF_REQUIRE(input);
        std::string line;
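        // Every line that the exec_list() subprocess printed to its stdout
        // names exactly one test case.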
        while (std::getline(input, line).good()) {
            test_cases_builder.add(line);
        }

        return test_cases_builder.build();
    }

    /// Executes a test case of the test program.
    ///
    /// This method is intended to be called within a subprocess and is expected
    /// to terminate execution either by exec(2)ing the test program or by
    /// exiting with a failure.
    ///
    /// \param test_program The test program to execute.
    /// \param test_case_name Name of the test case to invoke.
    /// \param vars User-provided variables to pass to the test program.
    /// \param control_directory Directory where the interface may place control
    ///     files.
    void
    exec_test(const model::test_program& test_program,
              const std::string& test_case_name,
              const config::properties_map& vars,
              const fs::path& control_directory) const
    {
        const fs::path cookie = control_directory / "exec_test_was_called";
        std::ofstream control_file(cookie.c_str());
        if (!control_file) {
            std::cerr << "Failed to create " << cookie << '\n';
            std::abort();
        }
        control_file << test_case_name;
        control_file.close();

        if (test_case_name == "check_i_exist") {
            do_exit(fs::exists(test_program.absolute_path()) ? 0 : 1);
        } else if (starts_with(test_case_name, "cleanup_timeout")) {
            exec_exit(EXIT_SUCCESS);
        } else if (starts_with(test_case_name, "create_files_and_fail")) {
            exec_create_files_and_fail();
        } else if (test_case_name == "delete_all") {
            exec_delete_all();
        } else if (starts_with(test_case_name, "exit ")) {
            exec_exit(suffix_to_int(test_case_name, "exit "));
        } else if (starts_with(test_case_name, "fail")) {
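            // Note that this branch also matches the fail_body_*_cleanup
            // names handled below, which is harmless because their bodies
            // are expected to fail in exactly the same way.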
            exec_fail();
        } else if (starts_with(test_case_name, "fail_body_fail_cleanup")) {
            exec_fail();
        } else if (starts_with(test_case_name, "fail_body_pass_cleanup")) {
            exec_fail();
        } else if (starts_with(test_case_name, "pass_body_fail_cleanup")) {
            exec_exit(EXIT_SUCCESS);
        } else if (starts_with(test_case_name, "print_params")) {
            exec_print_params(test_program, test_case_name, vars);
        } else if (starts_with(test_case_name, "skip_body_pass_cleanup")) {
            exec_exit(EXIT_SUCCESS);
        } else {
            std::cerr << "Unknown test case " << test_case_name << '\n';
            std::abort();
        }
    }

    /// Executes a test cleanup routine of the test program.
    ///
    /// This method is intended to be called within a subprocess and is expected
    /// to terminate execution either by exec(2)ing the test program or by
    /// exiting with a failure.
    ///
    /// \param test_case_name Name of the test case to invoke.
    void
    exec_cleanup(const model::test_program& /* test_program */,
                 const std::string& test_case_name,
                 const config::properties_map& /* vars */,
                 const fs::path& /* control_directory */) const
    {
        std::cout << "exec_cleanup was called\n";
        std::cout.flush();

        if (starts_with(test_case_name, "cleanup_timeout")) {
            ::sleep(100);
            std::abort();
        } else if (starts_with(test_case_name, "fail_body_fail_cleanup")) {
            exec_fail();
        } else if (starts_with(test_case_name, "fail_body_pass_cleanup")) {
            exec_exit(EXIT_SUCCESS);
        } else if (starts_with(test_case_name, "pass_body_fail_cleanup")) {
            exec_fail();
        } else if (starts_with(test_case_name, "skip_body_pass_cleanup")) {
            exec_exit(EXIT_SUCCESS);
        } else {
            std::cerr << "Should not have been called for a test without "
                "a cleanup routine" << '\n';
            std::abort();
        }
    }

    /// Computes the result of a test case based on its termination status.
    ///
    /// \param status The termination status of the subprocess used to execute
    ///     the exec_test() method or none if the test timed out.
    /// \param control_directory Path to the directory where the interface may
    ///     have placed control files.
    /// \param stdout_path Path to the file containing the stdout of the test.
    /// \param stderr_path Path to the file containing the stderr of the test.
    ///
    /// \return A test result.
    model::test_result
    compute_result(const optional< process::status >& status,
                   const fs::path& control_directory,
                   const fs::path& stdout_path,
                   const fs::path& stderr_path) const
    {
        // Do not use any ATF_* macros here.  Some of the tests below invoke
        // this code in a subprocess, and terminating such subprocess due to a
        // failed ATF_* macro yields mysterious failures that are incredibly
        // hard to debug.  (Case in point: the signal_handling test is racy by
        // nature, and the test run by exec_test() above may not have created
        // the cookie we expect below.  We don't want to "silently" exit if the
        // file is not there.)

        if (!status) {
            return model::test_result(model::test_result_broken,
                                      "Timed out");
        }

        if (status.get().exited()) {
            // Only sanity-check the work directory-related parameters in case
            // of a clean exit.  In all other cases, there is no guarantee that
            // these were ever created.
            const fs::path cookie = control_directory / "exec_test_was_called";
            if (!atf::utils::file_exists(cookie.str())) {
                return model::test_result(
                    model::test_result_broken,
                    "compute_result's control_directory does not seem to point "
                    "to the right location");
            }
            const std::string test_case_name = utils::read_file(cookie);

            if (!atf::utils::file_exists(stdout_path.str())) {
                return model::test_result(
                    model::test_result_broken,
                    "compute_result's stdout_path does not exist");
            }
            if (!atf::utils::file_exists(stderr_path.str())) {
                return model::test_result(
                    model::test_result_broken,
                    "compute_result's stderr_path does not exist");
            }

            if (test_case_name == "skip_body_pass_cleanup") {
                return model::test_result(
                    model::test_result_skipped,
                    F("Exit %s") % status.get().exitstatus());
            } else {
                return model::test_result(
                    model::test_result_passed,
                    F("Exit %s") % status.get().exitstatus());
            }
        } else {
            return model::test_result(
                model::test_result_failed,
                F("Signal %s") % status.get().termsig());
        }
    }
};


}  // anonymous namespace


/// Runs list_tests on the scheduler and returns the results.
///
/// \param test_name The name of the test supported by our exec_list function.
/// \param root Directory from which the relative test_name is resolved.
/// \param user_config Optional user settings for the test.
///
/// \return The loaded list of test cases.
static model::test_cases_map
check_integration_list(const char* test_name, const fs::path& root,
                       const config::tree& user_config = engine::empty_config())
{
    const model::test_program program = model::test_program_builder(
        "mock", fs::path(test_name), root, "the-suite")
        .build();

    scheduler::scheduler_handle handle = scheduler::setup();
    const model::test_cases_map test_cases = handle.list_tests(&program,
                                                               user_config);
    handle.cleanup();

    return test_cases;
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__list_some);
ATF_TEST_CASE_BODY(integration__list_some)
{
    config::tree user_config = engine::empty_config();
    user_config.set_string("test_suites.the-suite.first", "test");
    user_config.set_string("test_suites.the-suite.second", "TEST");
    user_config.set_string("test_suites.abc.unused", "unused");

    const model::test_cases_map test_cases = check_integration_list(
        "vars", fs::path("."), user_config);

    const model::test_cases_map exp_test_cases = model::test_cases_map_builder()
        .add("first_test").add("second_TEST").build();
    ATF_REQUIRE_EQ(exp_test_cases, test_cases);
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__list_check_paths);
ATF_TEST_CASE_BODY(integration__list_check_paths)
{
    fs::mkdir_p(fs::path("dir1/dir2/dir3"), 0755);
    atf::utils::create_file("dir1/dir2/dir3/check_i_exist", "");

    const model::test_cases_map test_cases = check_integration_list(
        "dir2/dir3/check_i_exist", fs::path("dir1"));

    const model::test_cases_map exp_test_cases = model::test_cases_map_builder()
        .add("found").build();
    ATF_REQUIRE_EQ(exp_test_cases, test_cases);
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__list_timeout);
ATF_TEST_CASE_BODY(integration__list_timeout)
{
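    // Lower the global listing timeout to one second so that the mock
    // "timeout" program, which sleeps for 100 seconds after printing
    // "sleeping", is killed quickly.  Only the line printed before the
    // sleep should make it into the resulting list.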
    scheduler::list_timeout = datetime::delta(1, 0);
    const model::test_cases_map test_cases = check_integration_list(
        "timeout", fs::path("."));

    const model::test_cases_map exp_test_cases = model::test_cases_map_builder()
        .add("sleeping").build();
    ATF_REQUIRE_EQ(exp_test_cases, test_cases);
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__list_fail);
ATF_TEST_CASE_BODY(integration__list_fail)
{
    const model::test_cases_map test_cases = check_integration_list(
        "misbehave", fs::path("."));

    ATF_REQUIRE_EQ(1, test_cases.size());
    const model::test_case& test_case = test_cases.begin()->second;
    ATF_REQUIRE_EQ("__test_cases_list__", test_case.name());
    ATF_REQUIRE(test_case.fake_result());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_broken,
                                      "misbehaved in parse_list"),
                   test_case.fake_result().get());
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__list_empty);
ATF_TEST_CASE_BODY(integration__list_empty)
{
    const model::test_cases_map test_cases = check_integration_list(
        "empty", fs::path("."));

    ATF_REQUIRE_EQ(1, test_cases.size());
    const model::test_case& test_case = test_cases.begin()->second;
    ATF_REQUIRE_EQ("__test_cases_list__", test_case.name());
    ATF_REQUIRE(test_case.fake_result());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_broken,
                                      "Empty test cases list"),
                   test_case.fake_result().get());
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__run_one);
ATF_TEST_CASE_BODY(integration__run_one)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("exit 41").build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

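    // The basic scheduler lifecycle: spawn_test() starts the test case in a
    // subprocess and returns a handle for it (checked against original_pid()
    // below), wait_any() collects its result, and both the result handle and
    // the scheduler handle must be explicitly cleaned up.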
    const scheduler::exec_handle exec_handle = handle.spawn_test(
        program, "exit 41", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    ATF_REQUIRE_EQ(exec_handle, result_handle->original_pid());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 41"),
                   test_result_handle->test_result());
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__run_many);
ATF_TEST_CASE_BODY(integration__run_many)
{
    static const std::size_t num_test_programs = 30;

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    // We mess around with the "current time" below, so make sure the tests do
    // not spuriously exceed their deadline by bumping it to a large number.
    const model::metadata infinite_timeout = model::metadata_builder()
        .set_timeout(datetime::delta(1000000L, 0)).build();

    std::size_t total_tests = 0;
    std::map< scheduler::exec_handle, model::test_program_ptr >
        exp_test_programs;
    std::map< scheduler::exec_handle, std::string > exp_test_case_names;
    std::map< scheduler::exec_handle, datetime::timestamp > exp_start_times;
    std::map< scheduler::exec_handle, int > exp_exit_statuses;
    for (std::size_t i = 0; i < num_test_programs; ++i) {
        const std::string test_case_0 = F("exit %s") % (i * 3 + 0);
        const std::string test_case_1 = F("exit %s") % (i * 3 + 1);
        const std::string test_case_2 = F("exit %s") % (i * 3 + 2);

        const model::test_program_ptr program = model::test_program_builder(
            "mock", fs::path(F("program-%s") % i),
            fs::current_path(), "the-suite")
            .set_metadata(infinite_timeout)
            .add_test_case(test_case_0)
            .add_test_case(test_case_1)
            .add_test_case(test_case_2)
            .build_ptr();

        const datetime::timestamp start_time = datetime::timestamp::from_values(
            2014, 12, 8, 9, 40, 0, i);

        scheduler::exec_handle exec_handle;

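        // Spawn the program's three test cases.  The mock clock is frozen at
        // start_time before each spawn so that we can later assert that the
        // scheduler recorded exactly this timestamp, and each "exit N" name
        // tells the mock interface which exit status to terminate with.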
        datetime::set_mock_now(start_time);
        exec_handle = handle.spawn_test(program, test_case_0, user_config);
        exp_test_programs.insert(std::make_pair(exec_handle, program));
        exp_test_case_names.insert(std::make_pair(exec_handle, test_case_0));
        exp_start_times.insert(std::make_pair(exec_handle, start_time));
        exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3));
        ++total_tests;

        datetime::set_mock_now(start_time);
        exec_handle = handle.spawn_test(program, test_case_1, user_config);
        exp_test_programs.insert(std::make_pair(exec_handle, program));
        exp_test_case_names.insert(std::make_pair(exec_handle, test_case_1));
        exp_start_times.insert(std::make_pair(exec_handle, start_time));
        exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3 + 1));
        ++total_tests;

        datetime::set_mock_now(start_time);
        exec_handle = handle.spawn_test(program, test_case_2, user_config);
        exp_test_programs.insert(std::make_pair(exec_handle, program));
        exp_test_case_names.insert(std::make_pair(exec_handle, test_case_2));
        exp_start_times.insert(std::make_pair(exec_handle, start_time));
        exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3 + 2));
        ++total_tests;
    }

    for (std::size_t i = 0; i < total_tests; ++i) {
        const datetime::timestamp end_time = datetime::timestamp::from_values(
            2014, 12, 8, 9, 50, 10, i);
        datetime::set_mock_now(end_time);
        scheduler::result_handle_ptr result_handle = handle.wait_any();
        const scheduler::test_result_handle* test_result_handle =
            dynamic_cast< const scheduler::test_result_handle* >(
                result_handle.get());

        const scheduler::exec_handle exec_handle =
            result_handle->original_pid();

        const model::test_program_ptr test_program = exp_test_programs.find(
            exec_handle)->second;
        const std::string& test_case_name = exp_test_case_names.find(
            exec_handle)->second;
        const datetime::timestamp& start_time = exp_start_times.find(
            exec_handle)->second;
        const int exit_status = exp_exit_statuses.find(exec_handle)->second;

        ATF_REQUIRE_EQ(model::test_result(model::test_result_passed,
                                          F("Exit %s") % exit_status),
                       test_result_handle->test_result());

        ATF_REQUIRE_EQ(test_program, test_result_handle->test_program());
        ATF_REQUIRE_EQ(test_case_name, test_result_handle->test_case_name());

        ATF_REQUIRE_EQ(start_time, result_handle->start_time());
        ATF_REQUIRE_EQ(end_time, result_handle->end_time());

        result_handle->cleanup();

        ATF_REQUIRE(!atf::utils::file_exists(
                        result_handle->stdout_file().str()));
        ATF_REQUIRE(!atf::utils::file_exists(
                        result_handle->stderr_file().str()));
        ATF_REQUIRE(!atf::utils::file_exists(
                        result_handle->work_directory().str()));

        result_handle.reset();
    }

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__run_check_paths);
ATF_TEST_CASE_BODY(integration__run_check_paths)
{
    fs::mkdir_p(fs::path("dir1/dir2/dir3"), 0755);
    atf::utils::create_file("dir1/dir2/dir3/program", "");

    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("dir2/dir3/program"), fs::path("dir1"), "the-suite")
        .add_test_case("check_i_exist").build_ptr();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, "check_i_exist", engine::default_config());
    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());

    ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
                   test_result_handle->test_result());

    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__parameters_and_output);
ATF_TEST_CASE_BODY(integration__parameters_and_output)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("print_params").build_ptr();

    config::tree user_config = engine::empty_config();
    user_config.set_string("test_suites.the-suite.one", "first variable");
    user_config.set_string("test_suites.the-suite.two", "second variable");

    scheduler::scheduler_handle handle = scheduler::setup();

    const scheduler::exec_handle exec_handle = handle.spawn_test(
        program, "print_params", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());

    ATF_REQUIRE_EQ(exec_handle, result_handle->original_pid());
    ATF_REQUIRE_EQ(program, test_result_handle->test_program());
    ATF_REQUIRE_EQ("print_params", test_result_handle->test_case_name());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
                   test_result_handle->test_result());

    const fs::path stdout_file = result_handle->stdout_file();
    ATF_REQUIRE(atf::utils::compare_file(
        stdout_file.str(),
        "Test program: the-program\n"
        "Test case: print_params\n"
        "one=first variable\n"
        "two=second variable\n"));
    const fs::path stderr_file = result_handle->stderr_file();
    ATF_REQUIRE(atf::utils::compare_file(
        stderr_file.str(), "stderr: print_params\n"));

    result_handle->cleanup();
    ATF_REQUIRE(!fs::exists(stdout_file));
    ATF_REQUIRE(!fs::exists(stderr_file));
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__fake_result);
ATF_TEST_CASE_BODY(integration__fake_result)
{
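    // A test case constructed with a canned "fake" result is never really
    // executed: the scheduler is expected to hand the fake result back
    // verbatim, which is what this test checks.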
    const model::test_result fake_result(model::test_result_skipped,
                                         "Some fake details");

    model::test_cases_map test_cases;
    test_cases.insert(model::test_cases_map::value_type(
        "__fake__", model::test_case("__fake__", "ABC", fake_result)));

    const model::test_program_ptr program(new model::test_program(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite",
        model::metadata_builder().build(), test_cases));

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, "__fake__", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    ATF_REQUIRE_EQ(fake_result, test_result_handle->test_result());
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__head_skips);
ATF_TEST_CASE_BODY(integration__cleanup__head_skips)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("skip_me",
                       model::metadata_builder()
                       .add_required_config("variable-that-does-not-exist")
                       .set_has_cleanup(true)
                       .build())
        .build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, "skip_me", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    ATF_REQUIRE_EQ(model::test_result(
                       model::test_result_skipped,
                       "Required configuration property "
                       "'variable-that-does-not-exist' not defined"),
                   test_result_handle->test_result());
    ATF_REQUIRE(!atf::utils::grep_file("exec_cleanup was called",
                                       result_handle->stdout_file().str()));
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


/// Runs a test to verify the behavior of cleanup routines.
///
/// \param test_case The name of the test case to invoke.
/// \param exp_result The expected test result of the execution.
static void
do_cleanup_test(const char* test_case,
                const model::test_result& exp_result)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case(test_case)
        .set_metadata(model::metadata_builder().set_has_cleanup(true).build())
        .build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, test_case, user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    ATF_REQUIRE_EQ(exp_result, test_result_handle->test_result());
    ATF_REQUIRE(atf::utils::compare_file(
        result_handle->stdout_file().str(),
        "exec_cleanup was called\n"));
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_skips);
ATF_TEST_CASE_BODY(integration__cleanup__body_skips)
{
    do_cleanup_test(
        "skip_body_pass_cleanup",
        model::test_result(model::test_result_skipped, "Exit 0"));
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_bad__cleanup_ok);
ATF_TEST_CASE_BODY(integration__cleanup__body_bad__cleanup_ok)
{
    do_cleanup_test(
        "fail_body_pass_cleanup",
        model::test_result(model::test_result_failed, "Signal 15"));
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_ok__cleanup_bad);
ATF_TEST_CASE_BODY(integration__cleanup__body_ok__cleanup_bad)
{
    do_cleanup_test(
        "pass_body_fail_cleanup",
        model::test_result(model::test_result_broken, "Test case cleanup "
                           "did not terminate successfully"));
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_bad__cleanup_bad);
ATF_TEST_CASE_BODY(integration__cleanup__body_bad__cleanup_bad)
{
    do_cleanup_test(
        "fail_body_fail_cleanup",
        model::test_result(model::test_result_failed, "Signal 15"));
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__timeout);
ATF_TEST_CASE_BODY(integration__cleanup__timeout)
{
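    // As in integration__list_timeout above, lower the cleanup timeout to one
    // second so that the mock cleanup routine, which sleeps for 100 seconds,
    // is reported as timing out almost immediately.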
    scheduler::cleanup_timeout = datetime::delta(1, 0);
    do_cleanup_test(
        "cleanup_timeout",
        model::test_result(model::test_result_broken, "Test case cleanup "
                           "timed out"));
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__check_requirements);
ATF_TEST_CASE_BODY(integration__check_requirements)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("exit 12")
        .set_metadata(model::metadata_builder()
                      .add_required_config("abcde").build())
        .build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, "exit 12", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    ATF_REQUIRE_EQ(model::test_result(
                       model::test_result_skipped,
                       "Required configuration property 'abcde' not defined"),
                   test_result_handle->test_result());
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__stacktrace);
ATF_TEST_CASE_BODY(integration__stacktrace)
{
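    // Ensure the environment allows core dumps to be generated (or skip the
    // test otherwise) so that the scheduler has a core file to gather the
    // stack trace from when the unknown test case aborts.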
    utils::prepare_coredump_test(this);

    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("unknown-dumps-core").build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, "unknown-dumps-core", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_failed,
                                      F("Signal %s") % SIGABRT),
                   test_result_handle->test_result());
    ATF_REQUIRE(!atf::utils::grep_file("attempting to gather stack trace",
                                       result_handle->stdout_file().str()));
    ATF_REQUIRE( atf::utils::grep_file("attempting to gather stack trace",
                                       result_handle->stderr_file().str()));
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


/// Runs a test to verify the dumping of the list of existing files on failure.
///
/// \param test_case The name of the test case to invoke.
/// \param exp_stderr Expected contents of stderr.
static void
do_check_list_files_on_failure(const char* test_case, const char* exp_stderr)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case(test_case).build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, test_case, user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    atf::utils::cat_file(result_handle->stdout_file().str(), "child stdout: ");
    ATF_REQUIRE(atf::utils::compare_file(result_handle->stdout_file().str(),
                                         ""));
    atf::utils::cat_file(result_handle->stderr_file().str(), "child stderr: ");
    ATF_REQUIRE(atf::utils::compare_file(result_handle->stderr_file().str(),
                                         exp_stderr));
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__list_files_on_failure__none);
ATF_TEST_CASE_BODY(integration__list_files_on_failure__none)
{
    do_check_list_files_on_failure("fail", "This should not be clobbered\n");
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__list_files_on_failure__some);
ATF_TEST_CASE_BODY(integration__list_files_on_failure__some)
{
    do_check_list_files_on_failure(
        "create_files_and_fail",
        "This should not be clobbered\n"
        "Files left in work directory after failure: "
        "dir1, first file, second-file\n");
}


ATF_TEST_CASE_WITHOUT_HEAD(integration__prevent_clobbering_control_files);
ATF_TEST_CASE_BODY(integration__prevent_clobbering_control_files)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("delete_all").build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, "delete_all", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
                   test_result_handle->test_result());
    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();
}


ATF_TEST_CASE_WITHOUT_HEAD(debug_test);
ATF_TEST_CASE_BODY(debug_test)
{
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("print_params").build_ptr();

    config::tree user_config = engine::empty_config();
    user_config.set_string("test_suites.the-suite.one", "first variable");
    user_config.set_string("test_suites.the-suite.two", "second variable");

    scheduler::scheduler_handle handle = scheduler::setup();

    const fs::path stdout_file("custom-stdout.txt");
    const fs::path stderr_file("custom-stderr.txt");

    scheduler::result_handle_ptr result_handle = handle.debug_test(
        program, "print_params", user_config, stdout_file, stderr_file);
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());

    ATF_REQUIRE_EQ(program, test_result_handle->test_program());
    ATF_REQUIRE_EQ("print_params", test_result_handle->test_case_name());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
                   test_result_handle->test_result());

    // The original output went to a file.  It's only an artifact of
    // debug_test() that we later get a copy in our own files.
    ATF_REQUIRE(stdout_file != result_handle->stdout_file());
    ATF_REQUIRE(stderr_file != result_handle->stderr_file());

    result_handle->cleanup();
    result_handle.reset();

    handle.cleanup();

    ATF_REQUIRE(atf::utils::compare_file(
        stdout_file.str(),
        "Test program: the-program\n"
        "Test case: print_params\n"
        "one=first variable\n"
        "two=second variable\n"));
    ATF_REQUIRE(atf::utils::compare_file(
        stderr_file.str(), "stderr: print_params\n"));
}


ATF_TEST_CASE_WITHOUT_HEAD(ensure_valid_interface);
ATF_TEST_CASE_BODY(ensure_valid_interface)
{
    scheduler::ensure_valid_interface("mock");

    ATF_REQUIRE_THROW_RE(engine::error, "Unsupported test interface 'mock2'",
                         scheduler::ensure_valid_interface("mock2"));
    scheduler::register_interface(
        "mock2", std::shared_ptr< scheduler::interface >(new mock_interface()));
    scheduler::ensure_valid_interface("mock2");

    // Standard interfaces should not be present unless registered.
    ATF_REQUIRE_THROW_RE(engine::error, "Unsupported test interface 'plain'",
                         scheduler::ensure_valid_interface("plain"));
}


ATF_TEST_CASE_WITHOUT_HEAD(registered_interface_names);
ATF_TEST_CASE_BODY(registered_interface_names)
{
    std::set< std::string > exp_names;

    exp_names.insert("mock");
    ATF_REQUIRE_EQ(exp_names, scheduler::registered_interface_names());

    scheduler::register_interface(
        "mock2", std::shared_ptr< scheduler::interface >(new mock_interface()));
    exp_names.insert("mock2");
    ATF_REQUIRE_EQ(exp_names, scheduler::registered_interface_names());
}


ATF_TEST_CASE_WITHOUT_HEAD(current_context);
ATF_TEST_CASE_BODY(current_context)
{
    const model::context context = scheduler::current_context();
    ATF_REQUIRE_EQ(fs::current_path(), context.cwd());
    ATF_REQUIRE(utils::getallenv() == context.env());
}


ATF_TEST_CASE_WITHOUT_HEAD(generate_config__empty);
ATF_TEST_CASE_BODY(generate_config__empty)
{
    const config::tree user_config = engine::empty_config();

    const config::properties_map exp_props;

    ATF_REQUIRE_EQ(exp_props,
                   scheduler::generate_config(user_config, "missing"));
}


ATF_TEST_CASE_WITHOUT_HEAD(generate_config__no_matches);
ATF_TEST_CASE_BODY(generate_config__no_matches)
{
    config::tree user_config = engine::empty_config();
    user_config.set_string("architecture", "foo");
    user_config.set_string("test_suites.one.var1", "value 1");

    const config::properties_map exp_props;

    ATF_REQUIRE_EQ(exp_props,
                   scheduler::generate_config(user_config, "two"));
}


ATF_TEST_CASE_WITHOUT_HEAD(generate_config__some_matches);
ATF_TEST_CASE_BODY(generate_config__some_matches)
{
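    // Inject a mock "nobody" user so that looking up the unprivileged_user
    // property below does not depend on the contents of the real password
    // database.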
    std::vector< passwd::user > mock_users;
    mock_users.push_back(passwd::user("nobody", 1234, 5678));
    passwd::set_mock_users_for_testing(mock_users);

    config::tree user_config = engine::empty_config();
    user_config.set_string("architecture", "foo");
    user_config.set_string("unprivileged_user", "nobody");
    user_config.set_string("test_suites.one.var1", "value 1");
    user_config.set_string("test_suites.two.var2", "value 2");

    config::properties_map exp_props;
    exp_props["unprivileged-user"] = "nobody";
    exp_props["var1"] = "value 1";

    ATF_REQUIRE_EQ(exp_props,
                   scheduler::generate_config(user_config, "one"));
}


ATF_INIT_TEST_CASES(tcs)
{
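    // All test cases in this program exercise the scheduler through the mock
    // interface, so register it once before adding them.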
    scheduler::register_interface(
        "mock", std::shared_ptr< scheduler::interface >(new mock_interface()));

    ATF_ADD_TEST_CASE(tcs, integration__list_some);
    ATF_ADD_TEST_CASE(tcs, integration__list_check_paths);
    ATF_ADD_TEST_CASE(tcs, integration__list_timeout);
    ATF_ADD_TEST_CASE(tcs, integration__list_fail);
    ATF_ADD_TEST_CASE(tcs, integration__list_empty);

    ATF_ADD_TEST_CASE(tcs, integration__run_one);
    ATF_ADD_TEST_CASE(tcs, integration__run_many);

    ATF_ADD_TEST_CASE(tcs, integration__run_check_paths);
    ATF_ADD_TEST_CASE(tcs, integration__parameters_and_output);

    ATF_ADD_TEST_CASE(tcs, integration__fake_result);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__head_skips);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_skips);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_ok__cleanup_bad);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_bad__cleanup_ok);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_bad__cleanup_bad);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__timeout);
    ATF_ADD_TEST_CASE(tcs, integration__check_requirements);
    ATF_ADD_TEST_CASE(tcs, integration__stacktrace);
    ATF_ADD_TEST_CASE(tcs, integration__list_files_on_failure__none);
    ATF_ADD_TEST_CASE(tcs, integration__list_files_on_failure__some);
    ATF_ADD_TEST_CASE(tcs, integration__prevent_clobbering_control_files);

    ATF_ADD_TEST_CASE(tcs, debug_test);

    ATF_ADD_TEST_CASE(tcs, ensure_valid_interface);
    ATF_ADD_TEST_CASE(tcs, registered_interface_names);

    ATF_ADD_TEST_CASE(tcs, current_context);

    ATF_ADD_TEST_CASE(tcs, generate_config__empty);
    ATF_ADD_TEST_CASE(tcs, generate_config__no_matches);
    ATF_ADD_TEST_CASE(tcs, generate_config__some_matches);
}