check_check_master.c 49 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959
  1. /*
  2. * Check: a unit test framework for C
  3. * Copyright (C) 2001, 2002 Arien Malec
  4. *
  5. * This library is free software; you can redistribute it and/or
  6. * modify it under the terms of the GNU Lesser General Public
  7. * License as published by the Free Software Foundation; either
  8. * version 2.1 of the License, or (at your option) any later version.
  9. *
  10. * This library is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  13. * Lesser General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU Lesser General Public
  16. * License along with this library; if not, write to the
  17. * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston,
  18. * MA 02110-1301, USA.
  19. */
  20. #include "../lib/libcompat.h"
  21. #include <stdlib.h>
  22. #include <stdio.h>
  23. #include <string.h>
  24. #if ENABLE_REGEX
  25. # include <regex.h>
  26. #endif
  27. #include <check.h>
  28. #include <assert.h>
  29. #include "check_check.h"
/* Aggregated results from running the sub-suite whose output this
 * master suite checks. NOTE(review): set and consumed by code outside
 * this chunk — verify against the setup/teardown routines. */
int sub_nfailed;            /* number of failed results collected from the sub-suite */
int sub_ntests;             /* total number of results collected from the sub-suite */
TestResult **tr_fail_array; /* failure TestResults gathered from the sub-suite run */
TestResult **tr_all_array;  /* all TestResults gathered from the sub-suite run */

/* Scratch temp files (handle + pathname pairs), presumably used to pass
 * test names and failure line numbers between the forked sub-suite run
 * and the master checks — TODO confirm against the code that opens them. */
FILE * test_names_file = NULL;
char * test_names_file_name = NULL;
FILE * line_num_failures = NULL;
char * line_num_failures_file_name = NULL;
/* How an expected message in the master test table is compared against
 * the actual result message produced by the sub-suite. */
enum ck_test_msg_type_t {
#if ENABLE_REGEX
    /* Expected message is a POSIX regular expression; used for tests
     * whose output differs between platforms. Only available when the
     * build has <regex.h> support (ENABLE_REGEX). */
    CK_MSG_REGEXP,
#endif
    /* Expected message is plain text, compared verbatim. */
    CK_MSG_TEXT
};
/* Buffer length for message strings — assumed to bound snprintf-style
 * formatting elsewhere in this file; TODO confirm at the use sites. */
#define MAXSTR 300

/* One row of the expected-results table (master_tests below): identifies
 * a test by its tcase and test-function name and records the outcome and
 * message the master suite expects it to produce. Field order must match
 * the brace initializers in master_tests[]. */
typedef struct {
    const char *tcname;               /* name of the TCase the test belongs to */
    const char *test_name;            /* name of the test function */
    int failure_type;                 /* expected result: CK_PASS, CK_FAILURE or CK_ERROR */
    enum ck_test_msg_type_t msg_type; /* how 'msg' is matched (verbatim text or regex) */
    const char *msg;                  /* expected result message */
} master_test_t;
/* Capacity of the signal-message buffers below. */
#define SIG_STR_LEN 128

/* Expected messages for tests that die by signal (SIGSEGV == 11,
 * SIGFPE == 8). The exact wording is platform dependent, so these are
 * referenced from master_tests[] and presumably formatted at runtime by
 * setup code outside this chunk — TODO confirm where they are filled in. */
static char signal_11_str[SIG_STR_LEN];
static char signal_11_8_str[SIG_STR_LEN];
static char signal_8_str[SIG_STR_LEN];
  58. static master_test_t master_tests[] = {
  59. { "Simple Tests", "test_lno", CK_FAILURE, CK_MSG_TEXT, "Failure expected" },
  60. #if defined(HAVE_FORK) && HAVE_FORK==1
  61. { "Simple Tests", "test_mark_lno", CK_ERROR, CK_MSG_TEXT, "Early exit with return value 1" },
  62. #endif
  63. { "Simple Tests", "test_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  64. { "Simple Tests", "test_fail_unless", CK_FAILURE, CK_MSG_TEXT, "This test should fail" },
  65. { "Simple Tests", "test_fail_if_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  66. { "Simple Tests", "test_fail_if_fail", CK_FAILURE, CK_MSG_TEXT, "This test should fail" },
  67. { "Simple Tests", "test_fail_null_msg", CK_FAILURE, CK_MSG_TEXT, "Assertion '2 == 3' failed" },
  68. #if defined(__GNUC__)
  69. { "Simple Tests", "test_fail_no_msg", CK_FAILURE, CK_MSG_TEXT, "Assertion '4 == 5' failed" },
  70. #endif /* __GNUC__ */
  71. { "Simple Tests", "test_fail_if_null_msg", CK_FAILURE, CK_MSG_TEXT, "Failure '2 != 3' occurred" },
  72. #if defined(__GNUC__)
  73. { "Simple Tests", "test_fail_if_no_msg", CK_FAILURE, CK_MSG_TEXT, "Failure '4 != 5' occurred" },
  74. #endif /* __GNUC__ */
  75. { "Simple Tests", "test_fail_vararg_msg_1", CK_FAILURE, CK_MSG_TEXT, "3 != 4" },
  76. { "Simple Tests", "test_fail_vararg_msg_2", CK_FAILURE, CK_MSG_TEXT, "5 != 6" },
  77. { "Simple Tests", "test_fail_vararg_msg_3", CK_FAILURE, CK_MSG_TEXT, "7 == 7" },
  78. #if defined(__GNUC__)
  79. { "Simple Tests", "test_fail_empty", CK_FAILURE, CK_MSG_TEXT, "Failed" },
  80. #endif /* __GNUC__ */
  81. { "Simple Tests", "test_ck_abort", CK_FAILURE, CK_MSG_TEXT, "Failed" },
  82. { "Simple Tests", "test_ck_abort_msg", CK_FAILURE, CK_MSG_TEXT, "Failure expected" },
  83. { "Simple Tests", "test_ck_abort_msg_null", CK_FAILURE, CK_MSG_TEXT, "Failed" },
  84. { "Simple Tests", "test_ck_assert", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == y' failed" },
  85. { "Simple Tests", "test_ck_assert_null", CK_FAILURE, CK_MSG_TEXT, "Assertion '0' failed" },
  86. { "Simple Tests", "test_ck_assert_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '1%f == 1' failed" },
  87. { "Simple Tests", "test_ck_assert_int_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == y' failed: x == 3, y == 4" },
  88. { "Simple Tests", "test_ck_assert_int_eq_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d == 2%f' failed: 3%d == 1, 2%f == 0" },
  89. { "Simple Tests", "test_ck_assert_int_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x != y' failed: x == 3, y == 3" },
  90. { "Simple Tests", "test_ck_assert_int_ne_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d != 3%f' failed: 3%d == 1, 3%f == 1" },
  91. { "Simple Tests", "test_ck_assert_int_lt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x < x' failed: x == 2, x == 2" },
  92. { "Simple Tests", "test_ck_assert_int_lt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d < 3%f' failed: 3%d == 1, 3%f == 0" },
  93. { "Simple Tests", "test_ck_assert_int_le", CK_FAILURE, CK_MSG_TEXT, "Assertion 'y <= x' failed: y == 3, x == 2" },
  94. { "Simple Tests", "test_ck_assert_int_le_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 2%f' failed: 3%d == 1, 2%f == 0" },
  95. { "Simple Tests", "test_ck_assert_int_gt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'y > y' failed: y == 3, y == 3" },
  96. { "Simple Tests", "test_ck_assert_int_gt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d > 3%f' failed: 3%d == 0, 3%f == 1" },
  97. { "Simple Tests", "test_ck_assert_int_ge", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y' failed: x == 2, y == 3" },
  98. { "Simple Tests", "test_ck_assert_int_ge_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d >= 4%f' failed: 3%d == 0, 4%f == 1" },
  99. { "Simple Tests", "test_ck_assert_int_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  100. { "Simple Tests", "test_ck_assert_uint_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == y' failed: x == 3, y == 4" },
  101. { "Simple Tests", "test_ck_assert_uint_eq_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d == 1%f' failed: 3%d == 1, 1%f == 0" },
  102. { "Simple Tests", "test_ck_assert_uint_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x != y' failed: x == 3, y == 3" },
  103. { "Simple Tests", "test_ck_assert_uint_ne_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '1%d != 1%f' failed: 1%d == 0, 1%f == 0" },
  104. { "Simple Tests", "test_ck_assert_uint_lt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x < x' failed: x == 2, x == 2" },
  105. { "Simple Tests", "test_ck_assert_uint_lt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d < 1%f' failed: 3%d == 1, 1%f == 0" },
  106. { "Simple Tests", "test_ck_assert_uint_le", CK_FAILURE, CK_MSG_TEXT, "Assertion 'y <= x' failed: y == 3, x == 2" },
  107. { "Simple Tests", "test_ck_assert_uint_le_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 1%f' failed: 3%d == 1, 1%f == 0" },
  108. { "Simple Tests", "test_ck_assert_uint_gt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'y > y' failed: y == 3, y == 3" },
  109. { "Simple Tests", "test_ck_assert_uint_gt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '1%d > 3%f' failed: 1%d == 0, 3%f == 1" },
  110. { "Simple Tests", "test_ck_assert_uint_ge", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y' failed: x == 2, y == 3" },
  111. { "Simple Tests", "test_ck_assert_uint_ge_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '1%d >= 3%f' failed: 1%d == 0, 3%f == 1" },
  112. { "Simple Tests", "test_ck_assert_uint_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  113. /* Tests on float macros */
  114. { "Simple Tests", "test_ck_assert_float_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == y' failed: x == 1.1, y == 1.2" },
  115. { "Simple Tests", "test_ck_assert_float_eq_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d == 2%f' failed: 3%d == 1, 2%f == 0" },
  116. { "Simple Tests", "test_ck_assert_float_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x != y' failed: x == 1.1, y == 1.1" },
  117. { "Simple Tests", "test_ck_assert_float_ne_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '1%d != 1%f' failed: 1%d == 1, 1%f == 1" },
  118. { "Simple Tests", "test_ck_assert_float_lt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x < y' failed: x == 2, y == 1.5" },
  119. { "Simple Tests", "test_ck_assert_float_lt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d < 2%f' failed: 3%d == 1, 2%f == 0" },
  120. { "Simple Tests", "test_ck_assert_float_le", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x <= y' failed: x == 2, y == 1.5" },
  121. { "Simple Tests", "test_ck_assert_float_le_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 2%f' failed: 3%d == 1, 2%f == 0" },
  122. { "Simple Tests", "test_ck_assert_float_gt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x > y' failed: x == 2.5, y == 3" },
  123. { "Simple Tests", "test_ck_assert_float_gt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d > 3%f' failed: 2%d == 0, 3%f == 1" },
  124. { "Simple Tests", "test_ck_assert_float_ge", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y' failed: x == 2.5, y == 3" },
  125. { "Simple Tests", "test_ck_assert_float_ge_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d >= 3%f' failed: 2%d == 0, 3%f == 1" },
  126. { "Simple Tests", "test_ck_assert_float_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  127. { "Simple Tests", "test_ck_assert_float_eq_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(y - x) < t' failed: x == 0.001, y == 0.003, t == 0.000990099" },
  128. { "Simple Tests", "test_ck_assert_float_eq_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(2%f - 3%d) < 2%p' failed: 3%d == 1, 2%f == 0, 2%p == 0" },
  129. { "Simple Tests", "test_ck_assert_float_ne_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(y - x) >= t' failed: x == 0.001, y == 0.002, t == 0.01" },
  130. { "Simple Tests", "test_ck_assert_float_ne_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(3%f - 3%d) >= 3%p' failed: 3%d == 1, 3%f == 1, 3%p == 1" },
  131. { "Simple Tests", "test_ck_assert_float_ge_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y, error < t' failed: x == 0.01, y == 0.03, t == 0.01" },
  132. { "Simple Tests", "test_ck_assert_float_ge_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d >= 3%f, error < 3%p' failed: 2%d == 0, 3%f == 1, 3%p == 1" },
  133. { "Simple Tests", "test_ck_assert_float_le_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'y <= x, error < t' failed: y == 0.03, x == 0.01, t == 0.01" },
  134. { "Simple Tests", "test_ck_assert_float_le_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 2%f, error < 3%p' failed: 3%d == 1, 2%f == 0, 3%p == 1" },
  135. { "Simple Tests", "test_ck_assert_float_tol_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  136. { "Simple Tests", "test_ck_assert_float_finite", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is finite' failed: x == inf" },
  137. { "Simple Tests", "test_ck_assert_float_finite_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x*(1%d) is finite' failed: x*(1%d) == inf" },
  138. { "Simple Tests", "test_ck_assert_float_infinite", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is infinite' failed: x == 0" },
  139. { "Simple Tests", "test_ck_assert_float_infinite_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d is infinite' failed: 2%d == 0" },
  140. { "Simple Tests", "test_ck_assert_float_nan", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is NaN' failed: x == inf" },
  141. { "Simple Tests", "test_ck_assert_float_nan_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d is NaN' failed: 2%d == 0" },
  142. #if ENABLE_REGEX
  143. { "Simple Tests", "test_ck_assert_float_nonnan", CK_FAILURE, CK_MSG_REGEXP, "^Assertion 'x is not NaN' failed: x == -?nan$" },
  144. { "Simple Tests", "test_ck_assert_float_nonnan_with_mod", CK_FAILURE, CK_MSG_REGEXP, "^Assertion '\\(2%s\\)\\*x is not NaN' failed: \\(2%s\\)\\*x == -?nan$" },
  145. #else
  146. { "Simple Tests", "test_ck_assert_float_nonnan", CK_PASS, CK_MSG_TEXT, "Passed" },
  147. { "Simple Tests", "test_ck_assert_float_nonnan_with_mod", CK_PASS, CK_MSG_TEXT, "Passed" },
  148. #endif
  149. { "Simple Tests", "test_ck_assert_float_nan_and_inf_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  150. /* End of tests on float macros */
  151. /* Tests on double macros */
  152. { "Simple Tests", "test_ck_assert_double_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == y' failed: x == 1.1, y == 1.2" },
  153. { "Simple Tests", "test_ck_assert_double_eq_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d == 2%f' failed: 3%d == 1, 2%f == 0" },
  154. { "Simple Tests", "test_ck_assert_double_eq_with_promotion", CK_PASS, CK_MSG_TEXT, "Passed" },
  155. { "Simple Tests", "test_ck_assert_double_eq_with_conv", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == 0.1' failed: x == 0.1, 0.1 == 0.1" },
  156. { "Simple Tests", "test_ck_assert_double_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x != y' failed: x == 1.1, y == 1.1" },
  157. { "Simple Tests", "test_ck_assert_double_ne_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '1%d != 1%f' failed: 1%d == 1, 1%f == 1" },
  158. { "Simple Tests", "test_ck_assert_double_lt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x < y' failed: x == 2, y == 1.5" },
  159. { "Simple Tests", "test_ck_assert_double_lt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d < 2%f' failed: 3%d == 1, 2%f == 0" },
  160. { "Simple Tests", "test_ck_assert_double_le", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x <= y' failed: x == 2, y == 1.5" },
  161. { "Simple Tests", "test_ck_assert_double_le_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 2%f' failed: 3%d == 1, 2%f == 0" },
  162. { "Simple Tests", "test_ck_assert_double_gt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x > y' failed: x == 2.5, y == 3" },
  163. { "Simple Tests", "test_ck_assert_double_gt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d > 3%f' failed: 2%d == 0, 3%f == 1" },
  164. { "Simple Tests", "test_ck_assert_double_ge", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y' failed: x == 2.5, y == 3" },
  165. { "Simple Tests", "test_ck_assert_double_ge_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d >= 3%f' failed: 2%d == 0, 3%f == 1" },
  166. { "Simple Tests", "test_ck_assert_double_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  167. { "Simple Tests", "test_ck_assert_double_eq_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(y - x) < t' failed: x == 0.001, y == 0.002, t == 0.000990099" },
  168. { "Simple Tests", "test_ck_assert_double_eq_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(2%f - 3%d) < 2%p' failed: 3%d == 1, 2%f == 0, 2%p == 0" },
  169. { "Simple Tests", "test_ck_assert_double_ne_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(y - x) >= t' failed: x == 0.001, y == 0.002, t == 0.01" },
  170. { "Simple Tests", "test_ck_assert_double_ne_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(3%f - 3%d) >= 3%p' failed: 3%d == 1, 3%f == 1, 3%p == 1" },
  171. { "Simple Tests", "test_ck_assert_double_ge_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y, error < t' failed: x == 0.01, y == 0.03, t == 0.01" },
  172. { "Simple Tests", "test_ck_assert_double_ge_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d >= 3%f, error < 3%p' failed: 2%d == 0, 3%f == 1, 3%p == 1" },
  173. { "Simple Tests", "test_ck_assert_double_le_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'y <= x, error < t' failed: y == 0.03, x == 0.01, t == 0.01" },
  174. { "Simple Tests", "test_ck_assert_double_le_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 2%f, error < 3%p' failed: 3%d == 1, 2%f == 0, 3%p == 1" },
  175. { "Simple Tests", "test_ck_assert_double_tol_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  176. { "Simple Tests", "test_ck_assert_double_finite", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is finite' failed: x == inf" },
  177. { "Simple Tests", "test_ck_assert_double_finite_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x*(1%d) is finite' failed: x*(1%d) == inf" },
  178. { "Simple Tests", "test_ck_assert_double_infinite", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is infinite' failed: x == 0" },
  179. { "Simple Tests", "test_ck_assert_double_infinite_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d is infinite' failed: 2%d == 0" },
  180. { "Simple Tests", "test_ck_assert_double_nan", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is NaN' failed: x == inf" },
  181. { "Simple Tests", "test_ck_assert_double_nan_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d is NaN' failed: 2%d == 0" },
  182. #if ENABLE_REGEX
  183. { "Simple Tests", "test_ck_assert_double_nonnan", CK_FAILURE, CK_MSG_REGEXP, "^Assertion 'x is not NaN' failed: x == -?nan$" },
  184. { "Simple Tests", "test_ck_assert_double_nonnan_with_mod", CK_FAILURE, CK_MSG_REGEXP, "^Assertion '\\(2%s\\)\\*x is not NaN' failed: \\(2%s\\)\\*x == -?nan$" },
  185. #else
  186. { "Simple Tests", "test_ck_assert_double_nonnan", CK_PASS, CK_MSG_TEXT, "Passed" },
  187. { "Simple Tests", "test_ck_assert_double_nonnan_with_mod", CK_PASS, CK_MSG_TEXT, "Passed" },
  188. #endif
  189. { "Simple Tests", "test_ck_assert_double_nan_and_inf_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  190. /* End of tests on double macros */
  191. /* Tests on long double macros */
  192. { "Simple Tests", "test_ck_assert_ldouble_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == y' failed: x == 1.1, y == 1.2" },
  193. { "Simple Tests", "test_ck_assert_ldouble_eq_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d == 2%f' failed: 3%d == 1, 2%f == 0" },
  194. { "Simple Tests", "test_ck_assert_ldouble_eq_with_promotion", CK_PASS, CK_MSG_TEXT, "Passed" },
  195. { "Simple Tests", "test_ck_assert_ldouble_eq_with_conv", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == 1.1' failed: x == 1.1, 1.1 == 1.1" },
  196. { "Simple Tests", "test_ck_assert_ldouble_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x != y' failed: x == 1.1, y == 1.1" },
  197. { "Simple Tests", "test_ck_assert_ldouble_ne_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '1%d != 1%f' failed: 1%d == 1, 1%f == 1" },
  198. { "Simple Tests", "test_ck_assert_ldouble_lt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x < y' failed: x == 2, y == 1.5" },
  199. { "Simple Tests", "test_ck_assert_ldouble_lt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d < 2%f' failed: 3%d == 1, 2%f == 0" },
  200. { "Simple Tests", "test_ck_assert_ldouble_le", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x <= y' failed: x == 2, y == 1.5" },
  201. { "Simple Tests", "test_ck_assert_ldouble_le_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 2%f' failed: 3%d == 1, 2%f == 0" },
  202. { "Simple Tests", "test_ck_assert_ldouble_gt", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x > y' failed: x == 2.5, y == 3" },
  203. { "Simple Tests", "test_ck_assert_ldouble_gt_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d > 3%f' failed: 2%d == 0, 3%f == 1" },
  204. { "Simple Tests", "test_ck_assert_ldouble_ge", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y' failed: x == 2.5, y == 3" },
  205. { "Simple Tests", "test_ck_assert_ldouble_ge_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d >= 3%f' failed: 2%d == 0, 3%f == 1" },
  206. { "Simple Tests", "test_ck_assert_ldouble_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  207. { "Simple Tests", "test_ck_assert_ldouble_eq_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(y - x) < t' failed: x == 0.001, y == 0.002, t == 0.000990099" },
  208. { "Simple Tests", "test_ck_assert_ldouble_eq_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(2%f - 3%d) < 2%p' failed: 3%d == 1, 2%f == 0, 2%p == 0" },
  209. { "Simple Tests", "test_ck_assert_ldouble_ne_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(y - x) >= t' failed: x == 0.001, y == 0.002, t == 0.01" },
  210. { "Simple Tests", "test_ck_assert_ldouble_ne_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'fabsl(3%f - 3%d) >= 3%p' failed: 3%d == 1, 3%f == 1, 3%p == 1" },
  211. { "Simple Tests", "test_ck_assert_ldouble_ge_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x >= y, error < t' failed: x == 0.01, y == 0.03, t == 0.01" },
  212. { "Simple Tests", "test_ck_assert_ldouble_ge_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d >= 3%f, error < 3%p' failed: 2%d == 0, 3%f == 1, 3%p == 1" },
  213. { "Simple Tests", "test_ck_assert_ldouble_le_tol", CK_FAILURE, CK_MSG_TEXT, "Assertion 'y <= x, error < t' failed: y == 0.03, x == 0.01, t == 0.01" },
  214. { "Simple Tests", "test_ck_assert_ldouble_le_tol_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '3%d <= 2%f, error < 3%p' failed: 3%d == 1, 2%f == 0, 3%p == 1" },
  215. { "Simple Tests", "test_ck_assert_ldouble_tol_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  216. { "Simple Tests", "test_ck_assert_ldouble_finite", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is finite' failed: x == inf" },
  217. { "Simple Tests", "test_ck_assert_ldouble_finite_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x*(1%d) is finite' failed: x*(1%d) == inf" },
  218. { "Simple Tests", "test_ck_assert_ldouble_infinite", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is infinite' failed: x == 0" },
  219. { "Simple Tests", "test_ck_assert_ldouble_infinite_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d is infinite' failed: 2%d == 0" },
  220. { "Simple Tests", "test_ck_assert_ldouble_nan", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x is NaN' failed: x == inf" },
  221. { "Simple Tests", "test_ck_assert_ldouble_nan_with_mod", CK_FAILURE, CK_MSG_TEXT, "Assertion '2%d is NaN' failed: 2%d == 0" },
  222. #if ENABLE_REGEX
  223. { "Simple Tests", "test_ck_assert_ldouble_nonnan", CK_FAILURE, CK_MSG_REGEXP, "^Assertion 'x is not NaN' failed: x == -?nan$" },
  224. { "Simple Tests", "test_ck_assert_ldouble_nonnan_with_mod", CK_FAILURE, CK_MSG_REGEXP, "^Assertion '\\(2%s\\)\\*x is not NaN' failed: \\(2%s\\)\\*x == -?nan$" },
  225. #else
  226. { "Simple Tests", "test_ck_assert_ldouble_nonnan", CK_PASS, CK_MSG_TEXT, "Passed" },
  227. { "Simple Tests", "test_ck_assert_ldouble_nonnan_with_mod", CK_PASS, CK_MSG_TEXT, "Passed" },
  228. #endif
  229. { "Simple Tests", "test_ck_assert_ldouble_nan_and_inf_with_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  230. /* End of tests on long double macros */
  231. { "Simple Tests", "test_percent_n_escaped", CK_FAILURE, CK_MSG_TEXT, "Assertion 'returnsZero(\"%n\") == 1' failed: returnsZero(\"%n\") == 0, 1 == 1" },
  232. { "Simple Tests", "test_ck_assert_str_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion '\"test1\" == s' failed: \"test1\" == \"test1\", s == \"test2\"" },
  233. { "Simple Tests", "test_ck_assert_str_eq_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 't == s' failed: t == (null), s == (null)" },
  234. { "Simple Tests", "test_ck_assert_str_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 't != s' failed: t == \"test2\", s == \"test2\"" },
  235. { "Simple Tests", "test_ck_assert_str_ne_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 't != s' failed: t == \"test\", s == (null)" },
  236. { "Simple Tests", "test_ck_assert_str_lt", CK_FAILURE, CK_MSG_TEXT, "Assertion 's < s' failed: s == \"test1\", s == \"test1\"" },
  237. { "Simple Tests", "test_ck_assert_str_lt_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 's < t' failed: s == (null), t == \"test\"" },
  238. { "Simple Tests", "test_ck_assert_str_le", CK_FAILURE, CK_MSG_TEXT, "Assertion 't <= s' failed: t == \"test2\", s == \"test1\"" },
  239. { "Simple Tests", "test_ck_assert_str_le_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 't <= s' failed: t == (null), s == (null)" },
  240. { "Simple Tests", "test_ck_assert_str_gt", CK_FAILURE, CK_MSG_TEXT, "Assertion 't > t' failed: t == \"test2\", t == \"test2\"" },
  241. { "Simple Tests", "test_ck_assert_str_gt_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 't > s' failed: t == \"test\", s == (null)" },
  242. { "Simple Tests", "test_ck_assert_str_ge", CK_FAILURE, CK_MSG_TEXT, "Assertion 's >= t' failed: s == \"test1\", t == \"test2\"" },
  243. { "Simple Tests", "test_ck_assert_str_ge_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 's >= t' failed: s == (null), t == (null)" },
  244. { "Simple Tests", "test_ck_assert_str_expr", CK_PASS, CK_MSG_TEXT, "Passed" },
  245. { "Simple Tests", "test_ck_assert_pstr_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion '\"test1\" == s' failed: \"test1\" == \"test1\", s == \"test\"" },
  246. { "Simple Tests", "test_ck_assert_pstr_eq_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 't == s' failed: t == \"test\", s == (null)" },
  247. { "Simple Tests", "test_ck_assert_pstr_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 't != s' failed: t == \"test2\", s == \"test2\"" },
  248. { "Simple Tests", "test_ck_assert_pstr_ne_with_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 't != s' failed: t == (null), s == (null)" },
  249. { "Simple Tests", "test_ck_assert_ptr_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == y' failed: x == 0x1, y == 0x2" },
  250. { "Simple Tests", "test_ck_assert_ptr_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x != z' failed: x == 0x1, z == 0x1" },
  251. { "Simple Tests", "test_ck_assert_ptr_null", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x == NULL' failed: x == 0x1" },
  252. { "Simple Tests", "test_ck_assert_ptr_nonnull", CK_FAILURE, CK_MSG_TEXT, "Assertion 'x != NULL' failed: x == 0" },
  253. { "Simple Tests", "test_ck_assert_mem_eq", CK_FAILURE, CK_MSG_TEXT, "Assertion '\"\\x00\\x00\\x00\\x00\\x01\" == s' failed: \"\\x00\\x00\\x00\\x00\\x01\" == \"0000000001\", s == \"0000000002\"" },
  254. { "Simple Tests", "test_ck_assert_mem_ne", CK_FAILURE, CK_MSG_TEXT, "Assertion 't != s' failed: t == \"0000000002\", s == \"0000000002\"" },
  255. { "Simple Tests", "test_ck_assert_mem_lt", CK_FAILURE, CK_MSG_TEXT, "Assertion 's < s' failed: s == \"0000000001\", s == \"0000000001\"" },
  256. { "Simple Tests", "test_ck_assert_mem_le", CK_FAILURE, CK_MSG_TEXT, "Assertion 't <= s' failed: t == \"0000000002\", s == \"0000000001\"" },
  257. { "Simple Tests", "test_ck_assert_mem_gt", CK_FAILURE, CK_MSG_TEXT, "Assertion 't > t' failed: t == \"0000000002\", t == \"0000000002\"" },
  258. { "Simple Tests", "test_ck_assert_mem_ge", CK_FAILURE, CK_MSG_TEXT, "Assertion 's >= t' failed: s == \"0000000001\", t == \"0000000002\"" },
  259. { "Simple Tests", "test_ck_assert_mem_zerolen", CK_PASS, CK_MSG_TEXT, "Passed" },
  260. { "Simple Tests", "test_ck_assert_mem_eq_exact", CK_FAILURE, CK_MSG_TEXT, "Assertion 't == s' failed: t == \"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001\", s == \"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002\"" },
  261. { "Simple Tests", "test_ck_assert_mem_eq_longer", CK_FAILURE, CK_MSG_TEXT, "Assertion 't == s' failed: t == \"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000..\", s == \"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000..\"" },
  262. #if defined(HAVE_FORK) && HAVE_FORK==1
  263. { "Signal Tests", "test_segv", CK_ERROR, CK_MSG_TEXT, signal_11_str },
  264. { "Signal Tests", "test_segv_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  265. { "Signal Tests", "test_segv", CK_ERROR, CK_MSG_TEXT, signal_11_8_str },
  266. { "Signal Tests", "test_non_signal_8", CK_FAILURE, CK_MSG_TEXT, "Early exit with return value 0" },
  267. { "Signal Tests", "test_fail_unless", CK_FAILURE, CK_MSG_TEXT, "Early exit with return value 1" },
  268. #if !defined(__CYGWIN__)
  269. { "Signal Tests", "test_fpe", CK_ERROR, CK_MSG_TEXT, signal_8_str },
  270. { "Signal Tests", "test_mark_point", CK_ERROR, CK_MSG_TEXT, signal_8_str },
  271. #endif /* !defined(__CYGWIN__) */
  272. #endif /* HAVE_FORK */
  273. #if TIMEOUT_TESTS_ENABLED && defined(HAVE_FORK) && HAVE_FORK==1
  274. #if HAVE_DECL_SETENV
  275. { "Environment Integer Timeout Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  276. { "Environment Integer Timeout Tests", "test_sleep2_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  277. { "Environment Integer Timeout Tests", "test_sleep5_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  278. { "Environment Integer Timeout Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  279. { "Environment Double Timeout Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  280. #ifdef HAVE_LIBRT
  281. { "Environment Double Timeout Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  282. { "Environment Double Timeout Tests", "test_sleep1_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  283. #endif /* HAVE_LIBRT */
  284. { "Environment Double Timeout Tests", "test_sleep2_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  285. { "Environment Double Timeout Tests", "test_sleep5_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  286. { "Environment Double Timeout Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  287. #endif /* HAVE_DECL_SETENV */
  288. { "Default Timeout Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  289. #ifdef HAVE_LIBRT
  290. { "Default Timeout Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  291. { "Default Timeout Tests", "test_sleep1_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  292. #endif /* HAVE_LIBRT */
  293. { "Default Timeout Tests", "test_sleep2_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  294. { "Default Timeout Tests", "test_sleep5_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  295. { "Default Timeout Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  296. { "User Integer Timeout Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  297. { "User Integer Timeout Tests", "test_sleep2_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  298. { "User Integer Timeout Tests", "test_sleep5_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  299. { "User Integer Timeout Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  300. { "User Double Timeout Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  301. #ifdef HAVE_LIBRT
  302. { "User Double Timeout Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  303. { "User Double Timeout Tests", "test_sleep1_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  304. #endif /* HAVE_LIBRT */
  305. { "User Double Timeout Tests", "test_sleep2_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  306. { "User Double Timeout Tests", "test_sleep5_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  307. { "User Double Timeout Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  308. #if HAVE_DECL_SETENV
  309. { "Environment Integer Timeout Scaling Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  310. #ifdef HAVE_LIBRT
  311. { "Environment Integer Timeout Scaling Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  312. { "Environment Integer Timeout Scaling Tests", "test_sleep1_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  313. #endif /* HAVE_LIBRT */
  314. { "Environment Integer Timeout Scaling Tests", "test_sleep2_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  315. { "Environment Integer Timeout Scaling Tests", "test_sleep5_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  316. { "Environment Integer Timeout Scaling Tests", "test_sleep9_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  317. { "Environment Integer Timeout Scaling Tests", "test_sleep14_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  318. { "Environment Double Timeout Scaling Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  319. #ifdef HAVE_LIBRT
  320. { "Environment Double Timeout Scaling Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  321. { "Environment Double Timeout Scaling Tests", "test_sleep1_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  322. #endif /* HAVE_LIBRT */
  323. { "Environment Double Timeout Scaling Tests", "test_sleep2_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  324. { "Environment Double Timeout Scaling Tests", "test_sleep5_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  325. { "Environment Double Timeout Scaling Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  326. { "Environment Double Timeout Scaling Tests", "test_sleep14_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  327. { "Timeout Integer Scaling Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  328. #ifdef HAVE_LIBRT
  329. { "Timeout Integer Scaling Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  330. { "Timeout Integer Scaling Tests", "test_sleep1_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  331. { "Timeout Integer Scaling Tests", "test_sleep2_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  332. #endif /* HAVE_LIBRT */
  333. { "Timeout Integer Scaling Tests", "test_sleep5_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  334. { "Timeout Integer Scaling Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  335. { "Timeout Double Scaling Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  336. #ifdef HAVE_LIBRT
  337. { "Timeout Double Scaling Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  338. { "Timeout Double Scaling Tests", "test_sleep1_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  339. #endif /* HAVE_LIBRT */
  340. { "Timeout Double Scaling Tests", "test_sleep2_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  341. { "Timeout Double Scaling Tests", "test_sleep5_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  342. { "Timeout Double Scaling Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  343. { "User Integer Timeout Scaling Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  344. #ifdef HAVE_LIBRT
  345. { "User Integer Timeout Scaling Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  346. { "User Integer Timeout Scaling Tests", "test_sleep1_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  347. #endif /* HAVE_LIBRT */
  348. { "User Integer Timeout Scaling Tests", "test_sleep2_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  349. { "User Integer Timeout Scaling Tests", "test_sleep5_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  350. { "User Integer Timeout Scaling Tests", "test_sleep9_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  351. { "User Integer Timeout Scaling Tests", "test_sleep14_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  352. { "User Double Timeout Scaling Tests", "test_eternal_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  353. #ifdef HAVE_LIBRT
  354. { "User Double Timeout Scaling Tests", "test_sleep0_025_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  355. { "User Double Timeout Scaling Tests", "test_sleep1_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  356. #endif /* HAVE_LIBRT */
  357. { "User Double Timeout Scaling Tests", "test_sleep2_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  358. { "User Double Timeout Scaling Tests", "test_sleep5_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  359. { "User Double Timeout Scaling Tests", "test_sleep9_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  360. { "User Double Timeout Scaling Tests", "test_sleep14_fail", CK_ERROR, CK_MSG_TEXT, "Test timeout expired" },
  361. #endif /* HAVE_DECL_SETENV */
  362. #endif /* TIMEOUT_TESTS_ENABLED && defined(HAVE_FORK) */
  363. #if defined(HAVE_FORK) && HAVE_FORK==1
  364. { "Limit Tests", "test_early_exit", CK_ERROR, CK_MSG_TEXT, "Early exit with return value 1" },
  365. #endif /* HAVE_FORK */
  366. #if MEMORY_LEAKING_TESTS_ENABLED
  367. { "Limit Tests", "test_null", CK_FAILURE, CK_MSG_TEXT, "Completed properly" },
  368. #endif /* MEMORY_LEAKING_TESTS_ENABLED */
  369. { "Limit Tests", "test_null_2", CK_FAILURE, CK_MSG_TEXT, "Completed properly" },
  370. #if defined(HAVE_FORK) && HAVE_FORK==1
  371. { "Msg and fork Tests", "test_fork1p_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  372. { "Msg and fork Tests", "test_fork1p_fail", CK_FAILURE, CK_MSG_TEXT, "Expected fail" },
  373. { "Msg and fork Tests", "test_fork1c_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  374. { "Msg and fork Tests", "test_fork1c_fail", CK_FAILURE, CK_MSG_TEXT, "Expected fail" },
  375. { "Msg and fork Tests", "test_fork2_pass", CK_PASS, CK_MSG_TEXT, "Passed" },
  376. { "Msg and fork Tests", "test_fork2_fail", CK_FAILURE, CK_MSG_TEXT, "Expected fail" },
  377. #endif /* HAVE_FORK */
  378. #if defined(HAVE_FORK) && HAVE_FORK==1
  379. #if MEMORY_LEAKING_TESTS_ENABLED
  380. { "Check Errors Tests", "test_invalid_set_fork_status", CK_FAILURE, CK_MSG_TEXT, "Early exit with return value 2" },
  381. #endif
  382. { "Check Ignore Exit Handlers", "test_ignore_exit_handlers", CK_FAILURE, CK_MSG_TEXT, "Failed" },
  383. #endif /* HAVE_FORK */
  384. { "Core", "test_srunner", CK_PASS, CK_MSG_TEXT, "Passed" },
  385. { "Core", "test_2nd_suite", CK_FAILURE, CK_MSG_TEXT, "We failed" }
  386. };
  387. static int nr_of_master_tests = sizeof master_tests /sizeof master_tests[0];
  388. START_TEST(test_check_nfailures)
  389. {
  390. int i;
  391. int number_failed = 0;
  392. for (i = 0; i < nr_of_master_tests; i++) {
  393. if (master_tests[i].failure_type != CK_PASS) {
  394. number_failed++;
  395. }
  396. }
  397. ck_assert_msg (sub_nfailed == number_failed,
  398. "Unexpected number of failures received, %d, expected %d.",
  399. sub_nfailed, number_failed);
  400. }
  401. END_TEST
/* Verify that the number of tests the sub-suite actually ran matches
 * the number of entries in the master_tests expectation table. */
START_TEST(test_check_ntests_run)
{
  ck_assert_msg (sub_ntests == nr_of_master_tests,
                 "Unexpected number of tests run %d vs expected %d", sub_ntests, nr_of_master_tests);
}
END_TEST
  408. START_TEST(test_check_failure_msgs)
  409. {
  410. int i;
  411. int passed = 0;
  412. const char *got_msg;
  413. const char *expected_msg;
  414. unsigned char not_equal = 0;
  415. const char *msg_type_str;
  416. int reg_err;
  417. char err_text[256];
  418. TestResult *tr;
  419. #if ENABLE_REGEX
  420. regex_t re;
  421. #endif
  422. for (i = 0; i < sub_ntests; i++) {
  423. master_test_t *master_test = &master_tests[i];
  424. if (master_test->failure_type == CK_PASS) {
  425. passed++;
  426. continue;
  427. }
  428. ck_assert_msg(i - passed <= sub_nfailed, NULL);
  429. tr = tr_fail_array[i - passed];
  430. ck_assert_msg(tr != NULL, NULL);
  431. got_msg = tr_msg(tr);
  432. expected_msg = master_test->msg;
  433. switch (master_test->msg_type) {
  434. case CK_MSG_TEXT:
  435. if (strcmp(got_msg, expected_msg) != 0) {
  436. not_equal = 1;
  437. }
  438. break;
  439. #if ENABLE_REGEX
  440. case CK_MSG_REGEXP: {
  441. reg_err = regcomp(&re, expected_msg, REG_EXTENDED | REG_NOSUB);
  442. if (reg_err) {
  443. regerror(reg_err, &re, err_text, sizeof(err_text));
  444. ck_assert_msg(reg_err == 0,
  445. "For test %d:%s:%s Expected regexp '%s', but regcomp returned error '%s'",
  446. i, master_test->tcname, master_test->test_name, expected_msg,
  447. err_text);
  448. }
  449. reg_err = regexec(&re, got_msg, 0, NULL, 0);
  450. regfree(&re);
  451. if (reg_err) {
  452. not_equal = 1;
  453. }
  454. break;
  455. }
  456. #endif /* ENABLE_REGEX */
  457. default:
  458. /* Program should not reach here */
  459. break;
  460. }
  461. if (not_equal) {
  462. switch(master_test->msg_type) {
  463. #if ENABLE_REGEX
  464. case CK_MSG_REGEXP:
  465. msg_type_str = " regexp";
  466. break;
  467. #endif
  468. default:
  469. msg_type_str = "";
  470. }
  471. ck_abort_msg("For test %d:%s:%s Expected%s '%s', got '%s'",
  472. i, master_test->tcname, master_test->test_name, msg_type_str,
  473. expected_msg, got_msg);
  474. }
  475. }
  476. }
  477. END_TEST
  478. START_TEST(test_check_failure_lnos)
  479. {
  480. int i;
  481. long line_no;
  482. int passed = 0;
  483. int number_failed;
  484. TestResult *tr;
  485. /* Create list of line numbers where failures occurred */
  486. rewind(line_num_failures);
  487. for (i = 0; i < sub_ntests; i++) {
  488. if (master_tests[i].failure_type == CK_PASS) {
  489. passed++;
  490. continue;
  491. }
  492. number_failed = i - passed;
  493. ck_assert_msg(i - passed <= sub_nfailed, NULL);
  494. tr = tr_fail_array[number_failed];
  495. ck_assert_msg(tr != NULL, NULL);
  496. line_no = get_next_failure_line_num(line_num_failures);
  497. if(line_no == -1)
  498. {
  499. ck_abort_msg("Did not find the %dth failure line number for suite %s, msg %s",
  500. (number_failed+1), tr_tcname(tr), tr_msg(tr));
  501. }
  502. if (line_no > 0 && tr_lno(tr) != line_no) {
  503. ck_abort_msg("For test %d (failure %d): Expected lno %ld, got %d for suite %s, msg %s",
  504. i, number_failed, line_no, tr_lno(tr), tr_tcname(tr), tr_msg(tr));
  505. }
  506. }
  507. /* At this point, there should be no remaining failures */
  508. line_no = get_next_failure_line_num(line_num_failures);
  509. ck_assert_msg(line_no == -1,
  510. "No more failure line numbers expected, but found %ld", line_no);
  511. }
  512. END_TEST
  513. START_TEST(test_check_failure_ftypes)
  514. {
  515. int i;
  516. int passed = 0;
  517. TestResult *tr;
  518. for (i = 0; i < sub_ntests; i++) {
  519. if (master_tests[i].failure_type == CK_PASS) {
  520. passed++;
  521. continue;
  522. }
  523. ck_assert_msg(i - passed <= sub_nfailed, NULL);
  524. tr = tr_fail_array[i - passed];
  525. ck_assert_msg(tr != NULL, NULL);
  526. ck_assert_msg(master_tests[i].failure_type == tr_rtype(tr),
  527. "Failure type wrong for test %d:%s:%s",
  528. i, master_tests[i].tcname, master_tests[i].test_name);
  529. }
  530. }
  531. END_TEST
  532. START_TEST(test_check_failure_lfiles)
  533. {
  534. int i;
  535. for (i = 0; i < sub_nfailed; i++) {
  536. TestResult *tr = tr_fail_array[i];
  537. ck_assert_msg(tr != NULL, NULL);
  538. ck_assert_msg(tr_lfile(tr) != NULL, "Bad file name for test %d", i);
  539. ck_assert_msg(strstr(tr_lfile(tr), "check_check_sub.c") != 0,
  540. "Bad file name for test %d:%s:%s",
  541. i, master_tests[i].tcname, master_tests[i].test_name);
  542. }
  543. }
  544. END_TEST
  545. START_TEST(test_check_tcnames)
  546. {
  547. const char *tcname;
  548. tcname = tr_tcname(tr_all_array[_i]);
  549. if (strcmp(tcname, master_tests[_i].tcname) != 0) {
  550. ck_abort_msg("Expected '%s', got '%s' for test %d:%s",
  551. master_tests[_i].tcname, tcname,
  552. _i, master_tests[_i].test_name);
  553. }
  554. }
  555. END_TEST
  556. START_TEST(test_check_test_names)
  557. {
  558. int i;
  559. rewind(test_names_file);
  560. for (i = 0; i < sub_ntests; i++)
  561. {
  562. char* test_name = get_next_test_name(test_names_file);
  563. if(test_name == NULL || strcmp(master_tests[i].test_name, test_name) != 0)
  564. {
  565. ck_abort_msg("Expected test name '%s' but found '%s' for test %d:%s",
  566. master_tests[i].test_name,
  567. (test_name == NULL ? "(null)" : test_name),
  568. i, master_tests[i].tcname);
  569. }
  570. free(test_name);
  571. }
  572. }
  573. END_TEST
/* Loop test (_i indexes every recorded result, passes included): verify
 * that the message of result _i matches the expectation table, either as
 * exact text (CK_MSG_TEXT) or, when built with regex support, as a POSIX
 * extended regular expression (CK_MSG_REGEXP). */
START_TEST(test_check_all_msgs)
{
  const char *got_msg = tr_msg(tr_all_array[_i]);
  master_test_t *master_test = &master_tests[_i];
  const char *expected_msg = master_test->msg;
  unsigned char not_equal = 0;   /* set when the message does not match */
#if ENABLE_REGEX
  regex_t re;
#endif
  switch (master_test->msg_type) {
  case CK_MSG_TEXT:
    if (strcmp(got_msg, expected_msg) != 0) {
      not_equal = 1;
    }
    break;
#if ENABLE_REGEX
  case CK_MSG_REGEXP: {
    /* A pattern that fails to compile is a bug in the expectation table
     * itself; abort with the regerror() description. */
    int reg_err = regcomp(&re, expected_msg, REG_EXTENDED | REG_NOSUB);
    if (reg_err) {
      char err_text[256];
      regerror(reg_err, &re, err_text, sizeof(err_text));
      ck_assert_msg(reg_err == 0,
                    "For test %d:%s:%s Expected regexp '%s', but regcomp returned error '%s'",
                    _i, master_test->tcname, master_test->test_name, expected_msg,
                    err_text);
    }
    reg_err = regexec(&re, got_msg, 0, NULL, 0);
    regfree(&re);
    if (reg_err) {
      not_equal = 1;
    }
    break;
  }
#endif /* ENABLE_REGEX */
  default:
    /* Program should not reach here */
    break;
  }
  if (not_equal) {
    const char *msg_type_str;
    switch(master_test->msg_type) {
#if ENABLE_REGEX
    case CK_MSG_REGEXP:
      msg_type_str = " regexp";
      break;
#endif
    default:
      msg_type_str = "";
    }
    ck_abort_msg("For test %i:%s:%s expected%s '%s', got '%s'",
                 _i, master_test->tcname, master_test->test_name, msg_type_str,
                 expected_msg, got_msg);
  }
}
END_TEST
/* Loop test (_i indexes every recorded result): verify the result type
 * (pass/failure/error) of result _i matches the expectation table. */
START_TEST(test_check_all_ftypes)
{
  ck_assert_msg(master_tests[_i].failure_type == tr_rtype(tr_all_array[_i]),
                "For test %d:%s:%s failure type wrong, expected %d but got %d",
                _i, master_tests[_i].tcname, master_tests[_i].test_name,
                master_tests[_i].failure_type, tr_rtype(tr_all_array[_i]));
}
END_TEST
/* Shared state used to verify fixture ordering:
 * 0 = untouched, 1 = setup ran, 2 = a test ran, 3 = teardown ran. */
int test_fixture_val = 0;

/* Unchecked fixture setup: marks that setup has run. */
static void test_fixture_setup(void)
{
  test_fixture_val = 1;
}
/* Verifies the unchecked fixture setup ran (value == 1), then writes 2.
 * NOTE(review): this test is added multiple times in make_master_suite;
 * presumably the write to 2 does not leak between runs because each test
 * runs in a forked child (the extra copies are only added under
 * HAVE_FORK) — confirm against the framework's fork behavior. */
START_TEST(test_setup)
{
  ck_assert_msg (test_fixture_val == 1,
                 "Value not setup or changed across tests correctly");
  test_fixture_val = 2;
}
END_TEST
/* Unchecked fixture teardown: marks that teardown has run. */
static void test_fixture_teardown (void)
{
  test_fixture_val = 3;
}
/* Runs in a later test case ("Fixture Teardown Tests"): verifies that
 * the previous case's teardown executed and its effect (value == 3) is
 * visible here. */
START_TEST(test_teardown)
{
  ck_assert_msg (test_fixture_val == 3,
                 "Value not changed correctly in teardown");
}
END_TEST
  659. Suite *make_master_suite (void)
  660. {
  661. Suite *s;
  662. TCase *tc_core;
  663. TCase *tc_fixture;
  664. TCase *tc_post_teardown;
  665. s = suite_create("Master");
  666. tc_core = tcase_create("Core Tests");
  667. tc_fixture = tcase_create("Fixture Setup Tests");
  668. suite_add_tcase (s, tc_core);
  669. tcase_add_test (tc_core, test_check_nfailures);
  670. tcase_add_test (tc_core, test_check_ntests_run);
  671. tcase_add_test (tc_core, test_check_failure_msgs);
  672. tcase_add_test (tc_core, test_check_failure_ftypes);
  673. tcase_add_test (tc_core, test_check_failure_lnos);
  674. tcase_add_test (tc_core, test_check_failure_lfiles);
  675. tcase_add_test (tc_core, test_check_test_names);
  676. tcase_add_loop_test (tc_core, test_check_tcnames, 0, sub_ntests);
  677. tcase_add_loop_test (tc_core, test_check_all_msgs, 0, sub_ntests);
  678. tcase_add_loop_test (tc_core, test_check_all_ftypes, 0, nr_of_master_tests);
  679. tcase_add_unchecked_fixture(tc_fixture, test_fixture_setup,
  680. test_fixture_teardown);
  681. /* add the test 3 times to make sure we adequately test
  682. preservation of fixture values across tests, regardless
  683. of the order in which tests are added to the test case */
  684. tcase_add_test (tc_fixture, test_setup);
  685. #if defined(HAVE_FORK) && HAVE_FORK==1
  686. /* The remaining test runs only work if fork() is available. */
  687. tcase_add_test (tc_fixture, test_setup);
  688. tcase_add_test (tc_fixture, test_setup);
  689. #endif /* HAVE_FORK */
  690. suite_add_tcase (s, tc_fixture);
  691. tc_post_teardown = tcase_create ("Fixture Teardown Tests");
  692. tcase_add_test (tc_post_teardown, test_teardown);
  693. suite_add_tcase (s, tc_post_teardown);
  694. return s;
  695. }
  696. static void init_signal_strings(void)
  697. {
  698. /* strsignal may overwrite the string returned by the previous call */
  699. char *s8 = strdup(strsignal(8));
  700. char *s11 = strdup(strsignal(11));
  701. int n;
  702. n = snprintf(signal_11_str, SIG_STR_LEN, "Received signal 11 (%s)", s11);
  703. assert(n < SIG_STR_LEN);
  704. n = snprintf(signal_11_8_str, SIG_STR_LEN, "Received signal 11 (%s), expected 8 (%s)", s11, s8);
  705. assert(n < SIG_STR_LEN);
  706. n = snprintf(signal_8_str, SIG_STR_LEN, "Received signal 8 (%s)", s8);
  707. assert(n < SIG_STR_LEN);
  708. free(s8);
  709. free(s11);
  710. }
  711. void setup (void)
  712. {
  713. Suite *s = make_sub_suite();
  714. SRunner *sr = srunner_create(s);
  715. init_signal_strings();
  716. /*
  717. * Create files that will contain the test names and line numbers of the failures
  718. * in check_check_sub.c, as they occur.
  719. */
  720. #if !HAVE_MKSTEMP
  721. test_names_file_name = tempnam(NULL, "check_test_names_");
  722. test_names_file = fopen(test_names_file_name, "w+b");
  723. line_num_failures_file_name = tempnam(NULL, "check_error_linenums_");
  724. line_num_failures = fopen(line_num_failures_file_name, "w+b");
  725. #else
  726. test_names_file_name = strdup("check_test_names__XXXXXX");
  727. assert(test_names_file_name != NULL && "strdup() failed");
  728. test_names_file = fdopen(mkstemp(test_names_file_name), "w+b");
  729. line_num_failures_file_name = strdup("check_error_linenums_XXXXXX");
  730. assert(line_num_failures_file_name != NULL && "strdup() failed");
  731. line_num_failures = fdopen(mkstemp(line_num_failures_file_name), "w+b");
  732. #endif
  733. srunner_add_suite(sr, make_sub2_suite());
  734. srunner_run_all(sr, CK_VERBOSE);
  735. tr_fail_array = srunner_failures(sr);
  736. tr_all_array = srunner_results(sr);
  737. sub_nfailed = srunner_ntests_failed(sr);
  738. sub_ntests = srunner_ntests_run(sr);
  739. }
  740. void cleanup (void)
  741. {
  742. fclose(line_num_failures);
  743. line_num_failures = NULL;
  744. unlink(line_num_failures_file_name);
  745. free(line_num_failures_file_name);
  746. line_num_failures_file_name = NULL;
  747. fclose(test_names_file);
  748. test_names_file = NULL;
  749. unlink(test_names_file_name);
  750. free(test_names_file_name);
  751. test_names_file_name = NULL;
  752. }
  753. void record_test_name(const char* test_name)
  754. {
  755. int result;
  756. if(test_names_file == NULL)
  757. {
  758. /*
  759. * The file may not be setup. This may be because some of the tests
  760. * are being reused outside of the master suite. This is OK.
  761. * If the master suite runs and does not find test names it will
  762. * fail as expected.
  763. */
  764. fprintf(stderr, "Test name file not setup, not reporting test failure");
  765. return;
  766. }
  767. fprintf(test_names_file, "%s\n", test_name);
  768. result = fflush(test_names_file);
  769. if(result != 0)
  770. {
  771. fprintf(stderr, "%s:%d: Error in call to fflush", __FILE__, __LINE__);
  772. exit(1);
  773. }
  774. }
  775. char* get_next_test_name(FILE * file)
  776. {
  777. char * line = NULL;
  778. size_t length;
  779. ssize_t written;
  780. written = getline(&line, &length, file);
  781. /**
  782. * getline() will leave a \n at the end of the line,
  783. * remove it if it is present.
  784. */
  785. if(written > 0 && line[written-1] == '\n')
  786. {
  787. line[written-1] = '\0';
  788. }
  789. return line;
  790. }
  791. void record_failure_line_num(int linenum)
  792. {
  793. size_t to_write;
  794. size_t written;
  795. int result, chars_printed;
  796. char string[16];
  797. /*
  798. * Because this call will occur right before a failure,
  799. * add +1 so the linenum will be that of the failure
  800. */
  801. linenum += 1;
  802. chars_printed = snprintf(string, sizeof(string), "%d\n", linenum);
  803. if(chars_printed <= 0 || (size_t) chars_printed >= sizeof(string))
  804. {
  805. fprintf(stderr, "%s:%d: Error in call to snprintf:", __FILE__, __LINE__);
  806. exit(1);
  807. }
  808. to_write = (size_t) chars_printed;
  809. if(line_num_failures == NULL)
  810. {
  811. /*
  812. * The file may not be setup. This may be because some of the tests
  813. * are being reused outside of the master suite. This is OK.
  814. * If the master suite runs and does not find line numbers it will
  815. * fail as expected.
  816. */
  817. fprintf(stderr, "Line number file not setup, not reporting test failure line: %s", string);
  818. return;
  819. }
  820. written = fwrite(string, 1, to_write, line_num_failures);
  821. if(written != to_write)
  822. {
  823. fprintf(stderr, "%s:%d: Error in call to fwrite, wrote " CK_FMT_ZD " instead of " CK_FMT_ZU ":", __FILE__, __LINE__, written, to_write);
  824. exit(1);
  825. }
  826. result = fflush(line_num_failures);
  827. if(result != 0)
  828. {
  829. fprintf(stderr, "%s:%d: Error in call to fflush", __FILE__, __LINE__);
  830. exit(1);
  831. }
  832. }
  833. long get_next_failure_line_num(FILE * file)
  834. {
  835. char * line = NULL;
  836. char * end = NULL;
  837. size_t length;
  838. ssize_t written;
  839. long value = -1;
  840. written = getline(&line, &length, file);
  841. if(written > 0)
  842. {
  843. /*
  844. * getline() will leave the \n at the end of the parsed line, if
  845. * it is found. Remove this before passing to strtol, so we
  846. * may detect invalid characters by checking for \0 instead
  847. */
  848. if(line[written-1] == '\n')
  849. {
  850. line[written-1] = '\0';
  851. }
  852. value = strtol(line, &end, 10);
  853. if(value <= 0 || *end != '\0')
  854. {
  855. fprintf(stderr, "%s:%d: Failed to convert next failure line number, found '%s'\n",
  856. __FILE__, __LINE__, line);
  857. exit(1);
  858. }
  859. }
  860. free(line);
  861. return value;
  862. }