summaryrefslogtreecommitdiffstats
path: root/test/testutil.c
diff options
context:
space:
mode:
authorEmilia Kasper <emilia@openssl.org>2016-03-17 15:14:30 +0100
committerEmilia Kasper <emilia@openssl.org>2016-04-05 13:44:46 +0200
commit453dfd8d5ee0893146e0fb61a5978ab59ba95c01 (patch)
tree6ada91599f4ebe125be3d34a69716b23a0688b05 /test/testutil.c
parent173f613b6a9029f34454b642ee4f3db6c6566fcb (diff)
New SSL test framework
Currently, SSL tests are configured via command-line switches to ssltest.c. This results in a lot of duplication between ssltest.c and apps, and a complex setup. ssltest.c is also simply old and needs maintenance. Instead, we already have a way to configure SSL servers and clients, so we leverage that. SSL tests can now be configured from a configuration file. Test servers and clients are configured using the standard ssl_conf module. Additional test settings are configured via a test configuration. Moreover, since the CONF language involves unnecessary boilerplate, the test conf itself is generated from a shorter Perl syntax. The generated testcase files are checked in to the repo to make it easier to verify that the intended test cases are in fact run; and to simplify debugging failures. To demonstrate the approach, min/max protocol tests are converted to the new format. This change also fixes MinProtocol and MaxProtocol handling. It was previously requested that an SSL_CTX have both the server and client flags set for these commands; this clearly can never work. Guide to this PR: - test/ssl_test.c - test framework - test/ssl_test_ctx.* - test configuration structure - test/handshake_helper.* - new SSL test handshaking code - test/ssl-tests/ - test configurations - test/generate_ssl_tests.pl - script for generating CONF-style test configurations from perl inputs Reviewed-by: Richard Levitte <levitte@openssl.org>
Diffstat (limited to 'test/testutil.c')
-rw-r--r--test/testutil.c49
1 file changed, 41 insertions, 8 deletions
diff --git a/test/testutil.c b/test/testutil.c
index de5598cb28..ccb6248234 100644
--- a/test/testutil.c
+++ b/test/testutil.c
@@ -68,37 +68,70 @@
typedef struct test_info {
const char *test_case_name;
int (*test_fn) ();
+ int (*param_test_fn)(int idx);
+ int num;
} TEST_INFO;
static TEST_INFO all_tests[1024];
static int num_tests = 0;
+/*
+ * A parameterised test runs a loop of test cases.
+ * |num_test_cases| counts the total number of test cases
+ * across all tests.
+ */
+static int num_test_cases = 0;
void add_test(const char *test_case_name, int (*test_fn) ())
{
assert(num_tests != OSSL_NELEM(all_tests));
all_tests[num_tests].test_case_name = test_case_name;
all_tests[num_tests].test_fn = test_fn;
+ all_tests[num_tests].num = -1;
+ ++num_test_cases;
+ ++num_tests;
+}
+
+void add_all_tests(const char *test_case_name, int(*test_fn)(int idx),
+ int num)
+{
+ assert(num_tests != OSSL_NELEM(all_tests));
+ all_tests[num_tests].test_case_name = test_case_name;
+ all_tests[num_tests].param_test_fn = test_fn;
+ all_tests[num_tests].num = num;
++num_tests;
+ num_test_cases += num;
}
int run_tests(const char *test_prog_name)
{
int num_failed = 0;
- int i = 0;
- printf("%s: %d test case%s\n", test_prog_name, num_tests,
- num_tests == 1 ? "" : "s");
+ int i, j;
+
+ printf("%s: %d test case%s\n", test_prog_name, num_test_cases,
+ num_test_cases == 1 ? "" : "s");
+
for (i = 0; i != num_tests; ++i) {
- if (all_tests[i].test_fn()) {
- printf("** %s failed **\n--------\n",
- all_tests[i].test_case_name);
- ++num_failed;
+ if (all_tests[i].num == -1) {
+ if (all_tests[i].test_fn()) {
+ printf("** %s failed **\n--------\n",
+ all_tests[i].test_case_name);
+ ++num_failed;
+ }
+ } else {
+ for (j = 0; j < all_tests[i].num; j++) {
+ if (all_tests[i].param_test_fn(j)) {
+ printf("** %s failed test %d\n--------\n",
+ all_tests[i].test_case_name, j);
+ ++num_failed;
+ }
+ }
}
}
if (num_failed != 0) {
printf("%s: %d test%s failed (out of %d)\n", test_prog_name,
- num_failed, num_failed != 1 ? "s" : "", num_tests);
+ num_failed, num_failed != 1 ? "s" : "", num_test_cases);
return EXIT_FAILURE;
}
printf(" All tests passed.\n");