WL#3206 (Add unit tests):
An implementation of the TAP framework for writing unit tests.
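For reference, a test program built with this library reports its results on stdout in the plain-text TAP (Test Anything Protocol) format, roughly along these lines (illustrative, not part of the commit):

1..3
ok 1 - first check
not ok 2 - second check
ok 3 # SKIP not relevant here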
@@ -29,7 +29,7 @@ SUBDIRS = . include @docs_dirs@ @zlib_dir@ @yassl_dir@ \
 			@mysql_se_plugins@ \
 			netware @libmysqld_dirs@ \
 			@bench_dirs@ support-files @tools_dirs@ \
-			plugin
+			plugin mytap unittest

 DIST_SUBDIRS = . include @docs_dirs@ zlib \
 			@readline_topdir@ sql-common \
@@ -2599,6 +2599,8 @@ AC_SUBST(MAKE_BINARY_DISTRIBUTION_OPTIONS)

# Output results
AC_CONFIG_FILES(Makefile extra/Makefile mysys/Makefile dnl
 mytap/Makefile mytap/t/Makefile unittest/Makefile dnl
 unittest/mysys/Makefile unittest/examples/Makefile dnl
 strings/Makefile regex/Makefile storage/Makefile storage/heap/Makefile dnl
 storage/myisam/Makefile storage/myisammrg/Makefile dnl
 os2/Makefile os2/include/Makefile os2/include/sys/Makefile dnl
@@ -2619,6 +2621,7 @@ AC_CONFIG_FILES(Makefile extra/Makefile mysys/Makefile dnl
 cmd-line-utils/readline/Makefile dnl
 plugin/Makefile dnl
 plugin/fulltext/Makefile)

AC_CONFIG_COMMANDS([default], , test -z "$CONFIG_HEADERS" || echo timestamp > stamp-h)
AC_OUTPUT
mytap/Doxyfile (new file, 1153 lines; diff not shown because the file is too large)
mytap/Makefile.am (new file, 10 lines)
@@ -0,0 +1,10 @@

AM_CPPFLAGS = -I$(top_builddir)/include -I$(top_srcdir)/include -I$(srcdir)
AM_CPPFLAGS += -I$(top_builddir)/mytap

noinst_LIBRARIES = libmytap.a
noinst_HEADERS = tap.h

libmytap_a_SOURCES = tap.c

SUBDIRS = . t
mytap/t/basic.t.c (new file, 17 lines)
@@ -0,0 +1,17 @@

#include <stdlib.h>
#include <tap.h>

int main() {
  plan(5);
  ok(1 == 1, "testing basic functions");
  ok(2 == 2, "");
  ok(3 == 3, NULL);
  if (1 == 1)
    skip(2, "Sensa fragoli");
  else {
    ok(1 == 2, "Should not be run at all");
    ok(1, "This one neither");
  }
  return exit_status();
}
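Run on its own, this example should produce output close to the following (the two trailing results come from the skip(2, ...) call):

1..5
ok 1 - testing basic functions
ok 2
ok 3
ok 4 # SKIP Sensa fragoli
ok 5 # SKIP Sensa fragoli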
mytap/tap.c (new file, 171 lines)
@@ -0,0 +1,171 @@

#include "tap.h"

#include <stdlib.h>
#include <stdarg.h>
#include <stdio.h>
#include <string.h>

/**
  Test data structure.

  Data structure containing all information about the test suite.
*/
static TEST_DATA g_test = { 0 };

/**
  Output stream for test report messages.

  The macro is just a temporary solution.
*/
#define tapout stdout

/**
  Emit a TAP result and optionally a description.

  @param pass 'true' if the test passed, 'false' otherwise
  @param fmt  Description of the test in printf() format.
  @param ap   Vararg list for the description string above.
*/
static void
emit_tap(int pass, char const *fmt, va_list ap)
{
  fprintf(tapout, "%sok %d%s",
          pass ? "" : "not ",
          ++g_test.last,
          (fmt && *fmt) ? " - " : "");
  if (fmt && *fmt)
    vfprintf(tapout, fmt, ap);
}


static void
emit_dir(const char *dir, const char *exp)
{
  fprintf(tapout, " # %s %s", dir, exp);
}


static void
emit_endl()
{
  fprintf(tapout, "\n");
}

void
diag(char const *fmt, ...)
{
  va_list ap;
  va_start(ap, fmt);
  fprintf(tapout, "# ");
  vfprintf(tapout, fmt, ap);
  fprintf(tapout, "\n");
  va_end(ap);
}


void
plan(int const count)
{
  g_test.plan= count;
  switch (count)
  {
  case NO_PLAN:
  case SKIP_ALL:
    break;
  default:
    if (count > 0)
      fprintf(tapout, "1..%d\n", count);
    break;
  }
}


void
skip_all(char const *reason, ...)
{
  va_list ap;
  va_start(ap, reason);
  fprintf(tapout, "1..0 # skip ");
  vfprintf(tapout, reason, ap);
  va_end(ap);
  exit(0);
}

void
ok(int const pass, char const *fmt, ...)
{
  va_list ap;

  if (!pass && *g_test.todo == '\0')
    ++g_test.failed;

  va_start(ap, fmt);
  emit_tap(pass, fmt, ap);
  va_end(ap);
  if (*g_test.todo != '\0')
    emit_dir("TODO", g_test.todo);
  emit_endl();
}


void
skip(int how_many, char const *fmt, ...)
{
  char reason[80];
  if (fmt && *fmt)
  {
    va_list ap;
    va_start(ap, fmt);
    vsnprintf(reason, sizeof(reason), fmt, ap);
    va_end(ap);
  }
  else
    reason[0] = '\0';

  while (how_many-- > 0)
  {
    va_list ap;
    emit_tap(1, NULL, ap);
    emit_dir("SKIP", reason);
    emit_endl();
  }
}

void
todo_start(char const *message, ...)
{
  va_list ap;
  va_start(ap, message);
  vsnprintf(g_test.todo, sizeof(g_test.todo), message, ap);
  va_end(ap);
}

void
todo_end()
{
  *g_test.todo = '\0';
}

int exit_status() {
  /*
    If there was no plan, write one now, based on the number of tests
    that were actually run.
  */
  if (g_test.plan == NO_PLAN)
    plan(g_test.last);

  if (g_test.plan != g_test.last)
  {
    diag("%d tests planned but only %d executed",
         g_test.plan, g_test.last);
    return EXIT_FAILURE;
  }

  if (g_test.failed > 0)
  {
    diag("Failed %d tests!", g_test.failed);
    return EXIT_FAILURE;
  }

  return EXIT_SUCCESS;
}
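A minimal sketch (not part of the commit) of how the todo machinery above behaves: a failing ok() inside a todo section is reported with a TODO directive and, per the accounting in ok() and exit_status(), does not make the suite fail.

#include <stdlib.h>
#include <tap.h>

int main() {
  plan(2);
  ok(1, "always passes");   /* prints: ok 1 - always passes */
  todo_start("known breakage");
  ok(0, "not fixed yet");   /* prints: not ok 2 - not fixed yet # TODO known breakage */
  todo_end();
  return exit_status();     /* EXIT_SUCCESS: the TODO failure is not counted */
}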
mytap/tap.h (new file, 187 lines)
@@ -0,0 +1,187 @@
#ifndef TAP_H
#define TAP_H

/*

*/

#define NO_PLAN (0)
#define SKIP_ALL (-1)

/**
  Data about the test plan.

  @internal We are using the "typedef struct X { ... } X" idiom to
  create class/struct X both in C and C++.
*/
typedef struct TEST_DATA {
  /**
    Number of tests that are planned to be executed.

    Can be zero (<code>NO_PLAN</code>) meaning that the plan string
    will be printed at the end of the test instead.
  */
  int plan;

  /** Number of the last test that was done or skipped. */
  int last;

  /** Number of tests that failed. */
  int failed;

  /** Todo reason. */
  char todo[128];
} TEST_DATA;

#ifdef __cplusplus
extern "C" {
#endif

/**
  Set the number of tests that are planned to be executed.

  The function also accepts the predefined constants SKIP_ALL and
  NO_PLAN.

  @param count
    The planned number of tests to run. Alternatively, SKIP_ALL or
    NO_PLAN can be supplied.
*/
void plan(int count);


/**
  Report a test result as a TAP line.

  Function used to write the status of an individual test. Call this
  function in the following manner:

  @code
  ok(ducks == paddling,
     "%d ducks did not paddle", ducks - paddling);
  @endcode

  @param pass Zero if the test failed, non-zero if it passed.
  @param fmt  Format string in printf() format. NULL is allowed, in
              which case no description is printed.
*/
void ok(int pass, char const *fmt, ...)
  __attribute__ ((format(printf,2,3)));

/**
  Skip a determined number of tests.

  Function to print that <em>how_many</em> tests have been
  skipped. The reason is printed for each skipped test. Observe
  that this function does not do the actual skipping for you, it just
  prints information that tests have been skipped. It shall be used
  in the following manner:

  @code
  if (ducks == 0) {
    skip(2, "No ducks in the pond");
  } else {
    int i;
    for (i = 0 ; i < 2 ; ++i)
      ok(duck[i] == paddling, "is duck %d paddling?", i);
  }
  @endcode

  @see SKIP_BLOCK_IF

  @param how_many Number of tests that are to be skipped.
  @param reason   A reason for skipping the tests.
*/
void skip(int how_many, char const *reason, ...)
  __attribute__ ((format(printf,2,3)));


/**
  Helper macro to skip a block of code. The macro can be used to
  simplify conditionally skipping a block of code. It is used in the
  following manner:

  @code
  SKIP_BLOCK_IF(ducks == 0, 2, "No ducks in the pond")
  {
    int i;
    for (i = 0 ; i < 2 ; ++i)
      ok(duck[i] == paddling, "is duck %d paddling?", i);
  }
  @endcode

  @see skip
*/
#define SKIP_BLOCK_IF(SKIP_IF_TRUE, COUNT, REASON) \
  if (SKIP_IF_TRUE) skip((COUNT),(REASON)); else

/**
  Print a diagnostics message.

  @param fmt Diagnostics message in printf() format.
*/
void diag(char const *fmt, ...)
  __attribute__ ((format(printf,1,2)));

/**
  Print a summary report and return the exit status.

  This function will print a summary report of how many tests passed,
  how many were skipped, and how many remain to do. The function
  should be called after all tests are executed in the following
  manner:

  @code
  return exit_status();
  @endcode

  @returns EXIT_SUCCESS if all tests passed, EXIT_FAILURE if one or
           more tests failed.
*/
int exit_status(void);


/**
  Skip the entire test suite.

  To skip the entire test suite, use this function. It will
  automatically call exit(), so there is no need to have checks
  around it.
*/
void skip_all(char const *reason, ...)
  __attribute__ ((noreturn, format(printf, 1, 2)));

/**
  Start a section of tests that are not yet ready.

  To start a section of tests that are not ready and are expected to
  fail, use this function and todo_end() in the following manner:

  @code
  todo_start("Not ready yet");
  ok(is_rocketeering(duck), "Rocket-propelled ducks");
  ok(is_kamikaze(duck), "Kamikaze ducks");
  todo_end();
  @endcode

  @see todo_end

  @note
  It is not possible to nest todo sections.

  @param message Message that will be reported for each test in the
                 todo section.
*/
void todo_start(char const *message, ...)
  __attribute__ ((format (printf, 1, 2)));

/**
  End a section of tests that are not yet ready.
*/
void todo_end();

#ifdef __cplusplus
}
#endif

#endif /* TAP_H */
unittest/Makefile.am (new file, 1 line)
@@ -0,0 +1 @@
SUBDIRS = mysys examples
unittest/examples/Makefile.am (new file, 20 lines)
@@ -0,0 +1,20 @@
AM_CPPFLAGS = -I$(srcdir) -I$(top_builddir)/include
AM_CPPFLAGS += -I$(top_builddir)/mytap

AM_LDFLAGS = -L$(top_builddir)/mytap

AM_CFLAGS = -Wall -ansi -pedantic

LDADD = -lmytap

noinst_PROGRAMS = simple.t skip.t todo.t skip_all.t no_plan.t

simple_t_SOURCES = simple.t.c

skip_t_SOURCES = skip.t.c

todo_t_SOURCES = todo.t.c

skip_all_t_SOURCES = skip_all.t.c

no_plan_t_SOURCES = no_plan.t.c
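Each example binary name.t is built from name.t.c; Automake canonicalizes the dot in the program name to an underscore, which is why the source variables are spelled simple_t_SOURCES and so on. A hypothetical new example foo.t (illustrative name, not in the commit) would follow the same pattern: list foo.t in noinst_PROGRAMS and add

foo_t_SOURCES = foo.t.c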
unittest/examples/no_plan.t.c (new file, 19 lines)
@@ -0,0 +1,19 @@

#include <stdlib.h>
#include <tap.h>

/*
  Sometimes, the number of tests is not known beforehand. In those
  cases, the plan can be omitted and will instead be written at the
  end of the test (inside exit_status()).

  Use this sparingly; it is a last resort: planning how many tests you
  are going to run will help you catch the offending case when some
  tests are skipped for an unknown reason.
*/
int main() {
  ok(1, NULL);
  ok(1, NULL);
  ok(1, NULL);
  return exit_status();
}
unittest/examples/simple.t.c (new file, 38 lines)
@@ -0,0 +1,38 @@

#include <tap.h>

unsigned int gcs(unsigned int a, unsigned int b)
{
  if (b > a) {
    unsigned int t = a;
    a = b;
    b = t;
  }

  while (b != 0) {
    unsigned int m = a % b;
    a = b;
    b = m;
  }
  return a;
}

int main() {
  unsigned int a,b;
  unsigned int failed;
  plan(1);
  diag("Testing basic functions");
  failed = 0;
  for (a = 1 ; a < 2000 ; ++a)
    for (b = 1 ; b < 2000 ; ++b)
    {
      unsigned int d = gcs(a, b);
      if (a % d != 0 || b % d != 0) {
        ++failed;
        diag("Failed for gcs(%4u,%4u)", a, b);
      }
    }
  ok(failed == 0, "Testing gcs()");
  return exit_status();
}
unittest/examples/skip.t.c (new file, 14 lines)
@@ -0,0 +1,14 @@

#include <tap.h>
#include <stdlib.h>

int main() {
  plan(4);
  ok(1, NULL);
  ok(1, NULL);
  SKIP_BLOCK_IF(1, 2, "No point") {
    ok(1, NULL);
    ok(1, NULL);
  }
  return exit_status();
}
unittest/examples/skip_all.t.c (new file, 23 lines)
@@ -0,0 +1,23 @@

#include <stdlib.h>
#include <tap.h>

int has_feature() {
  return 0;
}

/*
  In some cases, an entire test file does not make sense because some
  feature is missing. In that case, the entire test case can be
  skipped in the following manner.
*/
int main() {
  if (!has_feature())
    skip_all("Missing feature");
  plan(4);
  ok(1, NULL);
  ok(1, NULL);
  ok(1, NULL);
  ok(1, NULL);
  return exit_status();
}
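Because has_feature() returns 0 here, skip_all() fires before the plan is printed, and the whole file reports just (approximately):

1..0 # skip Missing feature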
unittest/examples/todo.t.c (new file, 19 lines)
@@ -0,0 +1,19 @@

#include <stdlib.h>
#include <tap.h>

int main()
{
  plan(4);
  ok(1, NULL);
  ok(1, NULL);
  /*
    Tests in the todo region are expected to fail. If they don't,
    something is strange.
  */
  todo_start("Need to fix these");
  ok(0, NULL);
  ok(0, NULL);
  todo_end();
  return exit_status();
}
unittest/mysys/Makefile.am (new file, 14 lines)
@@ -0,0 +1,14 @@

AM_CPPFLAGS = -I$(srcdir) -I$(top_builddir)/include
AM_CPPFLAGS += -I$(top_builddir)/mytap

AM_CFLAGS = -Wall -ansi -pedantic

AM_LDFLAGS = -L$(top_builddir)/mytap -L$(top_builddir)/mysys
AM_LDFLAGS += -L$(top_builddir)/strings

LDADD = -lmytap -lmysys -lmystrings

noinst_PROGRAMS = bitmap.t

bitmap_t_SOURCES = bitmap.t.c
unittest/mysys/bitmap.t.c (new file, 374 lines)
@@ -0,0 +1,374 @@

#include <tap.h>

#include <my_global.h>
#include "my_bitmap.h"

#include <string.h>

static void bitmap_print(MY_BITMAP *map)
{
  uint32 *to= map->bitmap, *end= map->last_word_ptr;
  while (to <= end)
  {
    fprintf(stderr,"0x%x ", *to++);
  }
  fprintf(stderr,"\n");
}

uint get_rand_bit(uint bitsize)
{
  return (rand() % bitsize);
}

bool test_set_get_clear_bit(MY_BITMAP *map, uint bitsize)
{
  uint i, test_bit;
  uint no_loops= bitsize > 128 ? 128 : bitsize;
  for (i=0; i < no_loops; i++)
  {
    test_bit= get_rand_bit(bitsize);
    bitmap_set_bit(map, test_bit);
    if (!bitmap_is_set(map, test_bit))
      goto error1;
    bitmap_clear_bit(map, test_bit);
    if (bitmap_is_set(map, test_bit))
      goto error2;
  }
  return FALSE;
error1:
  printf("Error in set bit, bit %u, bitsize = %u", test_bit, bitsize);
  return TRUE;
error2:
  printf("Error in clear bit, bit %u, bitsize = %u", test_bit, bitsize);
  return TRUE;
}

bool test_flip_bit(MY_BITMAP *map, uint bitsize)
{
  uint i, test_bit;
  uint no_loops= bitsize > 128 ? 128 : bitsize;
  for (i=0; i < no_loops; i++)
  {
    test_bit= get_rand_bit(bitsize);
    bitmap_flip_bit(map, test_bit);
    if (!bitmap_is_set(map, test_bit))
      goto error1;
    bitmap_flip_bit(map, test_bit);
    if (bitmap_is_set(map, test_bit))
      goto error2;
  }
  return FALSE;
error1:
  printf("Error in flip bit 1, bit %u, bitsize = %u", test_bit, bitsize);
  return TRUE;
error2:
  printf("Error in flip bit 2, bit %u, bitsize = %u", test_bit, bitsize);
  return TRUE;
}

bool test_operators(MY_BITMAP *map, uint bitsize)
{
  return FALSE;
}

bool test_get_all_bits(MY_BITMAP *map, uint bitsize)
{
  uint i;
  bitmap_set_all(map);
  if (!bitmap_is_set_all(map))
    goto error1;
  if (!bitmap_is_prefix(map, bitsize))
    goto error5;
  bitmap_clear_all(map);
  if (!bitmap_is_clear_all(map))
    goto error2;
  if (!bitmap_is_prefix(map, 0))
    goto error6;
  for (i=0; i<bitsize;i++)
    bitmap_set_bit(map, i);
  if (!bitmap_is_set_all(map))
    goto error3;
  for (i=0; i<bitsize;i++)
    bitmap_clear_bit(map, i);
  if (!bitmap_is_clear_all(map))
    goto error4;
  return FALSE;
error1:
  diag("Error in set_all, bitsize = %u", bitsize);
  return TRUE;
error2:
  diag("Error in clear_all, bitsize = %u", bitsize);
  return TRUE;
error3:
  diag("Error in bitmap_is_set_all, bitsize = %u", bitsize);
  return TRUE;
error4:
  diag("Error in bitmap_is_clear_all, bitsize = %u", bitsize);
  return TRUE;
error5:
  diag("Error in set_all through set_prefix, bitsize = %u", bitsize);
  return TRUE;
error6:
  diag("Error in clear_all through set_prefix, bitsize = %u", bitsize);
  return TRUE;
}

bool test_compare_operators(MY_BITMAP *map, uint bitsize)
{
  uint i, j, test_bit1, test_bit2, test_bit3,test_bit4;
  uint no_loops= bitsize > 128 ? 128 : bitsize;
  MY_BITMAP map2_obj, map3_obj;
  MY_BITMAP *map2= &map2_obj, *map3= &map3_obj;
  uint32 map2buf[1024];
  uint32 map3buf[1024];
  bitmap_init(&map2_obj, map2buf, bitsize, FALSE);
  bitmap_init(&map3_obj, map3buf, bitsize, FALSE);
  bitmap_clear_all(map2);
  bitmap_clear_all(map3);
  for (i=0; i < no_loops; i++)
  {
    test_bit1=get_rand_bit(bitsize);
    bitmap_set_prefix(map, test_bit1);
    test_bit2=get_rand_bit(bitsize);
    bitmap_set_prefix(map2, test_bit2);
    bitmap_intersect(map, map2);
    test_bit3= test_bit2 < test_bit1 ? test_bit2 : test_bit1;
    bitmap_set_prefix(map3, test_bit3);
    if (!bitmap_cmp(map, map3))
      goto error1;
    bitmap_clear_all(map);
    bitmap_clear_all(map2);
    bitmap_clear_all(map3);
    test_bit1=get_rand_bit(bitsize);
    test_bit2=get_rand_bit(bitsize);
    test_bit3=get_rand_bit(bitsize);
    bitmap_set_prefix(map, test_bit1);
    bitmap_set_prefix(map2, test_bit2);
    test_bit3= test_bit2 > test_bit1 ? test_bit2 : test_bit1;
    bitmap_set_prefix(map3, test_bit3);
    bitmap_union(map, map2);
    if (!bitmap_cmp(map, map3))
      goto error2;
    bitmap_clear_all(map);
    bitmap_clear_all(map2);
    bitmap_clear_all(map3);
    test_bit1=get_rand_bit(bitsize);
    test_bit2=get_rand_bit(bitsize);
    test_bit3=get_rand_bit(bitsize);
    bitmap_set_prefix(map, test_bit1);
    bitmap_set_prefix(map2, test_bit2);
    bitmap_xor(map, map2);
    test_bit3= test_bit2 > test_bit1 ? test_bit2 : test_bit1;
    test_bit4= test_bit2 < test_bit1 ? test_bit2 : test_bit1;
    bitmap_set_prefix(map3, test_bit3);
    for (j=0; j < test_bit4; j++)
      bitmap_clear_bit(map3, j);
    if (!bitmap_cmp(map, map3))
      goto error3;
    bitmap_clear_all(map);
    bitmap_clear_all(map2);
    bitmap_clear_all(map3);
    test_bit1=get_rand_bit(bitsize);
    test_bit2=get_rand_bit(bitsize);
    test_bit3=get_rand_bit(bitsize);
    bitmap_set_prefix(map, test_bit1);
    bitmap_set_prefix(map2, test_bit2);
    bitmap_subtract(map, map2);
    if (test_bit2 < test_bit1)
    {
      bitmap_set_prefix(map3, test_bit1);
      for (j=0; j < test_bit2; j++)
        bitmap_clear_bit(map3, j);
    }
    if (!bitmap_cmp(map, map3))
      goto error4;
    bitmap_clear_all(map);
    bitmap_clear_all(map2);
    bitmap_clear_all(map3);
    test_bit1=get_rand_bit(bitsize);
    bitmap_set_prefix(map, test_bit1);
    bitmap_invert(map);
    bitmap_set_all(map3);
    for (j=0; j < test_bit1; j++)
      bitmap_clear_bit(map3, j);
    if (!bitmap_cmp(map, map3))
      goto error5;
    bitmap_clear_all(map);
    bitmap_clear_all(map3);
  }
  return FALSE;
error1:
  diag("intersect error bitsize=%u,size1=%u,size2=%u", bitsize,
       test_bit1,test_bit2);
  return TRUE;
error2:
  diag("union error bitsize=%u,size1=%u,size2=%u", bitsize,
       test_bit1,test_bit2);
  return TRUE;
error3:
  diag("xor error bitsize=%u,size1=%u,size2=%u", bitsize,
       test_bit1,test_bit2);
  return TRUE;
error4:
  diag("subtract error bitsize=%u,size1=%u,size2=%u", bitsize,
       test_bit1,test_bit2);
  return TRUE;
error5:
  diag("invert error bitsize=%u,size=%u", bitsize,
       test_bit1);
  return TRUE;
}

bool test_count_bits_set(MY_BITMAP *map, uint bitsize)
{
  uint i, bit_count=0, test_bit;
  uint no_loops= bitsize > 128 ? 128 : bitsize;
  for (i=0; i < no_loops; i++)
  {
    test_bit=get_rand_bit(bitsize);
    if (!bitmap_is_set(map, test_bit))
    {
      bitmap_set_bit(map, test_bit);
      bit_count++;
    }
  }
  if (bit_count==0 && bitsize > 0)
    goto error1;
  if (bitmap_bits_set(map) != bit_count)
    goto error2;
  return FALSE;
error1:
  diag("No bits set bitsize = %u", bitsize);
  return TRUE;
error2:
  diag("Wrong count of bits set, bitsize = %u", bitsize);
  return TRUE;
}

bool test_get_first_bit(MY_BITMAP *map, uint bitsize)
{
  uint i, j, test_bit;
  uint no_loops= bitsize > 128 ? 128 : bitsize;
  for (i=0; i < no_loops; i++)
  {
    test_bit=get_rand_bit(bitsize);
    bitmap_set_bit(map, test_bit);
    if (bitmap_get_first_set(map) != test_bit)
      goto error1;
    bitmap_set_all(map);
    bitmap_clear_bit(map, test_bit);
    if (bitmap_get_first(map) != test_bit)
      goto error2;
    bitmap_clear_all(map);
  }
  return FALSE;
error1:
  diag("get_first_set error bitsize=%u,prefix_size=%u",bitsize,test_bit);
  return TRUE;
error2:
  diag("get_first error bitsize= %u, prefix_size= %u",bitsize,test_bit);
  return TRUE;
}

bool test_get_next_bit(MY_BITMAP *map, uint bitsize)
{
  uint i, j, test_bit;
  uint no_loops= bitsize > 128 ? 128 : bitsize;
  for (i=0; i < no_loops; i++)
  {
    test_bit=get_rand_bit(bitsize);
    for (j=0; j < test_bit; j++)
      bitmap_set_next(map);
    if (!bitmap_is_prefix(map, test_bit))
      goto error1;
    bitmap_clear_all(map);
  }
  return FALSE;
error1:
  diag("get_next error bitsize= %u, prefix_size= %u", bitsize,test_bit);
  return TRUE;
}

bool test_prefix(MY_BITMAP *map, uint bitsize)
{
  uint i, j, test_bit;
  uint no_loops= bitsize > 128 ? 128 : bitsize;
  for (i=0; i < no_loops; i++)
  {
    test_bit=get_rand_bit(bitsize);
    bitmap_set_prefix(map, test_bit);
    if (!bitmap_is_prefix(map, test_bit))
      goto error1;
    bitmap_clear_all(map);
    for (j=0; j < test_bit; j++)
      bitmap_set_bit(map, j);
    if (!bitmap_is_prefix(map, test_bit))
      goto error2;
    bitmap_set_all(map);
    for (j=bitsize - 1; ~(j-test_bit); j--)
      bitmap_clear_bit(map, j);
    if (!bitmap_is_prefix(map, test_bit))
      goto error3;
    bitmap_clear_all(map);
  }
  return FALSE;
error1:
  diag("prefix1 error bitsize = %u, prefix_size = %u", bitsize,test_bit);
  return TRUE;
error2:
  diag("prefix2 error bitsize = %u, prefix_size = %u", bitsize,test_bit);
  return TRUE;
error3:
  diag("prefix3 error bitsize = %u, prefix_size = %u", bitsize,test_bit);
  return TRUE;
}


bool do_test(uint bitsize)
{
  MY_BITMAP map;
  uint32 buf[1024];
  if (bitmap_init(&map, buf, bitsize, FALSE))
  {
    diag("init error for bitsize %d", bitsize);
    goto error;
  }
  if (test_set_get_clear_bit(&map,bitsize))
    goto error;
  bitmap_clear_all(&map);
  if (test_flip_bit(&map,bitsize))
    goto error;
  bitmap_clear_all(&map);
  if (test_operators(&map,bitsize))
    goto error;
  bitmap_clear_all(&map);
  if (test_get_all_bits(&map, bitsize))
    goto error;
  bitmap_clear_all(&map);
  if (test_compare_operators(&map,bitsize))
    goto error;
  bitmap_clear_all(&map);
  if (test_count_bits_set(&map,bitsize))
    goto error;
  bitmap_clear_all(&map);
  if (test_get_first_bit(&map,bitsize))
    goto error;
  bitmap_clear_all(&map);
  if (test_get_next_bit(&map,bitsize))
    goto error;
  if (test_prefix(&map,bitsize))
    goto error;
  return FALSE;
error:
  return TRUE;
}

int main()
{
  int i;
  plan(4095);
  for (i= 1; i < 4096; i++)
    ok(do_test(i) == 0, "bitmap size %d", i);
  return exit_status();
}
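For scale: main() plans 4095 tests and calls do_test() once for every bitmap size from 1 to 4095, so a passing run emits a plan line followed by one result per size, roughly:

1..4095
ok 1 - bitmap size 1
ok 2 - bitmap size 2
...
ok 4095 - bitmap size 4095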
unittest/unit.pl (new file, 77 lines)
@@ -0,0 +1,77 @@
#!/usr/bin/perl

# Override _command_line in the standard Perl test harness to prevent
# it from using "perl" to run the test scripts.
package MySQL::Straps;
use base qw(Test::Harness::Straps);
sub _command_line { return $_[1] }

package main;

use strict;
use Test::Harness;
use File::Find;

sub run_cmd (@);

my %dispatch = (
    "run" => \&run_cmd,
);

=head1 NAME

unit - Run unit tests in directory

=head1 SYNOPSIS

  unit run

=cut

my $cmd = shift;

if (defined $cmd && exists $dispatch{$cmd}) {
    $dispatch{$cmd}->(@ARGV);
} else {
    print "Unknown command", (defined $cmd ? " $cmd" : ""), ".\n";
    print "Available commands are: ", join(", ", keys %dispatch), "\n";
}

=head2 run

Run all unit tests in the current directory and all subdirectories.

=cut

sub _find_test_files (@) {
    my @dirs = @_;
    my @files;
    find sub {
        $File::Find::prune = 1 if /^SCCS$/;
        push(@files, $File::Find::name) if -x _ && /\.t\z/;
    }, @dirs;
    return @files;
}

sub run_cmd (@) {
    my @files;

    push(@_, '.') if @_ == 0;

    foreach my $name (@_) {
        push(@files, _find_test_files $name) if -d $name;
        push(@files, $name) if -f $name;
    }

    if (@files > 0) {
        # Removing the first './' from the file names
        foreach (@files) { s!^\./!! }

        # Install the strap above instead of the default strap
        $Test::Harness::Strap = MySQL::Straps->new;

        runtests @files;
    }
}
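A typical invocation (assuming the test programs have already been built) is run from the unittest directory:

  perl unit.pl run

which collects every executable file matching *.t below the current directory, hands the list to Test::Harness::runtests(), and prints the usual harness summary (e.g. "All tests successful.") when everything passes.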