/*
  Verify that test_skip() treats an integral 1 the same as a true condition,
  and that MEMCACHED_SUCCESS is zero when libmemcached support is built in.
*/
static test_return_t test_success_equals_one_test(void *)
{
  test_skip(HAVE_LIBMEMCACHED, 1);
#if defined(HAVE_LIBMEMCACHED) && HAVE_LIBMEMCACHED
  test_zero(MEMCACHED_SUCCESS);
#endif
  // Fix: the original fell off the end of a non-void function, which is
  // undefined behavior when the harness reads the returned test_return_t.
  return TEST_SUCCESS;
}
// unsetenv() will cause issues with valgrind
- _compare(__FILE__, __LINE__, __func__, 0, unsetenv("LIBTEST_LOCAL"));
+ _compare(__FILE__, __LINE__, __func__, 0, unsetenv("LIBTEST_LOCAL"), true);
test_compare(0, unsetenv("LIBTEST_LOCAL"));
test_false(test_is_local());
/*
  Cycle test: bring up a gearmand server instance on a free port.

  Skips (rather than fails) the test when no gearmand binary is available
  on this system; requires a valid server_startup_st passed as `object`.
*/
static test_return_t gearmand_cycle_test(void *object)
{
  server_startup_st *servers= (server_startup_st*)object;
  test_true(servers and servers->validate());

#if defined(HAVE_GEARMAND_BINARY) && HAVE_GEARMAND_BINARY
  test_true(has_gearmand_binary());
#endif
  test_skip(true, has_gearmand_binary());

  test_true(server_startup(*servers, "gearmand", get_free_port(), 0, NULL));

  // Fix: removed the duplicated, unreachable second `return TEST_SUCCESS;`.
  return TEST_SUCCESS;
}
+static test_return_t skip_shim(bool a, bool b)
+{
+ test_skip(a, b);
+ return TEST_SUCCESS;
+}
+
+static test_return_t test_skip_true_TEST(void *object)
+{
+ test_compare(true, true);
+ test_compare(false, false);
+ test_compare(TEST_SUCCESS, skip_shim(true, true));
+ test_compare(TEST_SUCCESS, skip_shim(false, false));
+
+ return TEST_SUCCESS;
+}
+
+static test_return_t test_skip_false_TEST(void *object)
+{
+ test_compare(TEST_SKIPPED, skip_shim(true, false));
+ test_compare(TEST_SKIPPED, skip_shim(false, true));
+ return TEST_SUCCESS;
+}
+
static test_return_t memcached_cycle_test(void *object)
{
server_startup_st *servers= (server_startup_st*)object;
server_startup_st *servers= (server_startup_st*)object;
test_true(servers);
- if (getenv("TESTS_ENVIRONMENT"))
- {
- return TEST_SKIPPED;
- }
+ test_skip(false, bool(getenv("TESTS_ENVIRONMENT")));
if (MEMCACHED_SASL_BINARY)
{
static test_return_t application_gdb_true_BINARY2(void *)
{
+ test_skip(0, access("/usr/bin/gdb", X_OK ));
Application true_app("true");
true_app.use_gdb();
static test_return_t application_gdb_true_BINARY(void *)
{
+ test_skip(0, access("/usr/bin/gdb", X_OK ));
Application true_app("true");
true_app.use_gdb();
static test_return_t application_doesnotexist_BINARY(void *)
{
+ test_skip_valgrind();
+
Application true_app("doesnotexist");
const char *args[]= { "--fubar", 0 };
test_compare(Application::INVALID, true_app.run(args));
#else
test_compare(Application::SUCCESS, true_app.run(args));
+ test_compare(Application::INVALID, true_app.wait());
#endif
- // Behavior is different if we are running under valgrind
- if (getenv("TESTS_ENVIRONMENT") and strstr(getenv("TESTS_ENVIRONMENT"), "valgrind"))
- {
- test_compare(Application::FAILURE, true_app.wait());
- }
- else
- {
-#if defined(TARGET_OS_OSX) && TARGET_OS_OSX
- test_compare(Application::FAILURE, true_app.wait());
-#else
- test_compare(Application::INVALID, true_app.wait());
-#endif
- }
+
test_compare(0, true_app.stdout_result().size());
return TEST_SUCCESS;
static test_return_t wait_services_appliction_TEST(void *)
{
+ test_skip(0, access("/usr/bin/gdb", X_OK ));
test_skip(0, access("/etc/services", R_OK ));
libtest::Application wait_app("libtest/wait", true);
test_skip(0, TARGET_OS_OSX);
#endif
+ test_skip(0, access("/usr/bin/gdb", X_OK ));
test_skip(0, access("/etc/services", R_OK ));
libtest::Application wait_app("libtest/wait", true);
static test_return_t gdb_abort_services_appliction_TEST(void *)
{
+ test_skip(0, access("/usr/bin/gdb", X_OK ));
+
#if defined(TARGET_OS_OSX) && TARGET_OS_OSX
test_skip(0, TARGET_OS_OSX);
#endif
{0, 0, 0}
};
+test_st test_skip_TESTS[] ={
+ {"true, true", 0, test_skip_true_TEST },
+ {"true, false", 0, test_skip_false_TEST },
+ {0, 0, 0}
+};
+
test_st environment_tests[] ={
{"LIBTOOL_COMMAND", 0, LIBTOOL_COMMAND_test },
{"VALGRIND_COMMAND", 0, VALGRIND_COMMAND_test },
collection_st collection[] ={
{"environment", 0, 0, environment_tests},
{"return values", 0, 0, tests_log},
+ {"test_skip()", 0, 0, test_skip_TESTS },
{"local", 0, 0, local_log},
{"directories", 0, 0, directories_tests},
{"comparison", 0, 0, comparison_tests},