-/* uTest
- * Copyright (C) 2011 Data Differential, http://datadifferential.com/
- * Copyright (C) 2006-2009 Brian Aker
- * All rights reserved.
+/* vim:expandtab:shiftwidth=2:tabstop=2:smarttab:
+ *
+ * libtest
*
- * Use and distribution licensed under the BSD license. See
- * the COPYING file in the parent directory for full text.
+ * Copyright (C) 2011 Data Differential, http://datadifferential.com/
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
-
#include <libtest/common.h>
#include <cassert>
#include <fnmatch.h>
#include <iostream>
-#include <libtest/stats.h>
+#include <signal.h>
#ifndef __INTEL_COMPILER
#pragma GCC diagnostic ignored "-Wold-style-cast"
#endif
-static in_port_t global_port= 0;
-
-in_port_t default_port()
-{
- assert(global_port);
- return global_port;
-}
-
-void set_default_port(in_port_t port)
-{
- global_port= port;
-}
+using namespace libtest;
static void stats_print(Stats *stats)
{
- std::cout << "\tTotal Collections\t\t\t\t" << stats->collection_total << std::endl;
- std::cout << "\tFailed Collections\t\t\t\t" << stats->collection_failed << std::endl;
- std::cout << "\tSkipped Collections\t\t\t\t" << stats->collection_skipped << std::endl;
- std::cout << "\tSucceeded Collections\t\t\t\t" << stats->collection_success << std::endl;
- std::cout << std::endl;
- std::cout << "Total\t\t\t\t" << stats->total << std::endl;
- std::cout << "\tFailed\t\t\t" << stats->failed << std::endl;
- std::cout << "\tSkipped\t\t\t" << stats->skipped << std::endl;
- std::cout << "\tSucceeded\t\t" << stats->success << std::endl;
+ if (stats->collection_failed == 0 and stats->collection_success == 0)
+ {
+ return;
+ }
+
+ Out << "\tTotal Collections\t\t\t\t" << stats->collection_total;
+ Out << "\tFailed Collections\t\t\t\t" << stats->collection_failed;
+ Out << "\tSkipped Collections\t\t\t\t" << stats->collection_skipped;
+ Out << "\tSucceeded Collections\t\t\t\t" << stats->collection_success;
+ Outn();
+ Out << "Total\t\t\t\t" << stats->total;
+ Out << "\tFailed\t\t\t" << stats->failed;
+ Out << "\tSkipped\t\t\t" << stats->skipped;
+ Out << "\tSucceeded\t\t" << stats->success;
}
static long int timedif(struct timeval a, struct timeval b)
return s + us;
}
-const char *test_strerror(test_return_t code)
+static Framework *world= NULL;
+int main(int argc, char *argv[])
{
- switch (code) {
- case TEST_SUCCESS:
- return "ok";
+ srandom((unsigned int)time(NULL));
- case TEST_FAILURE:
- return "failed";
-
- case TEST_MEMORY_ALLOCATION_FAILURE:
- return "memory allocation";
-
- case TEST_SKIPPED:
- return "skipped";
-
- case TEST_FATAL:
- break;
+ if (getenv("LIBTEST_QUIET"))
+ {
+ close(STDOUT_FILENO);
}
-
- return "failed";
-}
-
-void create_core(void)
-{
- if (getenv("LIBMEMCACHED_NO_COREDUMP") == NULL)
+ else if (getenv("JENKINS_URL"))
{
- pid_t pid= fork();
-
- if (pid == 0)
- {
- abort();
- }
- else
- {
- while (waitpid(pid, NULL, 0) != pid) {};
- }
+ close(STDOUT_FILENO);
}
-}
-
-static test_return_t _runner_default(test_callback_fn func, void *p)
-{
- if (func)
+ char buffer[1024];
+ if (getenv("LIBTEST_TMP"))
{
- return func(p);
+ snprintf(buffer, sizeof(buffer), "%s", getenv("LIBTEST_TMP"));
+ }
+ else
+ {
+ snprintf(buffer, sizeof(buffer), "%s", LIBTEST_TEMP);
}
- return TEST_SUCCESS;
-}
-
-static Runner defualt_runners= {
- _runner_default,
- _runner_default,
- _runner_default
-};
+ if (chdir(buffer) == -1)
+ {
+ char getcwd_buffer[1024];
+ char *dir= getcwd(getcwd_buffer, sizeof(getcwd_buffer));
-static test_return_t _default_callback(void *p)
-{
- (void)p;
+ Error << "Unable to chdir() from " << dir << " to " << buffer << " errno:" << strerror(errno);
+ return EXIT_FAILURE;
+ }
- return TEST_SUCCESS;
-}
+ if (libtest::libtool() == NULL)
+ {
+ Error << "Failed to locate libtool";
+ return EXIT_FAILURE;
+ }
-Framework::Framework() :
- collections(NULL),
- _create(NULL),
- _destroy(NULL),
- collection_startup(_default_callback),
- collection_shutdown(_default_callback),
- _on_error(NULL),
- runner(&defualt_runners)
-{
-}
+ world= new Framework();
+ if (world == NULL)
+ {
+ Error << "Failed to create Framework()";
+ return EXIT_FAILURE;
+ }
-int main(int argc, char *argv[])
-{
- Framework world;
+ libtest::SignalThread signal;
+ if (not signal.setup())
+ {
+ return EXIT_FAILURE;
+ }
Stats stats;
- get_world(&world);
-
- if (not world.runner)
- {
- world.runner= &defualt_runners;
- }
+ get_world(world);
test_return_t error;
- void *world_ptr= world.create(&error);
- if (test_failed(error))
+ void *creators_ptr= world->create(error);
+
+ switch (error)
{
+ case TEST_SUCCESS:
+ break;
+
+ case TEST_SKIPPED:
+ Out << "SKIP " << argv[0];
+ delete world;
+ return EXIT_SUCCESS;
+
+ case TEST_FATAL:
+ case TEST_FAILURE:
+ case TEST_MEMORY_ALLOCATION_FAILURE:
+ delete world;
return EXIT_FAILURE;
}
}
else if (getenv("TEST_COLLECTION"))
{
- collection_to_run= getenv("TEST_COLLECTION");
+ if (strlen(getenv("TEST_COLLECTION")))
+ {
+ collection_to_run= getenv("TEST_COLLECTION");
+ }
}
if (collection_to_run)
{
- std::cout << "Only testing " << collection_to_run << std::endl;
+ Out << "Only testing " << collection_to_run;
}
char *wildcard= NULL;
wildcard= argv[2];
}
- for (collection_st *next= world.collections; next->name; next++)
+ for (collection_st *next= world->collections; next->name and (not signal.is_shutdown()); next++)
{
test_return_t collection_rc= TEST_SUCCESS;
bool failed= false;
stats.collection_total++;
- collection_rc= world.startup(world_ptr);
+ collection_rc= world->startup(creators_ptr);
if (collection_rc == TEST_SUCCESS and next->pre)
{
- collection_rc= world.runner->pre(next->pre, world_ptr);
+ collection_rc= world->runner()->pre(next->pre, creators_ptr);
}
switch (collection_rc)
{
case TEST_SUCCESS:
- std::cerr << std::endl << next->name << std::endl << std::endl;
break;
case TEST_FATAL:
case TEST_FAILURE:
- std::cerr << std::endl << next->name << " [ failed ]" << std::endl << std::endl;
- stats.collection_failed++;
+ Out << next->name << " [ failed ]";
+ failed= true;
+ signal.set_shutdown(SHUTDOWN_GRACEFUL);
goto cleanup;
case TEST_SKIPPED:
- std::cerr << std::endl << next->name << " [ skipping ]" << std::endl << std::endl;
- stats.collection_skipped++;
+ Out << next->name << " [ skipping ]";
+ skipped= true;
goto cleanup;
case TEST_MEMORY_ALLOCATION_FAILURE:
test_assert(0, "Allocation failure, or unknown return");
}
+ Out << "Collection: " << next->name;
+
for (test_st *run= next->tests; run->name; run++)
{
struct timeval start_time, end_time;
if (wildcard && fnmatch(wildcard, run->name, 0))
{
- continue;
+ continue;
}
- std::cerr << "\tTesting " << run->name;
-
- world.item.startup(world_ptr);
-
- world.item.flush(world_ptr, run);
-
- world.item.pre(world_ptr);
-
test_return_t return_code;
- { // Runner Code
- gettimeofday(&start_time, NULL);
- return_code= world.runner->run(run->test_fn, world_ptr);
- gettimeofday(&end_time, NULL);
- load_time= timedif(end_time, start_time);
+ if (test_success(return_code= world->item.startup(creators_ptr)))
+ {
+ if (test_success(return_code= world->item.flush(creators_ptr, run)))
+ {
+ // @note pre will fail if SKIPPED is returned
+ if (test_success(return_code= world->item.pre(creators_ptr)))
+ {
+ { // Runner Code
+ gettimeofday(&start_time, NULL);
+ assert(world->runner());
+ assert(run->test_fn);
+ return_code= world->runner()->run(run->test_fn, creators_ptr);
+ gettimeofday(&end_time, NULL);
+ load_time= timedif(end_time, start_time);
+ }
+ }
+
+ // @todo do something if post fails
+ (void)world->item.post(creators_ptr);
+ }
+ else if (return_code == TEST_SKIPPED)
+ { }
+ else if (return_code == TEST_FAILURE)
+ {
+ Error << " item.flush(failure)";
+ signal.set_shutdown(SHUTDOWN_GRACEFUL);
+ }
+ }
+ else if (return_code == TEST_SKIPPED)
+ { }
+ else if (return_code == TEST_FAILURE)
+ {
+ Error << " item.startup(failure)";
+ signal.set_shutdown(SHUTDOWN_GRACEFUL);
}
-
- world.item.post(world_ptr);
stats.total++;
- std::cerr << "\t\t\t\t\t";
-
switch (return_code)
{
case TEST_SUCCESS:
- std::cerr << load_time / 1000 << "." << load_time % 1000;
- stats.success++;
- break;
+ Out << "\tTesting " << run->name << "\t\t\t\t\t" << load_time / 1000 << "." << load_time % 1000 << "[ " << test_strerror(return_code) << " ]";
+ stats.success++;
+ break;
case TEST_FATAL:
case TEST_FAILURE:
- stats.failed++;
- failed= true;
- break;
+ stats.failed++;
+ failed= true;
+ Out << "\tTesting " << run->name << "\t\t\t\t\t" << "[ " << test_strerror(return_code) << " ]";
+ break;
case TEST_SKIPPED:
- stats.skipped++;
- skipped= true;
- break;
+ stats.skipped++;
+ skipped= true;
+ Out << "\tTesting " << run->name << "\t\t\t\t\t" << "[ " << test_strerror(return_code) << " ]";
+ break;
case TEST_MEMORY_ALLOCATION_FAILURE:
- test_assert(0, "Memory Allocation Error");
+ test_assert(0, "Memory Allocation Error");
}
- std::cerr << "[ " << test_strerror(return_code) << " ]" << std::endl;
-
- if (test_failed(world.on_error(return_code, world_ptr)))
+ if (test_failed(world->on_error(return_code, creators_ptr)))
{
+ Error << "Failed while running on_error()";
+ signal.set_shutdown(SHUTDOWN_GRACEFUL);
break;
}
}
- if (next->post && world.runner->post)
+ (void) world->runner()->post(next->post, creators_ptr);
+
+cleanup:
+ if (failed == false and skipped == false)
{
- (void) world.runner->post(next->post, world_ptr);
+ stats.collection_success++;
}
- if (failed == 0 and skipped == 0)
+ if (failed)
{
- stats.collection_success++;
+ stats.collection_failed++;
}
-cleanup:
- world.shutdown(world_ptr);
+ if (skipped)
+ {
+ stats.collection_skipped++;
+ }
+
+ world->shutdown(creators_ptr);
+ Outn();
}
- if (stats.collection_failed || stats.collection_skipped)
+ if (not signal.is_shutdown())
{
- std::cerr << std::endl << std::endl << "Some test failures and/or skipped test occurred." << std::endl << std::endl;
-#if 0
- print_failed_test();
-#endif
+ signal.set_shutdown(SHUTDOWN_GRACEFUL);
}
- else
+
+ int exit_code= EXIT_SUCCESS;
+ shutdown_t status= signal.get_shutdown();
+ if (status == SHUTDOWN_FORCED)
{
- std::cout << std::endl << std::endl << "All tests completed successfully." << std::endl << std::endl;
+ Out << "Tests were aborted.";
+ exit_code= EXIT_FAILURE;
}
-
- if (test_failed(world.destroy(world_ptr)))
+ else if (stats.collection_failed)
{
- stats.failed++; // We do this to make our exit code return EXIT_FAILURE
+ Out << "Some test failed.";
+ exit_code= EXIT_FAILURE;
+ }
+ else if (stats.collection_skipped and stats.collection_failed and stats.collection_success)
+ {
+ Out << "Some tests were skipped.";
+ }
+ else if (stats.collection_success and stats.collection_failed == 0)
+ {
+ Out << "All tests completed successfully.";
}
stats_print(&stats);
- return stats.failed == 0 ? 0 : 1;
+ delete world;
+
+ Outn(); // Generate a blank to break up the messages if make check/test has been run
+
+ return exit_code;
}