From 7e19aeada5ae070cc6f64b16114556da48883a23 Mon Sep 17 00:00:00 2001 From: Alexandre Quercia Date: Wed, 27 Mar 2024 00:19:37 +0100 Subject: [PATCH] fix(test): exit code of lime test --- .php-cs-fixer.dist.php | 1 + .../sfDoctrinePlugin/test/bin/coverage.php | 5 +- lib/vendor/lime/lime.php | 892 +----------------- lib/vendor/lime/src/lime_harness.php | 341 +++++++ lib/vendor/lime/src/lime_test.php | 637 +++++++++++++ test/bin/coverage.php | 5 +- test/unit/vendor/lime/fixtures/failed.php | 7 + .../failed_with_plan_less_than_total.php | 8 + .../failed_with_plan_more_than_total.php | 7 + test/unit/vendor/lime/fixtures/pass.php | 7 + .../lime/fixtures/pass_with_one_error.php | 13 + .../fixtures/pass_with_one_parse_error.php | 7 + .../pass_with_one_throw_exception.php | 9 + .../pass_with_plan_less_than_total.php | 8 + .../pass_with_plan_more_than_total.php | 7 + test/unit/vendor/lime/lime_harnessTest.php | 187 ++++ test/unit/vendor/lime/lime_testTest.php | 160 ++++ 17 files changed, 1409 insertions(+), 892 deletions(-) create mode 100644 lib/vendor/lime/src/lime_harness.php create mode 100644 lib/vendor/lime/src/lime_test.php create mode 100644 test/unit/vendor/lime/fixtures/failed.php create mode 100644 test/unit/vendor/lime/fixtures/failed_with_plan_less_than_total.php create mode 100644 test/unit/vendor/lime/fixtures/failed_with_plan_more_than_total.php create mode 100644 test/unit/vendor/lime/fixtures/pass.php create mode 100644 test/unit/vendor/lime/fixtures/pass_with_one_error.php create mode 100644 test/unit/vendor/lime/fixtures/pass_with_one_parse_error.php create mode 100644 test/unit/vendor/lime/fixtures/pass_with_one_throw_exception.php create mode 100644 test/unit/vendor/lime/fixtures/pass_with_plan_less_than_total.php create mode 100644 test/unit/vendor/lime/fixtures/pass_with_plan_more_than_total.php create mode 100644 test/unit/vendor/lime/lime_harnessTest.php create mode 100644 test/unit/vendor/lime/lime_testTest.php diff --git a/.php-cs-fixer.dist.php b/.php-cs-fixer.dist.php index fccc2b3f9..07ded65b3 100644 --- a/.php-cs-fixer.dist.php +++ b/.php-cs-fixer.dist.php @@ -5,6 +5,7 @@ ->in(__DIR__.'/lib') ->in(__DIR__.'/data/bin') ->in(__DIR__.'/test') + ->in(__DIR__.'/lib/vendor/lime/src') ->append([__FILE__]) // Exclude PHP classes templates/generators, which are not valid PHP files ->exclude('task/generator/skeleton/') diff --git a/lib/plugins/sfDoctrinePlugin/test/bin/coverage.php b/lib/plugins/sfDoctrinePlugin/test/bin/coverage.php index f3f3b3321..616359482 100644 --- a/lib/plugins/sfDoctrinePlugin/test/bin/coverage.php +++ b/lib/plugins/sfDoctrinePlugin/test/bin/coverage.php @@ -32,4 +32,7 @@ $finder = sfFinder::type('file')->name('*.php')->prune('vendor')->prune('test')->prune('data'); $c->register($finder->in($c->base_dir)); -$c->run(); + +$allTestsSucceed = $c->run(); + +exit($allTestsSucceed ? 0 : 1); diff --git a/lib/vendor/lime/lime.php b/lib/vendor/lime/lime.php index c09016fb2..69efbe093 100644 --- a/lib/vendor/lime/lime.php +++ b/lib/vendor/lime/lime.php @@ -8,597 +8,8 @@ * file that was distributed with this source code. */ -/** - * Unit test library. 
- * - * @package lime - * @author Fabien Potencier - */ -class lime_test -{ - const EPSILON = 0.0000000001; - - protected $test_nb = 0; - protected $output = null; - protected $results = array(); - protected $options = array(); - - static protected $all_results = array(); - - public function __construct($plan = null, $options = array()) - { - // for BC - if (!is_array($options)) - { - $options = array('output' => $options); - } - - $this->options = array_merge(array( - 'force_colors' => false, - 'output' => null, - 'verbose' => false, - 'error_reporting' => false, - ), $options); - - $this->output = $this->options['output'] ? $this->options['output'] : new lime_output($this->options['force_colors']); - - $caller = $this->find_caller(debug_backtrace()); - self::$all_results[] = array( - 'file' => $caller[0], - 'tests' => array(), - 'stats' => array('plan' => $plan, 'total' => 0, 'failed' => array(), 'passed' => array(), 'skipped' => array(), 'errors' => array()), - ); - - $this->results = &self::$all_results[count(self::$all_results) - 1]; - - null !== $plan and $this->output->echoln(sprintf("1..%d", $plan)); - - set_error_handler(array($this, 'handle_error')); - set_exception_handler(array($this, 'handle_exception')); - } - - static public function reset() - { - self::$all_results = array(); - } - - static public function to_array() - { - return self::$all_results; - } - - static public function to_xml($results = null) - { - if (is_null($results)) - { - $results = self::$all_results; - } - - $dom = new DOMDocument('1.0', 'UTF-8'); - $dom->formatOutput = true; - $dom->appendChild($testsuites = $dom->createElement('testsuites')); - - $errors = 0; - $failures = 0; - $errors = 0; - $skipped = 0; - $assertions = 0; - - foreach ($results as $result) - { - $testsuites->appendChild($testsuite = $dom->createElement('testsuite')); - $testsuite->setAttribute('name', basename($result['file'], '.php')); - $testsuite->setAttribute('file', $result['file']); - $testsuite->setAttribute('failures', count($result['stats']['failed'])); - $testsuite->setAttribute('errors', count($result['stats']['errors'])); - $testsuite->setAttribute('skipped', count($result['stats']['skipped'])); - $testsuite->setAttribute('tests', $result['stats']['plan']); - $testsuite->setAttribute('assertions', $result['stats']['plan']); - - $failures += count($result['stats']['failed']); - $errors += count($result['stats']['errors']); - $skipped += count($result['stats']['skipped']); - $assertions += $result['stats']['plan']; - - foreach ($result['tests'] as $test) - { - $testsuite->appendChild($testcase = $dom->createElement('testcase')); - $testcase->setAttribute('name', utf8_encode($test['message'])); - $testcase->setAttribute('file', $test['file']); - $testcase->setAttribute('line', $test['line']); - $testcase->setAttribute('assertions', 1); - if (!$test['status']) - { - $testcase->appendChild($failure = $dom->createElement('failure')); - $failure->setAttribute('type', 'lime'); - if (isset($test['error'])) - { - $failure->appendChild($dom->createTextNode($test['error'])); - } - } - } - } - - $testsuites->setAttribute('failures', $failures); - $testsuites->setAttribute('errors', $errors); - $testsuites->setAttribute('tests', $assertions); - $testsuites->setAttribute('assertions', $assertions); - $testsuites->setAttribute('skipped', $skipped); - - return $dom->saveXml(); - } - - public function __destruct() - { - $plan = $this->results['stats']['plan']; - $passed = count($this->results['stats']['passed']); - $failed = 
count($this->results['stats']['failed']); - $total = $this->results['stats']['total']; - is_null($plan) and $plan = $total and $this->output->echoln(sprintf("1..%d", $plan)); - - if ($total > $plan) - { - $this->output->red_bar(sprintf("# Looks like you planned %d tests but ran %d extra.", $plan, $total - $plan)); - } - elseif ($total < $plan) - { - $this->output->red_bar(sprintf("# Looks like you planned %d tests but only ran %d.", $plan, $total)); - } - - if ($failed) - { - $this->output->red_bar(sprintf("# Looks like you failed %d tests of %d.", $failed, $passed + $failed)); - } - else if ($total == $plan) - { - $this->output->green_bar("# Looks like everything went fine."); - } - - flush(); - } - - /** - * Tests a condition and passes if it is true - * - * @param mixed $exp condition to test - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function ok($exp, $message = '') - { - $this->update_stats(); - - if ($result = (boolean) $exp) - { - $this->results['stats']['passed'][] = $this->test_nb; - } - else - { - $this->results['stats']['failed'][] = $this->test_nb; - } - $this->results['tests'][$this->test_nb]['message'] = $message; - $this->results['tests'][$this->test_nb]['status'] = $result; - $this->output->echoln(sprintf("%s %d%s", $result ? 'ok' : 'not ok', $this->test_nb, $message = $message ? sprintf('%s %s', 0 === strpos($message, '#') ? '' : ' -', $message) : '')); - - if (!$result) - { - $this->output->diag(sprintf(' Failed test (%s at line %d)', str_replace(getcwd(), '.', $this->results['tests'][$this->test_nb]['file']), $this->results['tests'][$this->test_nb]['line'])); - } - - return $result; - } - - /** - * Compares two values and returns true if they are equal - * - * @param mixed $exp1 left value - * @param mixed $exp2 right value - * @return bool - */ - private function equals($exp1, $exp2) - { - if (is_object($exp1) || is_object($exp2)) { - return $exp1 === $exp2; - } else if (is_float($exp1) && is_float($exp2)) { - return abs($exp1 - $exp2) < self::EPSILON; - } else if (is_string($exp1) && is_numeric($exp1) || is_string($exp2) && is_numeric($exp2)) { - return $exp1 == $exp2; - } else if (is_string($exp1) || is_string($exp2)) { - return (string) $exp1 === (string) $exp2; - } - return $exp1 == $exp2; - } - - /** - * Compares two values and passes if they are equal (==) - * - * @param mixed $exp1 left value - * @param mixed $exp2 right value - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function is($exp1, $exp2, $message = '') - { - $value = $this->equals($exp1, $exp2); - - if (!$result = $this->ok($value, $message)) - { - $this->set_last_test_errors(array(sprintf(" got: %s", var_export($exp1, true)), sprintf(" expected: %s", var_export($exp2, true)))); - } - - return $result; - } - - /** - * Compares two values and passes if they are not equal - * - * @param mixed $exp1 left value - * @param mixed $exp2 right value - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function isnt($exp1, $exp2, $message = '') - { - $value = $this->equals($exp1, $exp2); - - if (!$result = $this->ok(!$value, $message)) - { - $this->set_last_test_errors(array(sprintf(" %s", var_export($exp1, true)), ' ne', sprintf(" %s", var_export($exp2, true)))); - } - - return $result; - } - - /** - * Tests a string against a regular expression - * - * @param string $exp value to test - * @param string $regex the pattern to 
search for, as a string - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function like($exp, $regex, $message = '') - { - if (!$result = $this->ok(preg_match($regex, $exp), $message)) - { - $this->set_last_test_errors(array(sprintf(" '%s'", $exp), sprintf(" doesn't match '%s'", $regex))); - } - - return $result; - } - - /** - * Checks that a string doesn't match a regular expression - * - * @param string $exp value to test - * @param string $regex the pattern to search for, as a string - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function unlike($exp, $regex, $message = '') - { - if (!$result = $this->ok(!preg_match($regex, $exp), $message)) - { - $this->set_last_test_errors(array(sprintf(" '%s'", $exp), sprintf(" matches '%s'", $regex))); - } - - return $result; - } - - /** - * Compares two arguments with an operator - * - * @param mixed $exp1 left value - * @param string $op operator - * @param mixed $exp2 right value - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function cmp_ok($exp1, $op, $exp2, $message = '') - { - $php = sprintf("\$result = \$exp1 $op \$exp2;"); - // under some unknown conditions the sprintf() call causes a segmentation fault - // when placed directly in the eval() call - eval($php); - - if (!$this->ok($result, $message)) - { - $this->set_last_test_errors(array(sprintf(" %s", str_replace("\n", '', var_export($exp1, true))), sprintf(" %s", $op), sprintf(" %s", str_replace("\n", '', var_export($exp2, true))))); - } - - return $result; - } - - /** - * Checks the availability of a method for an object or a class - * - * @param mixed $object an object instance or a class name - * @param string|array $methods one or more method names - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function can_ok($object, $methods, $message = '') - { - $result = true; - $failed_messages = array(); - foreach ((array) $methods as $method) - { - if (!method_exists($object, $method)) - { - $failed_messages[] = sprintf(" method '%s' does not exist", $method); - $result = false; - } - } - - !$this->ok($result, $message); - - !$result and $this->set_last_test_errors($failed_messages); - - return $result; - } - - /** - * Checks the type of an argument - * - * @param mixed $var variable instance - * @param string $class class or type name - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function isa_ok($var, $class, $message = '') - { - $type = is_object($var) ? 
get_class($var) : gettype($var); - if (!$result = $this->ok($type == $class, $message)) - { - $this->set_last_test_errors(array(sprintf(" variable isn't a '%s' it's a '%s'", $class, $type))); - } - - return $result; - } - - /** - * Checks that two arrays have the same values - * - * @param mixed $exp1 first variable - * @param mixed $exp2 second variable - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function is_deeply($exp1, $exp2, $message = '') - { - if (!$result = $this->ok($this->test_is_deeply($exp1, $exp2), $message)) - { - $this->set_last_test_errors(array(sprintf(" got: %s", str_replace("\n", '', var_export($exp1, true))), sprintf(" expected: %s", str_replace("\n", '', var_export($exp2, true))))); - } - - return $result; - } - - /** - * Always passes--useful for testing exceptions - * - * @param string $message display output message - * - * @return true - */ - public function pass($message = '') - { - return $this->ok(true, $message); - } - - /** - * Always fails--useful for testing exceptions - * - * @param string $message display output message - * - * @return false - */ - public function fail($message = '') - { - return $this->ok(false, $message); - } - - /** - * Outputs a diag message but runs no test - * - * @param string $message display output message - * - * @return void - */ - public function diag($message) - { - $this->output->diag($message); - } - - /** - * Counts as $nb_tests tests--useful for conditional tests - * - * @param string $message display output message - * @param integer $nb_tests number of tests to skip - * - * @return void - */ - public function skip($message = '', $nb_tests = 1) - { - for ($i = 0; $i < $nb_tests; $i++) - { - $this->pass(sprintf("# SKIP%s", $message ? ' '.$message : '')); - $this->results['stats']['skipped'][] = $this->test_nb; - array_pop($this->results['stats']['passed']); - } - } - - /** - * Counts as a test--useful for tests yet to be written - * - * @param string $message display output message - * - * @return void - */ - public function todo($message = '') - { - $this->pass(sprintf("# TODO%s", $message ? 
' '.$message : '')); - $this->results['stats']['skipped'][] = $this->test_nb; - array_pop($this->results['stats']['passed']); - } - - /** - * Validates that a file exists and that it is properly included - * - * @param string $file file path - * @param string $message display output message when the test passes - * - * @return boolean - */ - public function include_ok($file, $message = '') - { - if (!$result = $this->ok((@include($file)) == 1, $message)) - { - $this->set_last_test_errors(array(sprintf(" Tried to include '%s'", $file))); - } - - return $result; - } - - private function test_is_deeply($var1, $var2) - { - if (gettype($var1) != gettype($var2)) - { - return false; - } - - if (is_array($var1)) - { - ksort($var1); - ksort($var2); - - $keys1 = array_keys($var1); - $keys2 = array_keys($var2); - if (array_diff($keys1, $keys2) || array_diff($keys2, $keys1)) - { - return false; - } - $is_equal = true; - foreach ($var1 as $key => $value) - { - $is_equal = $this->test_is_deeply($var1[$key], $var2[$key]); - if ($is_equal === false) - { - break; - } - } - - return $is_equal; - } - else - { - return $var1 === $var2; - } - } - - public function comment($message) - { - $this->output->comment($message); - } - - public function info($message) - { - $this->output->info($message); - } - - public function error($message, $file = null, $line = null, array $traces = array()) - { - $this->output->error($message, $file, $line, $traces); - - $this->results['stats']['errors'][] = array( - 'message' => $message, - 'file' => $file, - 'line' => $line, - ); - } - - protected function update_stats() - { - ++$this->test_nb; - ++$this->results['stats']['total']; - - list($this->results['tests'][$this->test_nb]['file'], $this->results['tests'][$this->test_nb]['line']) = $this->find_caller(debug_backtrace()); - } - - protected function set_last_test_errors(array $errors) - { - $this->output->diag($errors); - - $this->results['tests'][$this->test_nb]['error'] = implode("\n", $errors); - } - - private function is_test_object($object) - { - return $object instanceof lime_test || $object instanceof sfTestFunctionalBase || $object instanceof sfTester; - } - - protected function find_caller($traces) - { - // find the first call to a method of an object that is an instance of lime_test - $t = array_reverse($traces); - foreach ($t as $trace) - { - // In internal calls, like error_handle, 'file' will be missing - if (isset($trace['object']) && $this->is_test_object($trace['object']) && isset($trace['file'])) - { - return array($trace['file'], $trace['line']); - } - } - - // return the first call - $last = count($traces) - 1; - return array($traces[$last]['file'], $traces[$last]['line']); - } - - public function handle_error($code, $message, $file, $line, $context = null) - { - if (!$this->options['error_reporting'] || ($code & error_reporting()) == 0) - { - return false; - } - - switch ($code) - { - case E_WARNING: - $type = 'Warning'; - break; - default: - $type = 'Notice'; - break; - } - - $trace = debug_backtrace(); - array_shift($trace); // remove the handle_error() call from the trace - - $this->error($type.': '.$message, $file, $line, $trace); - } - - /** - * @param Throwable|Exception $exception - * @return bool - */ - public function handle_exception($exception) - { - $this->error(get_class($exception).': '.$exception->getMessage(), $exception->getFile(), $exception->getLine(), $exception->getTrace()); - - // exception was handled - return true; - } -} +require_once __DIR__.'/src/lime_test.php'; +require_once 
__DIR__.'/src/lime_harness.php'; class lime_output { @@ -826,305 +237,6 @@ public function colorize($text = '', $parameters = array()) lime_colorizer::style('RED_BAR', array('fg' => 'white', 'bg' => 'red', 'bold' => true)); lime_colorizer::style('INFO_BAR', array('fg' => 'cyan', 'bold' => true)); -class lime_harness extends lime_registration -{ - public $options = array(); - public $php_cli = null; - public $stats = array(); - public $output = null; - public $full_output = false; - - public function __construct($options = array()) - { - // for BC - if (!is_array($options)) - { - $options = array('output' => $options); - } - - $this->options = array_merge(array( - 'php_cli' => null, - 'force_colors' => false, - 'output' => null, - 'verbose' => false, - 'test_path' => sys_get_temp_dir(), - ), $options); - - $this->php_cli = $this->find_php_cli($this->options['php_cli']); - $this->output = $this->options['output'] ? $this->options['output'] : new lime_output($this->options['force_colors']); - } - - protected function find_php_cli($php_cli = null) - { - if (is_null($php_cli)) - { - if (getenv('PHP_PATH')) - { - $php_cli = getenv('PHP_PATH'); - - if (!is_executable($php_cli)) - { - throw new Exception('The defined PHP_PATH environment variable is not a valid PHP executable.'); - } - } - else - { - $php_cli = PHP_BINDIR.DIRECTORY_SEPARATOR.'php'; - } - } - - if (is_executable($php_cli)) - { - return $php_cli; - } - - $path = getenv('PATH') ? getenv('PATH') : getenv('Path'); - $exe_suffixes = DIRECTORY_SEPARATOR == '\\' ? (getenv('PATHEXT') ? explode(PATH_SEPARATOR, getenv('PATHEXT')) : array('.exe', '.bat', '.cmd', '.com')) : array(''); - foreach (array('php5', 'php') as $php_cli) - { - foreach ($exe_suffixes as $suffix) - { - foreach (explode(PATH_SEPARATOR, $path) as $dir) - { - $file = $dir.DIRECTORY_SEPARATOR.$php_cli.$suffix; - if (is_executable($file)) - { - return $file; - } - } - } - } - - throw new Exception("Unable to find PHP executable."); - } - - public function to_array() - { - $results = array(); - foreach ($this->stats['files'] as $file => $stat) - { - $results = array_merge($results, $stat['output']); - } - - return $results; - } - - public function to_xml() - { - return lime_test::to_xml($this->to_array()); - } - - public function run() - { - if (!count($this->files)) - { - throw new Exception('You must register some test files before running them!'); - } - - // sort the files to be able to predict the order - sort($this->files); - - $this->stats = array( - 'files' => array(), - 'failed_files' => array(), - 'failed_tests' => 0, - 'total' => 0, - ); - - foreach ($this->files as $file) - { - $this->stats['files'][$file] = array(); - $stats = &$this->stats['files'][$file]; - - $relative_file = $this->get_relative_file($file); - - $test_file = tempnam($this->options['test_path'], 'lime_test').'.php'; - $result_file = tempnam($this->options['test_path'], 'lime_result'); - file_put_contents($test_file, <<&1', escapeshellarg($this->php_cli), escapeshellarg($test_file)), $return); - ob_end_clean(); - unlink($test_file); - - $output = file_get_contents($result_file); - $stats['output'] = $output ? 
unserialize($output) : ''; - if (!$stats['output']) - { - $stats['output'] = array(array('file' => $file, 'tests' => array(), 'stats' => array('plan' => 1, 'total' => 1, 'failed' => array(0), 'passed' => array(), 'skipped' => array(), 'errors' => array()))); - } - unlink($result_file); - - $file_stats = &$stats['output'][0]['stats']; - - $delta = 0; - if ($return > 0) - { - $stats['status'] = $file_stats['errors'] ? 'errors' : 'dubious'; - $stats['status_code'] = $return; - } - else - { - $this->stats['total'] += $file_stats['total']; - - if (!$file_stats['plan']) - { - $file_stats['plan'] = $file_stats['total']; - } - - $delta = $file_stats['plan'] - $file_stats['total']; - if (0 != $delta) - { - $stats['status'] = $file_stats['errors'] ? 'errors' : 'dubious'; - $stats['status_code'] = 255; - } - else - { - $stats['status'] = $file_stats['failed'] ? 'not ok' : ($file_stats['errors'] ? 'errors' : 'ok'); - $stats['status_code'] = 0; - } - } - - if (true === $this->full_output) - { - $this->output->echoln(sprintf('%s%s%s', $relative_file, '.....', $stats['status'])); - } - else - { - $this->output->echoln(sprintf('%s%s%s', substr($relative_file, -min(67, strlen($relative_file))), str_repeat('.', 70 - min(67, strlen($relative_file))), $stats['status'])); - } - - if ('dubious' == $stats['status']) - { - $this->output->echoln(sprintf(' Test returned status %s', $stats['status_code'])); - } - - if ('ok' != $stats['status']) - { - $this->stats['failed_files'][] = $file; - } - - if ($delta > 0) - { - $this->output->echoln(sprintf(' Looks like you planned %d tests but only ran %d.', $file_stats['plan'], $file_stats['total'])); - - $this->stats['failed_tests'] += $delta; - $this->stats['total'] += $delta; - } - else if ($delta < 0) - { - $this->output->echoln(sprintf(' Looks like you planned %s test but ran %s extra.', $file_stats['plan'], $file_stats['total'] - $file_stats['plan'])); - } - - if (false !== $file_stats && $file_stats['failed']) - { - $this->stats['failed_tests'] += count($file_stats['failed']); - - $this->output->echoln(sprintf(" Failed tests: %s", implode(', ', $file_stats['failed']))); - } - - if (false !== $file_stats && $file_stats['errors']) - { - $this->output->echoln(' Errors:'); - - $error_count = count($file_stats['errors']); - for ($i = 0; $i < 3 && $i < $error_count; ++$i) - { - $this->output->echoln(' - ' . $file_stats['errors'][$i]['message'], null, false); - } - if ($error_count > 3) - { - $this->output->echoln(sprintf(' ... 
and %s more', $error_count-3)); - } - } - } - - if (count($this->stats['failed_files'])) - { - $format = "%-30s %4s %5s %5s %5s %s"; - $this->output->echoln(sprintf($format, 'Failed Test', 'Stat', 'Total', 'Fail', 'Errors', 'List of Failed')); - $this->output->echoln("--------------------------------------------------------------------------"); - foreach ($this->stats['files'] as $file => $stat) - { - if (!in_array($file, $this->stats['failed_files'])) - { - continue; - } - $relative_file = $this->get_relative_file($file); - - if (isset($stat['output'][0])) - { - $this->output->echoln(sprintf($format, substr($relative_file, -min(30, strlen($relative_file))), $stat['status_code'], count($stat['output'][0]['stats']['failed']) + count($stat['output'][0]['stats']['passed']), count($stat['output'][0]['stats']['failed']), count($stat['output'][0]['stats']['errors']), implode(' ', $stat['output'][0]['stats']['failed']))); - } - else - { - $this->output->echoln(sprintf($format, substr($relative_file, -min(30, strlen($relative_file))), $stat['status_code'], '', '', '')); - } - } - - $this->output->red_bar(sprintf('Failed %d/%d test scripts, %.2f%% okay. %d/%d subtests failed, %.2f%% okay.', - $nb_failed_files = count($this->stats['failed_files']), - $nb_files = count($this->files), - ($nb_files - $nb_failed_files) * 100 / $nb_files, - $nb_failed_tests = $this->stats['failed_tests'], - $nb_tests = $this->stats['total'], - $nb_tests > 0 ? ($nb_tests - $nb_failed_tests) * 100 / $nb_tests : 0 - )); - - if ($this->options['verbose']) - { - foreach ($this->to_array() as $testsuite) - { - $first = true; - foreach ($testsuite['stats']['failed'] as $testcase) - { - if (!isset($testsuite['tests'][$testcase]['file'])) - { - continue; - } - - if ($first) - { - $this->output->echoln(''); - $this->output->error($this->get_relative_file($testsuite['file']).$this->extension); - $first = false; - } - - $this->output->comment(sprintf(' at %s line %s', $this->get_relative_file($testsuite['tests'][$testcase]['file']).$this->extension, $testsuite['tests'][$testcase]['line'])); - $this->output->info(' '.$testsuite['tests'][$testcase]['message']); - if (isset($testsuite['tests'][$testcase]['error'])) - { - $this->output->echoln($testsuite['tests'][$testcase]['error'], null, false); - } - } - } - } - } - else - { - $this->output->green_bar(' All tests successful.'); - $this->output->green_bar(sprintf(' Files=%d, Tests=%d', count($this->files), $this->stats['total'])); - } - - return $this->stats['failed_files'] ? false : true; - } - - public function get_failed_files() - { - return isset($this->stats['failed_files']) ? $this->stats['failed_files'] : array(); - } -} - class lime_coverage extends lime_registration { public $files = array(); diff --git a/lib/vendor/lime/src/lime_harness.php b/lib/vendor/lime/src/lime_harness.php new file mode 100644 index 000000000..781239eee --- /dev/null +++ b/lib/vendor/lime/src/lime_harness.php @@ -0,0 +1,341 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +/** + * Unit test library. 
+ * + * @author Fabien Potencier + */ +class lime_harness extends lime_registration +{ + public $options = []; + public $php_cli; + public $stats = []; + public $output; + public $full_output = false; + + public function __construct($options = []) + { + // for BC + if (!is_array($options)) { + $options = ['output' => $options]; + } + + $this->options = array_merge([ + 'php_cli' => null, + 'force_colors' => false, + 'output' => null, + 'verbose' => false, + 'test_path' => sys_get_temp_dir(), + ], $options); + + $this->php_cli = $this->find_php_cli($this->options['php_cli']); + $this->output = $this->options['output'] ? $this->options['output'] : new lime_output($this->options['force_colors']); + } + + protected function find_php_cli($php_cli = null) + { + if (is_null($php_cli)) { + if (getenv('PHP_PATH')) { + $php_cli = getenv('PHP_PATH'); + + if (!is_executable($php_cli)) { + throw new Exception('The defined PHP_PATH environment variable is not a valid PHP executable.'); + } + } else { + $php_cli = PHP_BINDIR.DIRECTORY_SEPARATOR.'php'; + } + } + + if (is_executable($php_cli)) { + return $php_cli; + } + + $path = getenv('PATH') ? getenv('PATH') : getenv('Path'); + $exe_suffixes = DIRECTORY_SEPARATOR == '\\' ? (getenv('PATHEXT') ? explode(PATH_SEPARATOR, getenv('PATHEXT')) : ['.exe', '.bat', '.cmd', '.com']) : ['']; + foreach (['php5', 'php'] as $php_cli) { + foreach ($exe_suffixes as $suffix) { + foreach (explode(PATH_SEPARATOR, $path) as $dir) { + $file = $dir.DIRECTORY_SEPARATOR.$php_cli.$suffix; + if (is_executable($file)) { + return $file; + } + } + } + } + + throw new Exception('Unable to find PHP executable.'); + } + + public function to_array() + { + $results = []; + foreach ($this->stats['files'] as $stat) { + $results = array_merge($results, $stat['output']); + } + + return $results; + } + + public function to_xml() + { + return lime_test::to_xml($this->to_array()); + } + + public function run() + { + if (!count($this->files)) { + throw new Exception('You must register some test files before running them!'); + } + + // sort the files to be able to predict the order + sort($this->files); + + $this->stats = [ + 'files' => [], + 'failed_files' => [], + 'failed_tests' => 0, + 'total' => 0, + ]; + + foreach ($this->files as $file) { + $this->stats['files'][$file] = []; + $stats = &$this->stats['files'][$file]; + + $test_file = tempnam($this->options['test_path'], 'lime_test').'.php'; + $result_file = tempnam($this->options['test_path'], 'lime_result'); + file_put_contents($test_file, <<executePhpFile($test_file); + ob_end_clean(); + unlink($test_file); + + $stats['status_code'] = $return; + $output = file_get_contents($result_file); + $stats['output'] = $output ? 
unserialize($output) : ''; + if (!$stats['output']) { + $stats['output'] = $this->makeOutputForMissingTestReport($file); + } + unlink($result_file); + + $file_stats = &$stats['output'][0]['stats']; + + $delta = $this->computePlanDeltaFromFileStats($file_stats); + + $stats['status'] = $this->computeStatusWithCodeAndFileStats( + $stats['status_code'], + $file_stats + ); + + if ('ok' !== $stats['status']) { + $this->stats['failed_files'][] = $file; + } + + $this->stats['total'] += $file_stats['total']; + + if ($delta > 0) { + $this->stats['failed_tests'] += $delta; + $this->stats['total'] += $delta; + } + + if ($file_stats['failed']) { + $this->stats['failed_tests'] += count($file_stats['failed']); + } + + $this->writeFileSummary($file, $stats['status']); + + $this->writeFileDetails($stats, $file_stats, $delta); + } + + if (count($this->stats['failed_files'])) { + $format = '%-30s %4s %5s %5s %5s %s'; + $this->output->echoln(sprintf($format, 'Failed Test', 'Stat', 'Total', 'Fail', 'Errors', 'List of Failed')); + $this->output->echoln('--------------------------------------------------------------------------'); + foreach ($this->stats['files'] as $file => $stat) { + if (!in_array($file, $this->stats['failed_files'])) { + continue; + } + $relative_file = $this->get_relative_file($file); + + if (isset($stat['output'][0])) { + $this->output->echoln(sprintf($format, + substr($relative_file, -min(30, strlen($relative_file))), + $stat['status_code'], + count($stat['output'][0]['stats']['failed']) + + count($stat['output'][0]['stats']['passed']), + count($stat['output'][0]['stats']['failed']), + count($stat['output'][0]['stats']['errors']), + implode(' ', $stat['output'][0]['stats']['failed']) + )); + } else { + $this->output->echoln(sprintf($format, substr($relative_file, -min(30, strlen($relative_file))), $stat['status_code'], '', '', '')); + } + } + + $this->output->red_bar(sprintf('Failed %d/%d test scripts, %.2f%% okay. %d/%d subtests failed, %.2f%% okay.', + $nb_failed_files = count($this->stats['failed_files']), + $nb_files = count($this->files), + ($nb_files - $nb_failed_files) * 100 / $nb_files, + $nb_failed_tests = $this->stats['failed_tests'], + $nb_tests = $this->stats['total'], + $nb_tests > 0 ? ($nb_tests - $nb_failed_tests) * 100 / $nb_tests : 0 + )); + + if ($this->options['verbose']) { + foreach ($this->to_array() as $testsuite) { + $first = true; + foreach ($testsuite['stats']['failed'] as $testcase) { + if (!isset($testsuite['tests'][$testcase]['file'])) { + continue; + } + + if ($first) { + $this->output->echoln(''); + $this->output->error($this->get_relative_file($testsuite['file']).$this->extension); + $first = false; + } + + $this->output->comment(sprintf(' at %s line %s', $this->get_relative_file($testsuite['tests'][$testcase]['file']).$this->extension, $testsuite['tests'][$testcase]['line'])); + $this->output->info(' '.$testsuite['tests'][$testcase]['message']); + if (isset($testsuite['tests'][$testcase]['error'])) { + $this->output->echoln($testsuite['tests'][$testcase]['error'], null, false); + } + } + } + } + } else { + $this->output->green_bar(' All tests successful.'); + $this->output->green_bar(sprintf(' Files=%d, Tests=%d', count($this->files), $this->stats['total'])); + } + + return $this->stats['failed_files'] ? 
false : true; + } + + private function makeOutputForMissingTestReport(string $file): array + { + return [ + [ + 'file' => $file, + 'tests' => [], + 'stats' => [ + 'plan' => null, + 'total' => 0, + 'failed' => [], + 'passed' => [], + 'skipped' => [], + 'errors' => [ + [ + 'message' => 'Missing test report. It is probably due to a Parse error.', + ], + ], + ], + ], + ]; + } + + private function computePlanDeltaFromFileStats(array $fileStats): int + { + if ($fileStats['plan']) { + return $fileStats['plan'] - $fileStats['total']; + } + + return 0; + } + + private function computeStatusWithCodeAndFileStats(int $statusCode, array $fileStats): string + { + if (0 === $statusCode) { + return 'ok'; + } + + if ($fileStats['failed']) { + return 'not ok'; + } + + if ($fileStats['errors']) { + return 'errors'; + } + + return 'dubious'; + } + + private function writeFileSummary(string $file, string $status): void + { + $relativeFile = $this->get_relative_file($file); + + if (true === $this->full_output) { + $this->output->echoln(sprintf('%s%s%s', $relativeFile, '.....', $status)); + } else { + $this->output->echoln(sprintf('%s%s%s', + substr($relativeFile, -min(67, strlen($relativeFile))), + str_repeat('.', 70 - min(67, strlen($relativeFile))), + $status + )); + } + } + + private function writeFileDetails(array $stats, array $fileStats, int $delta): void + { + if ('dubious' === $stats['status']) { + $this->output->echoln(sprintf(' Test returned status %s', $stats['status_code'])); + } + + if ($delta > 0) { + $this->output->echoln(sprintf(' Looks like you planned %d tests but only ran %d.', $fileStats['plan'], $fileStats['total'])); + } elseif ($delta < 0) { + $this->output->echoln(sprintf(' Looks like you planned %s test but ran %s extra.', $fileStats['plan'], $fileStats['total'] - $fileStats['plan'])); + } + + if (false !== $fileStats && $fileStats['failed']) { + $this->output->echoln(sprintf(' Failed tests: %s', implode(', ', $fileStats['failed']))); + } + + if (false !== $fileStats && $fileStats['errors']) { + $this->output->echoln(' Errors:'); + + $error_count = count($fileStats['errors']); + for ($i = 0; $i < 3 && $i < $error_count; ++$i) { + $this->output->echoln(' - '.$fileStats['errors'][$i]['message'], null, false); + } + if ($error_count > 3) { + $this->output->echoln(sprintf(' ... and %s more', $error_count - 3)); + } + } + } + + public function get_failed_files() + { + return isset($this->stats['failed_files']) ? $this->stats['failed_files'] : []; + } + + /** + * The command fails if the path to php interpreter contains spaces. + * The only workaround is adding a "nop" command call before the quoted command. + * The weird "cd &". + * + * see http://trac.symfony-project.org/ticket/5437 + */ + public function executePhpFile(string $phpFile): int + { + passthru(sprintf('cd & %s %s 2>&1', escapeshellarg($this->php_cli), escapeshellarg($phpFile)), $return); + + return $return; + } +} diff --git a/lib/vendor/lime/src/lime_test.php b/lib/vendor/lime/src/lime_test.php new file mode 100644 index 000000000..18e35a96f --- /dev/null +++ b/lib/vendor/lime/src/lime_test.php @@ -0,0 +1,637 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +/** + * Unit test library. 
+ * + * @author Fabien Potencier + */ +class lime_test +{ + public const EPSILON = 0.0000000001; + + protected $test_nb = 0; + protected $output; + protected $results = []; + protected $options = []; + + protected static $all_results = []; + + private const STATE_PASS = 0; + private const STATE_FAIL = 1; + private const STATE_PLAN_NOT_FOLLOW = 2; + + private static $instanceCount = 0; + private static $finalState = self::STATE_PASS; + + public function __construct($plan = null, $options = []) + { + ++self::$instanceCount; + + // for BC + if (!is_array($options)) { + $options = ['output' => $options]; + } + + $this->options = array_merge([ + 'force_colors' => false, + 'output' => null, + 'verbose' => false, + 'error_reporting' => false, + ], $options); + + $this->output = $this->options['output'] ? $this->options['output'] : new lime_output($this->options['force_colors']); + + $caller = $this->find_caller(debug_backtrace()); + self::$all_results[] = [ + 'file' => $caller[0], + 'tests' => [], + 'stats' => ['plan' => $plan, 'total' => 0, 'failed' => [], 'passed' => [], 'skipped' => [], 'errors' => []], + ]; + + $this->results = &self::$all_results[count(self::$all_results) - 1]; + + null !== $plan and $this->output->echoln(sprintf('1..%d', $plan)); + + set_error_handler([$this, 'handle_error']); + set_exception_handler([$this, 'handle_exception']); + } + + public static function reset() + { + self::$all_results = []; + } + + public static function to_array() + { + return self::$all_results; + } + + public static function to_xml($results = null) + { + if (is_null($results)) { + $results = self::$all_results; + } + + $dom = new DOMDocument('1.0', 'UTF-8'); + $dom->formatOutput = true; + $dom->appendChild($testsuites = $dom->createElement('testsuites')); + + $errors = 0; + $failures = 0; + $errors = 0; + $skipped = 0; + $assertions = 0; + + foreach ($results as $result) { + $testsuites->appendChild($testsuite = $dom->createElement('testsuite')); + $testsuite->setAttribute('name', basename($result['file'], '.php')); + $testsuite->setAttribute('file', $result['file']); + $testsuite->setAttribute('failures', count($result['stats']['failed'])); + $testsuite->setAttribute('errors', count($result['stats']['errors'])); + $testsuite->setAttribute('skipped', count($result['stats']['skipped'])); + $testsuite->setAttribute('tests', $result['stats']['plan']); + $testsuite->setAttribute('assertions', $result['stats']['plan']); + + $failures += count($result['stats']['failed']); + $errors += count($result['stats']['errors']); + $skipped += count($result['stats']['skipped']); + $assertions += $result['stats']['plan']; + + foreach ($result['tests'] as $test) { + $testsuite->appendChild($testcase = $dom->createElement('testcase')); + $testcase->setAttribute('name', utf8_encode($test['message'])); + $testcase->setAttribute('file', $test['file']); + $testcase->setAttribute('line', $test['line']); + $testcase->setAttribute('assertions', 1); + if (!$test['status']) { + $testcase->appendChild($failure = $dom->createElement('failure')); + $failure->setAttribute('type', 'lime'); + if (isset($test['error'])) { + $failure->appendChild($dom->createTextNode($test['error'])); + } + } + } + } + + $testsuites->setAttribute('failures', $failures); + $testsuites->setAttribute('errors', $errors); + $testsuites->setAttribute('tests', $assertions); + $testsuites->setAttribute('assertions', $assertions); + $testsuites->setAttribute('skipped', $skipped); + + return $dom->saveXml(); + } + + public function __destruct() + { + 
$testSuiteState = $this->determineAndPrintStateOfTestSuite(); + + flush(); + + $this->keepTheWorstState($testSuiteState); + + $this->finalizeLastInstanceDestructorWithProcessExit(); + } + + private function determineAndPrintStateOfTestSuite(): int + { + $planState = $this->determineAndPrintStateOfPlan(); + $failed = count($this->results['stats']['failed']); + + if ($failed) { + $passed = count($this->results['stats']['passed']); + + $this->output->red_bar(sprintf('# Looks like you failed %d tests of %d.', $failed, $passed + $failed)); + + return self::STATE_FAIL; + } + + if ($this->results['stats']['errors']) { + return self::STATE_FAIL; + } + + if (self::STATE_PASS === $planState) { + $this->output->green_bar('# Looks like everything went fine.'); + } + + return $planState; + } + + private function determineAndPrintStateOfPlan(): int + { + $plan = $this->results['stats']['plan']; + $total = $this->results['stats']['total']; + + if (null === $plan) { + $plan = $total; + + $this->output->echoln(sprintf('1..%d', $plan)); + } + + if ($total > $plan) { + $this->output->red_bar(sprintf('# Looks like you planned %d tests but ran %d extra.', $plan, $total - $plan)); + } elseif ($total < $plan) { + $this->output->red_bar(sprintf('# Looks like you planned %d tests but only ran %d.', $plan, $total)); + } + + return $total === $plan ? self::STATE_PASS : self::STATE_PLAN_NOT_FOLLOW; + } + + private function keepTheWorstState(int $state): void + { + if ($this->stateIsTheWorst($state)) { + self::$finalState = $state; + } + } + + private function stateIsTheWorst(int $state): bool + { + return self::$finalState < $state; + } + + private function finalizeLastInstanceDestructorWithProcessExit(): void + { + --self::$instanceCount; + + if (0 === self::$instanceCount) { + exit($this->determineExitCodeFromState(self::$finalState)); + } + } + + private function determineExitCodeFromState(int $state): int + { + switch ($state) { + case self::STATE_PASS: + return 0; + case self::STATE_PLAN_NOT_FOLLOW: + return 255; + default: + return 1; + } + } + + /** + * Tests a condition and passes if it is true. + * + * @param mixed $exp condition to test + * @param string $message display output message when the test passes + * + * @return bool + */ + public function ok($exp, $message = '') + { + $this->update_stats(); + + if ($result = (bool) $exp) { + $this->results['stats']['passed'][] = $this->test_nb; + } else { + $this->results['stats']['failed'][] = $this->test_nb; + } + $this->results['tests'][$this->test_nb]['message'] = $message; + $this->results['tests'][$this->test_nb]['status'] = $result; + $this->output->echoln(sprintf('%s %d%s', $result ? 'ok' : 'not ok', $this->test_nb, $message = $message ? sprintf('%s %s', 0 === strpos($message, '#') ? '' : ' -', $message) : '')); + + if (!$result) { + $this->output->diag(sprintf(' Failed test (%s at line %d)', str_replace(getcwd(), '.', $this->results['tests'][$this->test_nb]['file']), $this->results['tests'][$this->test_nb]['line'])); + } + + return $result; + } + + /** + * Compares two values and returns true if they are equal. 
+ * + * @param mixed $exp1 left value + * @param mixed $exp2 right value + * + * @return bool + */ + private function equals($exp1, $exp2) + { + if (is_object($exp1) || is_object($exp2)) { + return $exp1 === $exp2; + } + if (is_float($exp1) && is_float($exp2)) { + return abs($exp1 - $exp2) < self::EPSILON; + } + if (is_string($exp1) && is_numeric($exp1) || is_string($exp2) && is_numeric($exp2)) { + return $exp1 == $exp2; + } + if (is_string($exp1) || is_string($exp2)) { + return (string) $exp1 === (string) $exp2; + } + + return $exp1 == $exp2; + } + + /** + * Compares two values and passes if they are equal (==). + * + * @param mixed $exp1 left value + * @param mixed $exp2 right value + * @param string $message display output message when the test passes + * + * @return bool + */ + public function is($exp1, $exp2, $message = '') + { + $value = $this->equals($exp1, $exp2); + + if (!$result = $this->ok($value, $message)) { + $this->set_last_test_errors([sprintf(' got: %s', var_export($exp1, true)), sprintf(' expected: %s', var_export($exp2, true))]); + } + + return $result; + } + + /** + * Compares two values and passes if they are not equal. + * + * @param mixed $exp1 left value + * @param mixed $exp2 right value + * @param string $message display output message when the test passes + * + * @return bool + */ + public function isnt($exp1, $exp2, $message = '') + { + $value = $this->equals($exp1, $exp2); + + if (!$result = $this->ok(!$value, $message)) { + $this->set_last_test_errors([sprintf(' %s', var_export($exp1, true)), ' ne', sprintf(' %s', var_export($exp2, true))]); + } + + return $result; + } + + /** + * Tests a string against a regular expression. + * + * @param string $exp value to test + * @param string $regex the pattern to search for, as a string + * @param string $message display output message when the test passes + * + * @return bool + */ + public function like($exp, $regex, $message = '') + { + if (!$result = $this->ok(preg_match($regex, $exp), $message)) { + $this->set_last_test_errors([sprintf(" '%s'", $exp), sprintf(" doesn't match '%s'", $regex)]); + } + + return $result; + } + + /** + * Checks that a string doesn't match a regular expression. + * + * @param string $exp value to test + * @param string $regex the pattern to search for, as a string + * @param string $message display output message when the test passes + * + * @return bool + */ + public function unlike($exp, $regex, $message = '') + { + if (!$result = $this->ok(!preg_match($regex, $exp), $message)) { + $this->set_last_test_errors([sprintf(" '%s'", $exp), sprintf(" matches '%s'", $regex)]); + } + + return $result; + } + + /** + * Compares two arguments with an operator. + * + * @param mixed $exp1 left value + * @param string $op operator + * @param mixed $exp2 right value + * @param string $message display output message when the test passes + * + * @return bool + */ + public function cmp_ok($exp1, $op, $exp2, $message = '') + { + $php = sprintf("\$result = \$exp1 {$op} \$exp2;"); + // under some unknown conditions the sprintf() call causes a segmentation fault + // when placed directly in the eval() call + eval($php); + + if (!$this->ok($result, $message)) { + $this->set_last_test_errors([sprintf(' %s', str_replace("\n", '', var_export($exp1, true))), sprintf(' %s', $op), sprintf(' %s', str_replace("\n", '', var_export($exp2, true)))]); + } + + return $result; + } + + /** + * Checks the availability of a method for an object or a class. 
+ * + * @param mixed $object an object instance or a class name + * @param string|array $methods one or more method names + * @param string $message display output message when the test passes + * + * @return bool + */ + public function can_ok($object, $methods, $message = '') + { + $result = true; + $failed_messages = []; + foreach ((array) $methods as $method) { + if (!method_exists($object, $method)) { + $failed_messages[] = sprintf(" method '%s' does not exist", $method); + $result = false; + } + } + + !$this->ok($result, $message); + + !$result and $this->set_last_test_errors($failed_messages); + + return $result; + } + + /** + * Checks the type of an argument. + * + * @param mixed $var variable instance + * @param string $class class or type name + * @param string $message display output message when the test passes + * + * @return bool + */ + public function isa_ok($var, $class, $message = '') + { + $type = is_object($var) ? get_class($var) : gettype($var); + if (!$result = $this->ok($type == $class, $message)) { + $this->set_last_test_errors([sprintf(" variable isn't a '%s' it's a '%s'", $class, $type)]); + } + + return $result; + } + + /** + * Checks that two arrays have the same values. + * + * @param mixed $exp1 first variable + * @param mixed $exp2 second variable + * @param string $message display output message when the test passes + * + * @return bool + */ + public function is_deeply($exp1, $exp2, $message = '') + { + if (!$result = $this->ok($this->test_is_deeply($exp1, $exp2), $message)) { + $this->set_last_test_errors([sprintf(' got: %s', str_replace("\n", '', var_export($exp1, true))), sprintf(' expected: %s', str_replace("\n", '', var_export($exp2, true)))]); + } + + return $result; + } + + /** + * Always passes--useful for testing exceptions. + * + * @param string $message display output message + * + * @return true + */ + public function pass($message = '') + { + return $this->ok(true, $message); + } + + /** + * Always fails--useful for testing exceptions. + * + * @param string $message display output message + * + * @return false + */ + public function fail($message = '') + { + return $this->ok(false, $message); + } + + /** + * Outputs a diag message but runs no test. + * + * @param string $message display output message + */ + public function diag($message) + { + $this->output->diag($message); + } + + /** + * Counts as $nb_tests tests--useful for conditional tests. + * + * @param string $message display output message + * @param int $nb_tests number of tests to skip + */ + public function skip($message = '', $nb_tests = 1) + { + for ($i = 0; $i < $nb_tests; ++$i) { + $this->pass(sprintf('# SKIP%s', $message ? ' '.$message : '')); + $this->results['stats']['skipped'][] = $this->test_nb; + array_pop($this->results['stats']['passed']); + } + } + + /** + * Counts as a test--useful for tests yet to be written. + * + * @param string $message display output message + */ + public function todo($message = '') + { + $this->pass(sprintf('# TODO%s', $message ? ' '.$message : '')); + $this->results['stats']['skipped'][] = $this->test_nb; + array_pop($this->results['stats']['passed']); + } + + /** + * Validates that a file exists and that it is properly included. 
+ * + * @param string $file file path + * @param string $message display output message when the test passes + * + * @return bool + */ + public function include_ok($file, $message = '') + { + if (!$result = $this->ok((@include ($file)) == 1, $message)) { + $this->set_last_test_errors([sprintf(" Tried to include '%s'", $file)]); + } + + return $result; + } + + private function test_is_deeply($var1, $var2) + { + if (gettype($var1) != gettype($var2)) { + return false; + } + + if (is_array($var1)) { + ksort($var1); + ksort($var2); + + $keys1 = array_keys($var1); + $keys2 = array_keys($var2); + if (array_diff($keys1, $keys2) || array_diff($keys2, $keys1)) { + return false; + } + $is_equal = true; + foreach ($var1 as $key => $value) { + $is_equal = $this->test_is_deeply($var1[$key], $var2[$key]); + if (false === $is_equal) { + break; + } + } + + return $is_equal; + } + + return $var1 === $var2; + } + + public function comment($message) + { + $this->output->comment($message); + } + + public function info($message) + { + $this->output->info($message); + } + + public function error($message, $file = null, $line = null, array $traces = []) + { + $this->output->error($message, $file, $line, $traces); + + $this->results['stats']['errors'][] = [ + 'message' => $message, + 'file' => $file, + 'line' => $line, + ]; + } + + protected function update_stats() + { + ++$this->test_nb; + ++$this->results['stats']['total']; + + list($this->results['tests'][$this->test_nb]['file'], $this->results['tests'][$this->test_nb]['line']) = $this->find_caller(debug_backtrace()); + } + + protected function set_last_test_errors(array $errors) + { + $this->output->diag($errors); + + $this->results['tests'][$this->test_nb]['error'] = implode("\n", $errors); + } + + private function is_test_object($object) + { + return $object instanceof lime_test || $object instanceof sfTestFunctionalBase || $object instanceof sfTester; + } + + protected function find_caller($traces) + { + // find the first call to a method of an object that is an instance of lime_test + $t = array_reverse($traces); + foreach ($t as $trace) { + // In internal calls, like error_handle, 'file' will be missing + if (isset($trace['object']) && $this->is_test_object($trace['object']) && isset($trace['file'])) { + return [$trace['file'], $trace['line']]; + } + } + + // return the first call + $last = count($traces) - 1; + + return [$traces[$last]['file'], $traces[$last]['line']]; + } + + public function handle_error($code, $message, $file, $line, $context = null) + { + if (!$this->options['error_reporting'] || ($code & error_reporting()) == 0) { + return false; + } + + switch ($code) { + case E_WARNING: + $type = 'Warning'; + break; + default: + $type = 'Notice'; + break; + } + + $trace = debug_backtrace(); + array_shift($trace); // remove the handle_error() call from the trace + + $this->error($type.': '.$message, $file, $line, $trace); + } + + /** + * @param Throwable|Exception $exception + * + * @return bool + */ + public function handle_exception($exception) + { + $this->error(get_class($exception).': '.$exception->getMessage(), $exception->getFile(), $exception->getLine(), $exception->getTrace()); + + // exception was handled + return true; + } +} diff --git a/test/bin/coverage.php b/test/bin/coverage.php index 409634821..c7bc90e3f 100644 --- a/test/bin/coverage.php +++ b/test/bin/coverage.php @@ -42,4 +42,7 @@ $finder = sfFinder::type('file')->name($name.'.class.php')->prune('vendor')->prune('test')->prune('data'); $c->register($finder->in($c->base_dir)); 
-$c->run(); + +$allTestsSucceed = $c->run(); + +exit($allTestsSucceed ? 0 : 1); diff --git a/test/unit/vendor/lime/fixtures/failed.php b/test/unit/vendor/lime/fixtures/failed.php new file mode 100644 index 000000000..bd0186881 --- /dev/null +++ b/test/unit/vendor/lime/fixtures/failed.php @@ -0,0 +1,7 @@ +is(false, true); diff --git a/test/unit/vendor/lime/fixtures/failed_with_plan_less_than_total.php b/test/unit/vendor/lime/fixtures/failed_with_plan_less_than_total.php new file mode 100644 index 000000000..47e0c69f8 --- /dev/null +++ b/test/unit/vendor/lime/fixtures/failed_with_plan_less_than_total.php @@ -0,0 +1,8 @@ +is(false, true); +$test->is(true, true); diff --git a/test/unit/vendor/lime/fixtures/failed_with_plan_more_than_total.php b/test/unit/vendor/lime/fixtures/failed_with_plan_more_than_total.php new file mode 100644 index 000000000..7bf7da135 --- /dev/null +++ b/test/unit/vendor/lime/fixtures/failed_with_plan_more_than_total.php @@ -0,0 +1,7 @@ +is(false, true); diff --git a/test/unit/vendor/lime/fixtures/pass.php b/test/unit/vendor/lime/fixtures/pass.php new file mode 100644 index 000000000..93afc9a6e --- /dev/null +++ b/test/unit/vendor/lime/fixtures/pass.php @@ -0,0 +1,7 @@ +is(true, true); diff --git a/test/unit/vendor/lime/fixtures/pass_with_one_error.php b/test/unit/vendor/lime/fixtures/pass_with_one_error.php new file mode 100644 index 000000000..4491a9a80 --- /dev/null +++ b/test/unit/vendor/lime/fixtures/pass_with_one_error.php @@ -0,0 +1,13 @@ + true, +]); + +trigger_error('some user error message', E_USER_ERROR); + +$test->is(true, true); diff --git a/test/unit/vendor/lime/fixtures/pass_with_one_parse_error.php b/test/unit/vendor/lime/fixtures/pass_with_one_parse_error.php new file mode 100644 index 000000000..39535fef4 --- /dev/null +++ b/test/unit/vendor/lime/fixtures/pass_with_one_parse_error.php @@ -0,0 +1,7 @@ +is(true, true); diff --git a/test/unit/vendor/lime/fixtures/pass_with_plan_less_than_total.php b/test/unit/vendor/lime/fixtures/pass_with_plan_less_than_total.php new file mode 100644 index 000000000..13d7600b8 --- /dev/null +++ b/test/unit/vendor/lime/fixtures/pass_with_plan_less_than_total.php @@ -0,0 +1,8 @@ +is(true, true); +$test->is(true, true); diff --git a/test/unit/vendor/lime/fixtures/pass_with_plan_more_than_total.php b/test/unit/vendor/lime/fixtures/pass_with_plan_more_than_total.php new file mode 100644 index 000000000..6c0d577cc --- /dev/null +++ b/test/unit/vendor/lime/fixtures/pass_with_plan_more_than_total.php @@ -0,0 +1,7 @@ +is(true, true); diff --git a/test/unit/vendor/lime/lime_harnessTest.php b/test/unit/vendor/lime/lime_harnessTest.php new file mode 100644 index 000000000..a72213ca3 --- /dev/null +++ b/test/unit/vendor/lime/lime_harnessTest.php @@ -0,0 +1,187 @@ +test = new lime_test(); + } + + private function whenExecuteHarnessWithFilesWillHaveResultAndOutput($message, $files, $expectedOverallSucceed, $expectedOutput) + { + $this->test->info($message); + + $harness = $this->makeHarnessWithFiles($files); + + ob_start(); + $allTestsSucceed = $harness->run(); + $output = ob_get_clean(); + + $this->test->is($expectedOverallSucceed, $allTestsSucceed, 'overall test '.($expectedOverallSucceed ? 
'succeed' : 'failed')); + + $this->test->is($this->removeTrailingSpaces($output), $expectedOutput, 'test harness result output'); + } + + private function makeHarnessWithFiles($files): lime_harness + { + $harness = new lime_harness(); + $harness->output->colorizer = new lime_no_colorizer(); + + $harness->register($files); + + return $harness; + } + + private function removeTrailingSpaces(string $output): string + { + return preg_replace("/ *\n/", "\n", $output); + } + + public function run(): void + { + foreach ($this->provideTestCases() as $parameters) { + $this->whenExecuteHarnessWithFilesWillHaveResultAndOutput(...$parameters); + } + } + + private function provideTestCases() + { + yield [ + /* name */ 'with all tests passes without error and exception will succeed the overall test suite', + /* files */ [ + __DIR__.'/fixtures/pass.php', + ], + /* expectedOverallSucceed */ true, + /* expectedOutput */ <<<'EOF' +test/unit/vendor/lime/fixtures/pass..................................ok + All tests successful. + Files=1, Tests=1 + +EOF + ]; + + yield [ + /* name */ 'with at least one test file that not follow the plan will fail the overall test suite', + /* files */ [ + __DIR__.'/fixtures/pass_with_plan_less_than_total.php', + ], + /* expectedOverallSucceed */ false, + /* expectedOutput */ <<<'EOF' +test/unit/vendor/lime/fixtures/pass_with_plan_less_than_total........dubious + Test returned status 255 + Looks like you planned 1 test but ran 1 extra. +Failed Test Stat Total Fail Errors List of Failed +-------------------------------------------------------------------------- +pass_with_plan_less_than_total 255 2 0 0 +Failed 1/1 test scripts, 0.00% okay. 0/2 subtests failed, 100.00% okay. + +EOF + ]; + + yield [ + /* name */ 'with at least one error will fail the overall test suite', + /* files */ [ + __DIR__.'/fixtures/pass_with_one_error.php', + ], + /* expectedOverallSucceed */ false, + /* expectedOutput */ <<<'EOF' +test/unit/vendor/lime/fixtures/pass_with_one_error...................errors + Errors: + - Notice: some user error message +Failed Test Stat Total Fail Errors List of Failed +-------------------------------------------------------------------------- +e/fixtures/pass_with_one_error 1 1 0 1 +Failed 1/1 test scripts, 0.00% okay. 0/1 subtests failed, 100.00% okay. + +EOF + ]; + + yield [ + /* name */ 'with at least one thrown Exception will fail the overall test suite', + /* files */ [ + __DIR__.'/fixtures/pass_with_one_throw_exception.php', + ], + /* expectedOverallSucceed */ false, + /* expectedOutput */ <<<'EOF' +test/unit/vendor/lime/fixtures/pass_with_one_throw_exception.........errors + Errors: + - LogicException: some exception message +Failed Test Stat Total Fail Errors List of Failed +-------------------------------------------------------------------------- +/pass_with_one_throw_exception 1 0 0 1 +Failed 1/1 test scripts, 0.00% okay. 0/0 subtests failed, 0.00% okay. + +EOF + ]; + + yield [ + /* name */ 'with parse error will fail the overall test suite', + /* files */ [ + __DIR__.'/fixtures/pass_with_one_parse_error.php', + ], + /* expectedOverallSucceed */ false, + /* expectedOutput */ <<<'EOF' +test/unit/vendor/lime/fixtures/pass_with_one_parse_error.............errors + Errors: + - Missing test report. It is probably due to a Parse error. +Failed Test Stat Total Fail Errors List of Failed +-------------------------------------------------------------------------- +ures/pass_with_one_parse_error 255 0 0 1 +Failed 1/1 test scripts, 0.00% okay. 
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'with at least one failed test file will fail the overall test suite',
+            /* files */ [
+                __DIR__.'/fixtures/failed.php',
+                __DIR__.'/fixtures/failed_with_plan_less_than_total.php',
+                __DIR__.'/fixtures/failed_with_plan_more_than_total.php',
+                __DIR__.'/fixtures/pass.php',
+                __DIR__.'/fixtures/pass_with_plan_less_than_total.php',
+                __DIR__.'/fixtures/pass_with_plan_more_than_total.php',
+            ],
+            /* expectedOverallSucceed */ false,
+            /* expectedOutput */ <<<'EOF'
+test/unit/vendor/lime/fixtures/failed................................not ok
+ Failed tests: 1
+test/unit/vendor/lime/fixtures/failed_with_plan_less_than_total......not ok
+ Looks like you planned 1 test but ran 1 extra.
+ Failed tests: 1
+test/unit/vendor/lime/fixtures/failed_with_plan_more_than_total......not ok
+ Looks like you planned 2 tests but only ran 1.
+ Failed tests: 1
+test/unit/vendor/lime/fixtures/pass..................................ok
+test/unit/vendor/lime/fixtures/pass_with_plan_less_than_total........dubious
+ Test returned status 255
+ Looks like you planned 1 test but ran 1 extra.
+test/unit/vendor/lime/fixtures/pass_with_plan_more_than_total........dubious
+ Test returned status 255
+ Looks like you planned 2 tests but only ran 1.
+Failed Test Stat Total Fail Errors List of Failed
+--------------------------------------------------------------------------
+it/vendor/lime/fixtures/failed 1 1 1 0 1
+iled_with_plan_less_than_total 1 2 1 0 1
+iled_with_plan_more_than_total 1 1 1 0 1
+pass_with_plan_less_than_total 255 2 0 0
+pass_with_plan_more_than_total 255 1 0 0
+Failed 5/6 test scripts, 16.67% okay. 5/10 subtests failed, 50.00% okay.
+
+EOF
+        ];
+    }
+}
+
+(new lime_harnessTest())->run();
diff --git a/test/unit/vendor/lime/lime_testTest.php b/test/unit/vendor/lime/lime_testTest.php
new file mode 100644
index 000000000..a43dd159c
--- /dev/null
+++ b/test/unit/vendor/lime/lime_testTest.php
@@ -0,0 +1,160 @@
+test = new lime_test();
+    }
+
+    private function removeTrailingSpaces(string $output): string
+    {
+        return preg_replace("/ *\n/", "\n", $output);
+    }
+
+    private function whenExecutePhpFileWillHaveStatusCodeAndOutput($name, $expectedStatusCode, $expectedOutput)
+    {
+        $this->test->info($name);
+
+        ob_start();
+        $exitCode = (new lime_harness())->executePhpFile(__DIR__.'/fixtures/'.$name.'.php');
+        $output = ob_get_clean();
+
+        $this->test->is($exitCode, $expectedStatusCode, 'with test '.$name.' will exit with status code '.$expectedStatusCode);
+
+        $this->test->is($this->removeTrailingSpaces($output), $expectedOutput, 'test '.$name.' output');
+    }
+
+    public function run()
+    {
+        foreach ($this->provideTestCases() as $parameters) {
+            $this->whenExecutePhpFileWillHaveStatusCodeAndOutput(...$parameters);
+        }
+    }
+
+    private function provideTestCases()
+    {
+        yield [
+            /* name */ 'pass',
+            /* expectedStatusCode */ 0,
+            /* expectedOutput */ <<<'EOF'
+ok 1
+1..1
+# Looks like everything went fine.
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'failed',
+            /* expectedStatusCode */ 1,
+            /* expectedOutput */ <<<'EOF'
+not ok 1
+# Failed test (./test/unit/vendor/lime/fixtures/failed.php at line 7)
+# got: false
+# expected: true
+1..1
+# Looks like you failed 1 tests of 1.
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'failed_with_plan_less_than_total',
+            /* expectedStatusCode */ 1,
+            /* expectedOutput */ <<<'EOF'
+1..1
+not ok 1
+# Failed test (./test/unit/vendor/lime/fixtures/failed_with_plan_less_than_total.php at line 7)
+# got: false
+# expected: true
+ok 2
+# Looks like you planned 1 tests but ran 1 extra.
+# Looks like you failed 1 tests of 2.
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'failed_with_plan_more_than_total',
+            /* expectedStatusCode */ 1,
+            /* expectedOutput */ <<<'EOF'
+1..2
+not ok 1
+# Failed test (./test/unit/vendor/lime/fixtures/failed_with_plan_more_than_total.php at line 7)
+# got: false
+# expected: true
+# Looks like you planned 2 tests but only ran 1.
+# Looks like you failed 1 tests of 1.
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'pass_with_plan_less_than_total',
+            /* expectedStatusCode */ 255,
+            /* expectedOutput */ <<<'EOF'
+1..1
+ok 1
+ok 2
+# Looks like you planned 1 tests but ran 1 extra.
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'pass_with_plan_more_than_total',
+            /* expectedStatusCode */ 255,
+            /* expectedOutput */ <<<'EOF'
+1..2
+ok 1
+# Looks like you planned 2 tests but only ran 1.
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'pass_with_one_error',
+            /* expectedStatusCode */ 1,
+            /* expectedOutput */ <<<'EOF'
+
+
+ Notice: some user error message
+ (in test/unit/vendor/lime/fixtures/pass_with_one_error.php on line
+ 11)
+
+
+Exception trace:
+ at test/unit/vendor/lime/fixtures/pass_with_one_error.php:11
+ trigger_error() at test/unit/vendor/lime/fixtures/pass_with_one_error.php:11
+
+ok 1
+1..1
+
+EOF
+        ];
+
+        yield [
+            /* name */ 'pass_with_one_throw_exception',
+            /* expectedStatusCode */ 1,
+            /* expectedOutput */ <<<'EOF'
+
+
+ LogicException: some exception message
+ (in
+ test/unit/vendor/lime/fixtures/pass_with_one_throw_exception.php
+ on line 7)
+
+
+1..0
+
+EOF
+        ];
+    }
+}
+
+(new lime_testTest())->run();