Mirror of https://github.com/google/googletest.git (synced 2025-03-10 17:29:16 +00:00)
Adds run_tests.py for running the tests (by Vlad Losev).
This commit is contained in:
parent fd36c200f4
commit 819501581c
433
run_tests.py
Executable file
@@ -0,0 +1,433 @@
#!/usr/bin/env python
#
# Copyright 2008, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Runs specified tests for Google Test.
|
||||||
|
|
||||||
|
SYNOPSIS
|
||||||
|
run_tests.py [OPTION]... [BUILD_DIR]... [TEST]...
|
||||||
|
|
||||||
|
DESCRIPTION
|
||||||
|
Runs the specified tests (either binary or Python), and prints a
|
||||||
|
summary of the results. BUILD_DIRS will be used to search for the
|
||||||
|
binaries. If no TESTs are specified, all binary tests found in
|
||||||
|
BUILD_DIRs and all Python tests found in the directory test/ (in the
|
||||||
|
gtest root) are run.
|
||||||
|
|
||||||
|
TEST is a name of either a binary or a Python test. A binary test is
|
||||||
|
an executable file named *_test or *_unittest (with the .exe
|
||||||
|
extension on Windows) A Python test is a script named *_test.py or
|
||||||
|
*_unittest.py.
|
||||||
|
|
||||||
|
OPTIONS
|
||||||
|
-c CONFIGURATIONS
|
||||||
|
Specify build directories via build configurations.
|
||||||
|
CONFIGURATIONS is either a comma-separated list of build
|
||||||
|
configurations or 'all'. Each configuration is equivalent to
|
||||||
|
adding 'scons/build/<configuration>/scons' to BUILD_DIRs.
|
||||||
|
Specifying -c=all is equivalent to providing all directories
|
||||||
|
listed in KNOWN BUILD DIRECTORIES section below.
|
||||||
|
|
||||||
|
-a
|
||||||
|
Equivalent to -c=all
|
||||||
|
|
||||||
|
-b
|
||||||
|
Equivalent to -c=all with the exception that the script will not
|
||||||
|
fail if some of the KNOWN BUILD DIRECTORIES do not exists; the
|
||||||
|
script will simply not run the tests there. 'b' stands for
|
||||||
|
'built directories'.
|
||||||
|
|
||||||
|
RETURN VALUE
|
||||||
|
Returns 0 if all tests are successful; otherwise returns 1.
|
||||||
|
|
||||||
|
EXAMPLES
|
||||||
|
run_tests.py
|
||||||
|
Runs all tests for the default build configuration.
|
||||||
|
|
||||||
|
run_tests.py -a
|
||||||
|
Runs all tests with binaries in KNOWN BUILD DIRECTORIES.
|
||||||
|
|
||||||
|
run_tests.py -b
|
||||||
|
Runs all tests in KNOWN BUILD DIRECTORIES that have been
|
||||||
|
built.
|
||||||
|
|
||||||
|
run_tests.py foo/
|
||||||
|
Runs all tests in the foo/ directory and all Python tests in
|
||||||
|
the directory test. The Python tests are instructed to look
|
||||||
|
for binaries in foo/.
|
||||||
|
|
||||||
|
run_tests.py bar_test.exe test/baz_test.exe foo/ bar/
|
||||||
|
Runs foo/bar_test.exe, bar/bar_test.exe, foo/baz_test.exe, and
|
||||||
|
bar/baz_test.exe.
|
||||||
|
|
||||||
|
run_tests.py foo bar test/foo_test.py
|
||||||
|
Runs test/foo_test.py twice instructing it to look for its
|
||||||
|
test binaries in the directories foo and bar,
|
||||||
|
correspondingly.
|
||||||
|
|
||||||
|
KNOWN BUILD DIRECTORIES
|
||||||
|
run_tests.py knows about directories where the SCons build script
|
||||||
|
deposits its products. These are the directories where run_tests.py
|
||||||
|
will be looking for its binaries. Currently, gtest's SConstruct file
|
||||||
|
defines them as follows (the default build directory is the first one
|
||||||
|
listed in each group):
|
||||||
|
On Windows:
|
||||||
|
<gtest root>/scons/build/win-dbg/scons/
|
||||||
|
<gtest root>/scons/build/win-opt/scons/
|
||||||
|
<gtest root>/scons/build/win-dbg8/scons/
|
||||||
|
<gtest root>/scons/build/win-opt8/scons/
|
||||||
|
On Mac:
|
||||||
|
<gtest root>/scons/build/mac-dbg/scons/
|
||||||
|
<gtest root>/scons/build/mac-opt/scons/
|
||||||
|
On other platforms:
|
||||||
|
<gtest root>/scons/build/dbg/scons/
|
||||||
|
<gtest root>/scons/build/opt/scons/
|
||||||
|
|
||||||
|
AUTHOR
|
||||||
|
Written by Zhanyong Wan (wan@google.com)
|
||||||
|
and Vlad Losev(vladl@google.com).
|
||||||
|
|
||||||
|
REQUIREMENTS
|
||||||
|
This script requires Python 2.3 or higher.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import optparse
import os
import re
import sets
import sys

try:
  # The subprocess module is the preferred way to invoke subprocesses, but it
  # may not be available on MacOS X 10.4.
  import subprocess
except ImportError:
  subprocess = None

IS_WINDOWS = os.name == 'nt'
IS_MAC = os.name == 'posix' and os.uname()[0] == 'Darwin'

# Definition of CONFIGS must match that of the build directory names in the
# SConstruct script. The first list item is the default build configuration.
if IS_WINDOWS:
  CONFIGS = ('win-dbg', 'win-dbg8', 'win-opt', 'win-opt8')
elif IS_MAC:
  CONFIGS = ('mac-dbg', 'mac-opt')
else:
  CONFIGS = ('dbg', 'opt')

if IS_WINDOWS:
  PYTHON_TEST_REGEX = re.compile(r'_(unit)?test\.py$', re.IGNORECASE)
  BINARY_TEST_REGEX = re.compile(r'_(unit)?test(\.exe)?$', re.IGNORECASE)
else:
  PYTHON_TEST_REGEX = re.compile(r'_(unit)?test\.py$')
  BINARY_TEST_REGEX = re.compile(r'_(unit)?test$')

GTEST_BUILD_DIR = 'GTEST_BUILD_DIR'

def ScriptDir():
  """Returns the directory containing this script file."""

  my_path = sys.argv[0]
  my_dir = os.path.dirname(my_path)
  if not my_dir or __name__ != '__main__':
    my_dir = '.'
  return my_dir


MY_DIR = ScriptDir()


def GetBuildDirForConfig(config):
  """Returns the build directory for a given configuration."""

  return 'scons/build/%s/scons' % config


class TestRunner(object):
  """Provides facilities for running Python and binary tests for Google Test."""

  def __init__(self, injected_os=os, injected_subprocess=subprocess):
    self.os = injected_os
    self.subprocess = injected_subprocess

  def Run(self, args):
    """Runs the executable with given args (args[0] is the executable name).

    Args:
      args: Command line arguments for the process.

    Returns:
      Process's exit code if it exits normally, or -signal if the process is
      killed by a signal.
    """

    if self.subprocess:
      return self.subprocess.Popen(args).wait()
    else:
      return self.os.spawn(self.os.P_WAIT, args[0], args)

  def RunBinaryTest(self, test):
    """Runs the binary test given its path relative to the gtest root.

    Args:
      test: Path to the test binary relative to the location of this script.

    Returns:
      Process's exit code if it exits normally, or -signal if the process is
      killed by a signal.
    """

    return self.Run([self.os.path.abspath(self.os.path.join(MY_DIR, test))])

  def RunPythonTest(self, test, build_dir):
    """Runs the Python test script with the specified build directory.

    Args:
      test: Name of the test's Python script.
      build_dir: Path to the directory where the test binary is to be found.

    Returns:
      Process's exit code if it exits normally, or -signal if the process is
      killed by a signal.
    """

    old_build_dir = self.os.environ.get(GTEST_BUILD_DIR)

    try:
      self.os.environ[GTEST_BUILD_DIR] = build_dir

      # If this script is run on a Windows machine that has no association
      # between the .py extension and a python interpreter, simply passing
      # the script name into subprocess.Popen/os.spawn will not work.
      script = self.os.path.join(MY_DIR, test)
      print 'Running %s . . .' % (script,)
      return self.Run([sys.executable, script])

    finally:
      if old_build_dir is None:
        del self.os.environ[GTEST_BUILD_DIR]
      else:
        self.os.environ[GTEST_BUILD_DIR] = old_build_dir

  def FindFilesByRegex(self, directory, regex):
    """Returns files in a directory whose names match a regular expression.

    Args:
      directory: Path to the directory to search for files.
      regex: Regular expression to filter file names.

    Returns:
      The list of the paths to the files in the directory.
    """

    return [self.os.path.join(directory, file_name)
            for file_name in self.os.listdir(directory)
            if re.search(regex, file_name)]

  # TODO(vladl@google.com): Implement parsing of scons/SConscript to run all
  # tests defined there when no tests are specified.
  # TODO(vladl@google.com): Update the docstring after the code is changed to
  # try to test all builds defined in scons/SConscript.
  def GetTestsToRun(self,
                    args,
                    named_configurations,
                    built_configurations,
                    available_configurations=CONFIGS):
    """Determines what tests should be run.

    Args:
      args: The list of non-option arguments from the command line.
      named_configurations: The list of configurations specified via -c or -a.
      built_configurations: True if -b has been specified.
      available_configurations: a list of configurations available on the
          current platform, injectable for testing.

    Returns:
      A tuple with 2 elements: the list of Python tests to run and the list of
      binary tests to run.
    """

    if named_configurations == 'all':
      named_configurations = ','.join(available_configurations)

    # A final list of build directories which will be searched for the test
    # binaries. First, add directories specified directly on the command
    # line.
    build_dirs = [arg for arg in args if self.os.path.isdir(arg)]

    # Adds build directories specified via their build configurations using
    # the -c or -a options.
    if named_configurations:
      build_dirs += [GetBuildDirForConfig(config)
                     for config in named_configurations.split(',')]

    # Adds KNOWN BUILD DIRECTORIES if -b is specified.
    if built_configurations:
      build_dirs += [GetBuildDirForConfig(config)
                     for config in available_configurations
                     if self.os.path.isdir(GetBuildDirForConfig(config))]

    # If no directories were specified either via -a, -b, -c, or directly, use
    # the default configuration.
    elif not build_dirs:
      build_dirs = [GetBuildDirForConfig(config)
                    for config in available_configurations[0:1]]

    # Makes sure there are no duplications.
    build_dirs = sets.Set(build_dirs)

    errors_found = False
    listed_python_tests = []  # All Python tests listed on the command line.
    listed_binary_tests = []  # All binary tests listed on the command line.

    # Sifts through non-directory arguments fishing for any Python or binary
    # tests and detecting errors.
    for argument in sets.Set(args) - build_dirs:
      if re.search(PYTHON_TEST_REGEX, argument):
        python_path = self.os.path.join('test', self.os.path.basename(argument))
        if self.os.path.isfile(self.os.path.join(MY_DIR, python_path)):
          listed_python_tests.append(python_path)
        else:
          sys.stderr.write('Unable to find Python test %s' % argument)
          errors_found = True
      elif re.search(BINARY_TEST_REGEX, argument):
        # This script also accepts binary test names prefixed with test/ for
        # the convenience of typing them (can use path completions in the
        # shell). Strips test/ prefix from the binary test names.
        listed_binary_tests.append(self.os.path.basename(argument))
      else:
        sys.stderr.write('%s is neither test nor build directory' % argument)
        errors_found = True

    if errors_found:
      return None

    user_has_listed_tests = listed_python_tests or listed_binary_tests

    if user_has_listed_tests:
      selected_python_tests = listed_python_tests
    else:
      selected_python_tests = self.FindFilesByRegex('test', PYTHON_TEST_REGEX)

    # TODO(vladl@google.com): skip unbuilt Python tests when -b is specified.
    python_test_pairs = []
    for directory in build_dirs:
      for test in selected_python_tests:
        python_test_pairs.append((directory, test))

    binary_test_pairs = []
    for directory in build_dirs:
      if user_has_listed_tests:
        binary_test_pairs.extend(
            [(directory, self.os.path.join(directory, test))
             for test in listed_binary_tests])
      else:
        tests = self.FindFilesByRegex(directory, BINARY_TEST_REGEX)
        binary_test_pairs.extend([(directory, test) for test in tests])

    return (python_test_pairs, binary_test_pairs)

  def RunTests(self, python_tests, binary_tests):
    """Runs Python and binary tests represented as pairs (work_dir, binary).

    Args:
      python_tests: List of Python tests to run in the form of tuples
          (build directory, Python test script).
      binary_tests: List of binary tests to run in the form of tuples
          (build directory, binary file).

    Returns:
      The exit code the program should pass into sys.exit().
    """

    if python_tests or binary_tests:
      results = []
      for directory, test in python_tests:
        results.append((directory,
                        test,
                        self.RunPythonTest(test, directory) == 0))
      for directory, test in binary_tests:
        results.append((directory,
                        self.os.path.basename(test),
                        self.RunBinaryTest(test) == 0))

      failed = [(directory, test)
                for (directory, test, success) in results
                if not success]
      print
      print '%d tests run.' % len(results)
      if failed:
        print 'The following %d tests failed:' % len(failed)
        for (directory, test) in failed:
          print '%s in %s' % (test, directory)
        return 1
      else:
        print 'All tests passed!'
    else:  # No tests defined
      print 'Nothing to test - no tests specified!'

    return 0


def _Main():
  """Runs all tests for Google Test."""

  parser = optparse.OptionParser()
  parser.add_option('-c',
                    action='store',
                    dest='configurations',
                    default=None,
                    help='Test in the specified build directories')
  parser.add_option('-a',
                    action='store_const',
                    dest='configurations',
                    default=None,
                    const='all',
                    help='Test in all default build directories')
  parser.add_option('-b',
                    action='store_const',
                    dest='built_configurations',
                    default=False,
                    const=True,
                    help=('Test in all default build directories, do not fail '
                          'if some of them do not exist'))
  (options, args) = parser.parse_args()

  test_runner = TestRunner()
  tests = test_runner.GetTestsToRun(args,
                                    options.configurations,
                                    options.built_configurations)
  if not tests:
    sys.exit(1)  # Incorrect parameters given, abort execution.

  sys.exit(test_runner.RunTests(tests[0], tests[1]))

if __name__ == '__main__':
  _Main()
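For orientation, here is a minimal sketch (illustrative only, not part of this commit) of driving the TestRunner above programmatically instead of through _Main(); it assumes run_tests.py is importable, e.g. the current directory is the gtest root, and mirrors the default code path that the unit tests in the next file exercise:

# Illustrative sketch only -- not part of this commit.
import sys

import run_tests  # assumes run_tests.py is on sys.path

runner = run_tests.TestRunner()      # real os/subprocess modules injected by default
tests = runner.GetTestsToRun([],     # no TESTs or BUILD_DIRs given explicitly
                             '',     # no -c/-a configurations
                             False)  # -b not given, so only the default configuration
if tests is None:
  sys.exit(1)                        # bad arguments were detected
python_tests, binary_tests = tests
sys.exit(runner.RunTests(python_tests, binary_tests))

The same two entry points, GetTestsToRun and RunTests, are what the injected-fake tests below call directly.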
527
test/run_tests_test.py
Executable file
@@ -0,0 +1,527 @@
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Tests for run_tests.py test runner script."""
|
||||||
|
|
||||||
|
__author__ = 'vladl@google.com (Vlad Losev)'
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), os.pardir))
|
||||||
|
import run_tests
|
||||||
|
|
||||||
|
|
||||||
|
class FakePath(object):
  """A fake os.path module for testing."""

  def __init__(self, current_dir=os.getcwd(), known_paths=None):
    self.current_dir = current_dir
    self.tree = {}
    self.path_separator = os.sep

    if known_paths:
      self._AddPaths(known_paths)

  def _AddPath(self, path):
    ends_with_slash = path.endswith('/')
    path = self.abspath(path)
    if ends_with_slash:
      path += self.path_separator
    name_list = path.split(self.path_separator)
    tree = self.tree
    for name in name_list[:-1]:
      if not name:
        continue
      if name in tree:
        tree = tree[name]
      else:
        tree[name] = {}
        tree = tree[name]

    name = name_list[-1]
    if name:
      if name in tree:
        assert tree[name] == 1
      else:
        tree[name] = 1

  def _AddPaths(self, paths):
    for path in paths:
      self._AddPath(path)

  def PathElement(self, path):
    """Returns an internal representation of directory tree entry for path."""
    tree = self.tree
    name_list = self.abspath(path).split(self.path_separator)
    for name in name_list:
      if not name:
        continue
      tree = tree.get(name, None)
      if tree is None:
        break

    return tree

  def abspath(self, path):
    return os.path.normpath(os.path.join(self.current_dir, path))

  def isfile(self, path):
    return self.PathElement(self.abspath(path)) == 1

  def isdir(self, path):
    return type(self.PathElement(self.abspath(path))) == type(dict())

  def basename(self, path):
    return os.path.basename(path)

  def dirname(self, path):
    return os.path.dirname(path)

  def join(self, *kargs):
    return os.path.join(*kargs)


class FakeOs(object):
  """A fake os module for testing."""
  P_WAIT = os.P_WAIT

  def __init__(self, fake_path_module):
    self.path = fake_path_module

    # Some methods/attributes are delegated to the real os module.
    self.environ = os.environ

  def listdir(self, path):
    assert self.path.isdir(path)
    return self.path.PathElement(path).iterkeys()

  def spawn(self, wait, executable, *kargs):
    assert wait == FakeOs.P_WAIT
    return self.spawn_impl(executable, kargs)


class GetTestsToRunTest(unittest.TestCase):
  """Exercises TestRunner.GetTestsToRun."""

  def AssertResultsEqual(self, results, expected):
    """Asserts results returned by GetTestsToRun equal to expected results."""

    def NormalizeResultPaths(paths):
      """Normalizes values returned by GetTestsToRun for comparison."""

      def NormalizeResultPair(pair):
        return (os.path.normpath(pair[0]), os.path.normpath(pair[1]))

      return (sorted(map(NormalizeResultPair, paths[0])),
              sorted(map(NormalizeResultPair, paths[1])))

    self.assertEqual(NormalizeResultPaths(results),
                     NormalizeResultPaths(expected),
                     'Incorrect set of tests %s returned vs %s expected' %
                     (results, expected))

  def setUp(self):
    self.fake_os = FakeOs(FakePath(
        current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
        known_paths=['scons/build/dbg/scons/gtest_unittest',
                     'scons/build/opt/scons/gtest_unittest',
                     'test/gtest_color_test.py']))
    self.fake_configurations = ['dbg', 'opt']
    self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
                                            injected_subprocess=None)

  def testBinaryTestsOnly(self):
    """Exercises GetTestsToRun with parameters designating binary tests only."""

    # A default build.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_unittest'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))

    # An explicitly specified directory.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'gtest_unittest'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))

    # A particular configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_unittest'],
            'other',
            False,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/other/scons',
           'scons/build/other/scons/gtest_unittest')]))

    # All available configurations
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_unittest'],
            'all',
            False,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
          ('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))

    # All built configurations (unbuilt don't cause failure).
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_unittest'],
            '',
            True,
            available_configurations=self.fake_configurations + ['unbuilt']),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
          ('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))

    # A combination of an explicit directory and a configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'gtest_unittest'],
            'opt',
            False,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
          ('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))

    # Same test specified in an explicit directory and via a configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'gtest_unittest'],
            'dbg',
            False,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))

    # All built configurations + explicit directory + explicit configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'gtest_unittest'],
            'opt',
            True,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest'),
          ('scons/build/opt/scons', 'scons/build/opt/scons/gtest_unittest')]))

  def testPythonTestsOnly(self):
    """Exercises GetTestsToRun with parameters designating Python tests only."""

    # A default build.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_color_test.py'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
         []))

    # An explicitly specified directory.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'test/gtest_color_test.py'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
         []))

    # A particular configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_color_test.py'],
            'other',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/other/scons', 'test/gtest_color_test.py')],
         []))

    # All available configurations
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['test/gtest_color_test.py'],
            'all',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
          ('scons/build/opt/scons', 'test/gtest_color_test.py')],
         []))

    # All built configurations (unbuilt don't cause failure).
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_color_test.py'],
            '',
            True,
            available_configurations=self.fake_configurations + ['unbuilt']),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
          ('scons/build/opt/scons', 'test/gtest_color_test.py')],
         []))

    # A combination of an explicit directory and a configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'gtest_color_test.py'],
            'opt',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
          ('scons/build/opt/scons', 'test/gtest_color_test.py')],
         []))

    # Same test specified in an explicit directory and via a configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'gtest_color_test.py'],
            'dbg',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
         []))

    # All built configurations + explicit directory + explicit configuration.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['scons/build/dbg/scons', 'gtest_color_test.py'],
            'opt',
            True,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py'),
          ('scons/build/opt/scons', 'test/gtest_color_test.py')],
         []))

  def testCombinationOfBinaryAndPythonTests(self):
    """Exercises GetTestsToRun with mixed binary/Python tests."""

    # Use only default configuration for this test.

    # Neither binary nor Python tests are specified so find all.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            [],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))

    # Specifying both binary and Python tests.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_unittest', 'gtest_color_test.py'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))

    # Specifying binary tests suppresses Python tests.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_unittest'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([],
         [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')]))

    # Specifying Python tests suppresses binary tests.
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            ['gtest_color_test.py'],
            '',
            False,
            available_configurations=self.fake_configurations),
        ([('scons/build/dbg/scons', 'test/gtest_color_test.py')],
         []))

  def testIgnoresNonTestFiles(self):
    """Verifies that GetTestsToRun ignores non-test files in the filesystem."""

    self.fake_os = FakeOs(FakePath(
        current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
        known_paths=['scons/build/dbg/scons/gtest_nontest',
                     'scons/build/opt/scons/gtest_nontest.exe',
                     'test/']))
    self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
                                            injected_subprocess=None)
    self.AssertResultsEqual(
        self.test_runner.GetTestsToRun(
            [],
            '',
            True,
            available_configurations=self.fake_configurations),
        ([], []))

  def testNonTestBinary(self):
    """Exercises GetTestsToRun with a non-test parameter."""

    self.assert_(
        not self.test_runner.GetTestsToRun(
            ['gtest_unittest_not_really'],
            '',
            False,
            available_configurations=self.fake_configurations))

  def testNonExistingPythonTest(self):
    """Exercises GetTestsToRun with a non-existent Python test parameter."""

    self.assert_(
        not self.test_runner.GetTestsToRun(
            ['nonexistent_test.py'],
            '',
            False,
            available_configurations=self.fake_configurations))


class RunTestsTest(unittest.TestCase):
  """Exercises TestRunner.RunTests."""

  def SpawnSuccess(self, unused_executable, unused_argv):
    """Fakes test success by returning 0 as an exit code."""

    self.num_spawn_calls += 1
    return 0

  def SpawnFailure(self, unused_executable, unused_argv):
    """Fakes test failure by returning 1 as an exit code."""

    self.num_spawn_calls += 1
    return 1

  def setUp(self):
    self.fake_os = FakeOs(FakePath(
        current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
        known_paths=['scons/build/dbg/scons/gtest_unittest',
                     'scons/build/opt/scons/gtest_unittest',
                     'test/gtest_color_test.py']))
    self.fake_configurations = ['dbg', 'opt']
    self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
                                            injected_subprocess=None)
    self.num_spawn_calls = 0  # The number of calls to spawn.

  def testRunPythonTestSuccess(self):
    """Exercises RunTests to handle a Python test success."""

    self.fake_os.spawn_impl = self.SpawnSuccess
    self.assertEqual(
        self.test_runner.RunTests(
            [('scons/build/dbg/scons', 'test/gtest_color_test.py')],
            []),
        0)
    self.assertEqual(self.num_spawn_calls, 1)

  def testRunBinaryTestSuccess(self):
    """Exercises RunTests to handle a binary test success."""

    self.fake_os.spawn_impl = self.SpawnSuccess
    self.assertEqual(
        self.test_runner.RunTests(
            [],
            [('scons/build/dbg/scons',
              'scons/build/dbg/scons/gtest_unittest')]),
        0)
    self.assertEqual(self.num_spawn_calls, 1)

  def testRunPythonTestFailure(self):
    """Exercises RunTests to handle a Python test failure."""

    self.fake_os.spawn_impl = self.SpawnFailure
    self.assertEqual(
        self.test_runner.RunTests(
            [('scons/build/dbg/scons', 'test/gtest_color_test.py')],
            []),
        1)
    self.assertEqual(self.num_spawn_calls, 1)

  def testRunBinaryTestFailure(self):
    """Exercises RunTests to handle a binary test failure."""

    self.fake_os.spawn_impl = self.SpawnFailure
    self.assertEqual(
        self.test_runner.RunTests(
            [],
            [('scons/build/dbg/scons',
              'scons/build/dbg/scons/gtest_unittest')]),
        1)
    self.assertEqual(self.num_spawn_calls, 1)

  def testCombinedTestSuccess(self):
    """Exercises RunTests to handle a success of both Python and binary test."""

    self.fake_os.spawn_impl = self.SpawnSuccess
    self.assertEqual(
        self.test_runner.RunTests(
            [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')],
            [('scons/build/dbg/scons',
              'scons/build/dbg/scons/gtest_unittest')]),
        0)
    self.assertEqual(self.num_spawn_calls, 2)

  def testCombinedTestSuccessAndFailure(self):
    """Exercises RunTests to handle a mix of successful and failed tests."""

    def SpawnImpl(executable, argv):
      self.num_spawn_calls += 1
      # Simulates failure of a Python test and success of a binary test.
      if '.py' in executable or '.py' in argv[0]:
        return 1
      else:
        return 0

    self.fake_os.spawn_impl = SpawnImpl
    self.assertEqual(
        self.test_runner.RunTests(
            [('scons/build/dbg/scons', 'scons/build/dbg/scons/gtest_unittest')],
            [('scons/build/dbg/scons',
              'scons/build/dbg/scons/gtest_unittest')]),
        0)
    self.assertEqual(self.num_spawn_calls, 2)


if __name__ == '__main__':
  unittest.main()
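The suite above can also be driven without the unittest.main() entry point; a small sketch (illustrative only, not part of this commit), assuming it is executed from the gtest root so that both run_tests.py and test/run_tests_test.py resolve:

# Illustrative sketch only -- not part of this commit.
import sys
import unittest

sys.path.append('test')  # assumed location of run_tests_test.py
import run_tests_test

# Load every TestCase defined in the module and run it with a verbose runner.
suite = unittest.defaultTestLoader.loadTestsFromModule(run_tests_test)
result = unittest.TextTestRunner(verbosity=2).run(suite)
if result.wasSuccessful():
  sys.exit(0)
else:
  sys.exit(1)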