Mirror of https://github.com/open-source-parsers/jsoncpp.git, synced 2024-12-26 02:00:50 +08:00.
Switched CRLF to LF in repo, and added svn:eol-style native. I might have missed a few files though. Just committing what I have so far.
commit dc0f736f59 (parent 139da63aef)
amalgamate.py · 294 changed lines
@@ -1,147 +1,147 @@
"""Amalgamate json-cpp library sources into a single source and header file.

Requires Python 2.6

Example of invocation (must be invoked from json-cpp top directory):
python amalgamate.py
"""
import os
import os.path
import sys

class AmalgamationFile:
    def __init__( self, top_dir ):
        self.top_dir = top_dir
        self.blocks = []

    def add_text( self, text ):
        if not text.endswith( '\n' ):
            text += '\n'
        self.blocks.append( text )

    def add_file( self, relative_input_path, wrap_in_comment=False ):
        def add_marker( prefix ):
            self.add_text( '' )
            self.add_text( '// ' + '/'*70 )
            self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) )
            self.add_text( '// ' + '/'*70 )
            self.add_text( '' )
        add_marker( 'Beginning' )
        f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' )
        content = f.read()
        if wrap_in_comment:
            content = '/*\n' + content + '\n*/'
        self.add_text( content )
        f.close()
        add_marker( 'End' )
        self.add_text( '\n\n\n\n' )

    def get_value( self ):
        return ''.join( self.blocks ).replace('\r\n','\n')

    def write_to( self, output_path ):
        output_dir = os.path.dirname( output_path )
        if output_dir and not os.path.isdir( output_dir ):
            os.makedirs( output_dir )
        f = open( output_path, 'wb' )
        f.write( self.get_value() )
        f.close()

def amalgamate_source( source_top_dir=None,
                       target_source_path=None,
                       header_include_path=None ):
    """Produces the amalgamated source.
    Parameters:
        source_top_dir: top directory of the json-cpp sources.
        target_source_path: output .cpp path.
        header_include_path: generated header path, relative to target_source_path.
    """
    print 'Amalgamating header...'
    header = AmalgamationFile( source_top_dir )
    header.add_text( '/// Json-cpp amalgamated header (http://jsoncpp.sourceforge.net/).' )
    header.add_text( '/// It is intended to be used with #include <%s>' % header_include_path )
    header.add_file( 'LICENSE', wrap_in_comment=True )
    header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' )
    header.add_text( '# define JSON_AMALGATED_H_INCLUDED' )
    header.add_text( '/// If defined, indicates that the source file is amalgamated' )
    header.add_text( '/// to prevent private header inclusion.' )
    header.add_text( '#define JSON_IS_AMALGATED' )
    header.add_file( 'include/json/config.h' )
    header.add_file( 'include/json/forwards.h' )
    header.add_file( 'include/json/features.h' )
    header.add_file( 'include/json/value.h' )
    header.add_file( 'include/json/reader.h' )
    header.add_file( 'include/json/writer.h' )
    header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' )

    target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
    print 'Writing amalgamated header to %r' % target_header_path
    header.write_to( target_header_path )

    base, ext = os.path.splitext( header_include_path )
    forward_header_include_path = base + '-forwards' + ext
    print 'Amalgamating forward header...'
    header = AmalgamationFile( source_top_dir )
    header.add_text( '/// Json-cpp amalgamated forward header (http://jsoncpp.sourceforge.net/).' )
    header.add_text( '/// It is intended to be used with #include <%s>' % forward_header_include_path )
    header.add_text( '/// This header provides forward declarations for all JsonCpp types.' )
    header.add_file( 'LICENSE', wrap_in_comment=True )
    header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
    header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' )
    header.add_text( '/// If defined, indicates that the source file is amalgamated' )
    header.add_text( '/// to prevent private header inclusion.' )
    header.add_text( '#define JSON_IS_AMALGATED' )
    header.add_file( 'include/json/config.h' )
    header.add_file( 'include/json/forwards.h' )
    header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )

    target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                               forward_header_include_path )
    print 'Writing amalgamated forward header to %r' % target_forward_header_path
    header.write_to( target_forward_header_path )

    print 'Amalgamating source...'
    source = AmalgamationFile( source_top_dir )
    source.add_text( '/// Json-cpp amalgamated source (http://jsoncpp.sourceforge.net/).' )
    source.add_text( '/// It is intended to be used with #include <%s>' % header_include_path )
    source.add_file( 'LICENSE', wrap_in_comment=True )
    source.add_text( '' )
    source.add_text( '#include <%s>' % header_include_path )
    source.add_text( '' )
    source.add_file( 'src/lib_json/json_tool.h' )
    source.add_file( 'src/lib_json/json_reader.cpp' )
    source.add_file( 'src/lib_json/json_batchallocator.h' )
    source.add_file( 'src/lib_json/json_valueiterator.inl' )
    source.add_file( 'src/lib_json/json_value.cpp' )
    source.add_file( 'src/lib_json/json_writer.cpp' )

    print 'Writing amalgamated source to %r' % target_source_path
    source.write_to( target_source_path )

def main():
    usage = """%prog [options]
Generate a single amalgamated source and header file from the sources.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp',
        help="""Output .cpp source path. [Default: %default]""")
    parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h',
        help="""Header include path. Used to include the header from the amalgamated source file. [Default: %default]""")
    parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(),
        help="""Source top-directory. [Default: %default]""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()

    msg = amalgamate_source( source_top_dir=options.top_dir,
                             target_source_path=options.target_source_path,
                             header_include_path=options.header_include_path )
    if msg:
        sys.stderr.write( msg + '\n' )
        sys.exit( 1 )
    else:
        print 'Source successfully amalgamated'

if __name__ == '__main__':
    main()
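A quick sketch of driving the module programmatically instead of through main(); the output locations below are just the script's own defaults spelled out:

# Hypothetical driver, run from a json-cpp checkout: produces
# dist/jsoncpp.cpp plus dist/json/json.h and dist/json/json-forwards.h.
import amalgamate
amalgamate.amalgamate_source( source_top_dir='.',
                              target_source_path='dist/jsoncpp.cpp',
                              header_include_path='json/json.h' )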
"""Amalgate json-cpp library sources into a single source and header file.
|
||||
|
||||
Requires Python 2.6
|
||||
|
||||
Example of invocation (must be invoked from json-cpp top directory):
|
||||
python amalgate.py
|
||||
"""
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
class AmalgamationFile:
|
||||
def __init__( self, top_dir ):
|
||||
self.top_dir = top_dir
|
||||
self.blocks = []
|
||||
|
||||
def add_text( self, text ):
|
||||
if not text.endswith( '\n' ):
|
||||
text += '\n'
|
||||
self.blocks.append( text )
|
||||
|
||||
def add_file( self, relative_input_path, wrap_in_comment=False ):
|
||||
def add_marker( prefix ):
|
||||
self.add_text( '' )
|
||||
self.add_text( '// ' + '/'*70 )
|
||||
self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) )
|
||||
self.add_text( '// ' + '/'*70 )
|
||||
self.add_text( '' )
|
||||
add_marker( 'Beginning' )
|
||||
f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' )
|
||||
content = f.read()
|
||||
if wrap_in_comment:
|
||||
content = '/*\n' + content + '\n*/'
|
||||
self.add_text( content )
|
||||
f.close()
|
||||
add_marker( 'End' )
|
||||
self.add_text( '\n\n\n\n' )
|
||||
|
||||
def get_value( self ):
|
||||
return ''.join( self.blocks ).replace('\r\n','\n')
|
||||
|
||||
def write_to( self, output_path ):
|
||||
output_dir = os.path.dirname( output_path )
|
||||
if output_dir and not os.path.isdir( output_dir ):
|
||||
os.makedirs( output_dir )
|
||||
f = open( output_path, 'wb' )
|
||||
f.write( self.get_value() )
|
||||
f.close()
|
||||
|
||||
def amalgamate_source( source_top_dir=None,
|
||||
target_source_path=None,
|
||||
header_include_path=None ):
|
||||
"""Produces amalgated source.
|
||||
Parameters:
|
||||
source_top_dir: top-directory
|
||||
target_source_path: output .cpp path
|
||||
header_include_path: generated header path relative to target_source_path.
|
||||
"""
|
||||
print 'Amalgating header...'
|
||||
header = AmalgamationFile( source_top_dir )
|
||||
header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' )
|
||||
header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
|
||||
header.add_file( 'LICENSE', wrap_in_comment=True )
|
||||
header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' )
|
||||
header.add_text( '# define JSON_AMALGATED_H_INCLUDED' )
|
||||
header.add_text( '/// If defined, indicates that the source file is amalgated' )
|
||||
header.add_text( '/// to prevent private header inclusion.' )
|
||||
header.add_text( '#define JSON_IS_AMALGATED' )
|
||||
header.add_file( 'include/json/config.h' )
|
||||
header.add_file( 'include/json/forwards.h' )
|
||||
header.add_file( 'include/json/features.h' )
|
||||
header.add_file( 'include/json/value.h' )
|
||||
header.add_file( 'include/json/reader.h' )
|
||||
header.add_file( 'include/json/writer.h' )
|
||||
header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' )
|
||||
|
||||
target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
|
||||
print 'Writing amalgated header to %r' % target_header_path
|
||||
header.write_to( target_header_path )
|
||||
|
||||
base, ext = os.path.splitext( header_include_path )
|
||||
forward_header_include_path = base + '-forwards' + ext
|
||||
print 'Amalgating forward header...'
|
||||
header = AmalgamationFile( source_top_dir )
|
||||
header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' )
|
||||
header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path )
|
||||
header.add_text( '/// This header provides forward declaration for all JsonCpp types.' )
|
||||
header.add_file( 'LICENSE', wrap_in_comment=True )
|
||||
header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
|
||||
header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' )
|
||||
header.add_text( '/// If defined, indicates that the source file is amalgated' )
|
||||
header.add_text( '/// to prevent private header inclusion.' )
|
||||
header.add_text( '#define JSON_IS_AMALGATED' )
|
||||
header.add_file( 'include/json/config.h' )
|
||||
header.add_file( 'include/json/forwards.h' )
|
||||
header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
|
||||
|
||||
target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
|
||||
forward_header_include_path )
|
||||
print 'Writing amalgated forward header to %r' % target_forward_header_path
|
||||
header.write_to( target_forward_header_path )
|
||||
|
||||
print 'Amalgating source...'
|
||||
source = AmalgamationFile( source_top_dir )
|
||||
source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' )
|
||||
source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
|
||||
source.add_file( 'LICENSE', wrap_in_comment=True )
|
||||
source.add_text( '' )
|
||||
source.add_text( '#include <%s>' % header_include_path )
|
||||
source.add_text( '' )
|
||||
source.add_file( 'src/lib_json\json_tool.h' )
|
||||
source.add_file( 'src/lib_json\json_reader.cpp' )
|
||||
source.add_file( 'src/lib_json\json_batchallocator.h' )
|
||||
source.add_file( 'src/lib_json\json_valueiterator.inl' )
|
||||
source.add_file( 'src/lib_json\json_value.cpp' )
|
||||
source.add_file( 'src/lib_json\json_writer.cpp' )
|
||||
|
||||
print 'Writing amalgated source to %r' % target_source_path
|
||||
source.write_to( target_source_path )
|
||||
|
||||
def main():
|
||||
usage = """%prog [options]
|
||||
Generate a single amalgated source and header file from the sources.
|
||||
"""
|
||||
from optparse import OptionParser
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp',
|
||||
help="""Output .cpp source path. [Default: %default]""")
|
||||
parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h',
|
||||
help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
|
||||
parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(),
|
||||
help="""Source top-directory. [Default: %default]""")
|
||||
parser.enable_interspersed_args()
|
||||
options, args = parser.parse_args()
|
||||
|
||||
msg = amalgamate_source( source_top_dir=options.top_dir,
|
||||
target_source_path=options.target_source_path,
|
||||
header_include_path=options.header_include_path )
|
||||
if msg:
|
||||
sys.stderr.write( msg + '\n' )
|
||||
sys.exit( 1 )
|
||||
else:
|
||||
print 'Source succesfully amalagated'
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
@@ -55,20 +55,20 @@ ALL = DIR | FILE | LINKS

_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )

def ant_pattern_to_re( ant_pattern ):
    """Generates a regular expression from the ant pattern.
    Matching convention:
    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
    *.py: match 'script.py' but not 'a/script.py'
    """
    rex = ['^']
    next_pos = 0
    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
    ## print 'Converting', ant_pattern
    for match in _ANT_RE.finditer( ant_pattern ):
        ## print 'Matched', match.group()
        ## print match.start(0), next_pos
        if match.start(0) != next_pos:
            raise ValueError( "Invalid ant pattern" )
        if match.group(1): # /**/
@@ -83,14 +83,14 @@ def ant_pattern_to_re( ant_pattern ):
            rex.append( sep_rex )
        else: # somepath
            rex.append( re.escape(match.group(6)) )
        next_pos = match.end()
    rex.append('$')
    return re.compile( ''.join( rex ) )

def _as_list( l ):
    if isinstance(l, basestring):
        return l.split()
    return l

def glob(dir_path,
         includes = '**/*',
@@ -99,8 +99,8 @@ def glob(dir_path,
         prune_dirs = prune_dirs,
         max_depth = 25):
    include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
    dir_path = dir_path.replace('/',os.path.sep)
    entry_type_filter = entry_type
@@ -117,37 +117,37 @@ def glob(dir_path,
            return True
        return False

    def glob_impl( root_dir_path ):
        child_dirs = [root_dir_path]
        while child_dirs:
            dir_path = child_dirs.pop()
            for entry in listdir( dir_path ):
                full_path = os.path.join( dir_path, entry )
                ## print 'Testing:', full_path,
                is_dir = os.path.isdir( full_path )
                if is_dir and not is_pruned_dir( entry ): # explore child directory ?
                    ## print '===> marked for recursion',
                    child_dirs.append( full_path )
                included = apply_filter( full_path, include_filter )
                rejected = apply_filter( full_path, exclude_filter )
                if not included or rejected: # do not include entry ?
                    ## print '=> not included or rejected'
                    continue
                link = os.path.islink( full_path )
                is_file = os.path.isfile( full_path )
                if not is_file and not is_dir:
                    ## print '=> unknown entry type'
                    continue
                if link:
                    entry_type = is_file and FILE_LINK or DIR_LINK
                else:
                    entry_type = is_file and FILE or DIR
                ## print '=> type: %d' % entry_type,
                if (entry_type & entry_type_filter) != 0:
                    ## print ' => KEEP'
                    yield os.path.join( dir_path, entry )
                ## else:
                ##     print ' => TYPE REJECTED'
    return list( glob_impl( dir_path ) )

@@ -155,47 +155,47 @@ if __name__ == "__main__":
    import unittest

    class AntPatternToRETest(unittest.TestCase):
##        def test_conversion( self ):
##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )

        def test_matching( self ):
            test_cases = [ ( 'path',
                             ['path'],
                             ['somepath', 'pathsuffix', '/path', '/path'] ),
                           ( '*.py',
                             ['source.py', 'source.ext.py', '.py'],
                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
                           ( '**/path',
                             ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
                           ( 'path/**',
                             ['path/a', 'path/path/a', 'path//'],
                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
                           ( '/**/path',
                             ['/path', '/a/path', '/a/b/path/path', '/path/path'],
                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
                           ( 'a/b',
                             ['a/b'],
                             ['somea/b', 'a/bsuffix', 'a/b/c'] ),
                           ( '**/*.py',
                             ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
                             ['script.pyc', 'script.pyo', 'a.py/b'] ),
                           ( 'src/**/*.py',
                             ['src/a.py', 'src/dir/a.py'],
                             ['a/src/a.py', '/src/a.py'] ),
                           ]
            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
                def local_path( paths ):
                    return [ p.replace('/',os.path.sep) for p in paths ]
                test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
            for ant_pattern, accepted_matches, rejected_matches in test_cases:
                rex = ant_pattern_to_re( ant_pattern )
                print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
                for accepted_match in accepted_matches:
                    print 'Accepted?:', accepted_match
                    self.assert_( rex.match( accepted_match ) is not None )
                for rejected_match in rejected_matches:
                    print 'Rejected?:', rejected_match
                    self.assert_( rex.match( rejected_match ) is None )

    unittest.main()
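A small sketch of the two helpers in use; the expected matches follow the docstring and test cases above, while the glob() call assumes a keyword invocation of the signature shown (directory contents are hypothetical):

# ant_pattern_to_re builds an anchored regex from the ant-style pattern.
rex = ant_pattern_to_re( 'src/**/*.py' )
print rex.match( 'src/dir/a.py' ) is not None   # True, per the test cases
print rex.match( '/src/a.py' ) is not None      # False, per the test cases

# glob() walks dir_path and keeps entries that match an include pattern,
# miss every exclude pattern, and have the requested entry type.
for path in glob( '.', includes = '**/*.py' ):
    print path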
@@ -1,63 +1,63 @@
import os.path
import sys

def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True
##
##
##
##def _do_fix( is_dry_run = True ):
##    from waftools import antglob
##    python_sources = antglob.glob( '.',
##        includes = '**/*.py **/wscript **/wscript_build',
##        excludes = antglob.default_excludes + './waf.py',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in python_sources:
##        _fix_python_source( path, is_dry_run )
##
##    cpp_sources = antglob.glob( '.',
##        includes = '**/*.cpp **/*.h **/*.inl',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in cpp_sources:
##        _fix_source_eol( path, is_dry_run )
##
##
##def dry_fix(context):
##    _do_fix( is_dry_run = True )
##
##def fix(context):
##    _do_fix( is_dry_run = False )
##
##def shutdown():
##    pass
##
##def check(context):
##    # Unit tests are run when "check" target is used
##    ut = UnitTest.unit_test()
##    ut.change_to_testfile_dir = True
##    ut.want_to_see_test_output = True
##    ut.want_to_see_test_error = True
##    ut.run()
##    ut.print_results()
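A dry-run sketch (the file name is hypothetical); with the default is_dry_run=True nothing is rewritten:

# Prints 'example.cpp =>  NEED FIX' when any line ends in CRLF; returns
# False only if the file cannot be opened. Pass is_dry_run=False to
# normalize the endings in place.
fix_source_eol( 'example.cpp' )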
@@ -1,93 +1,93 @@
"""Updates the license text in source files.
"""

# An existing license is found if the file starts with the string below,
# and ends with the first blank line.
LICENSE_BEGIN = "// Copyright "

BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

""".replace('\r\n','\n')

def update_license( path, dry_run, show_diff ):
    """Update the license statement in the specified file.
    Parameters:
      path: path of the C++ source file to update.
      dry_run: if True, just print the path of the file that would be updated,
               but don't change it.
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file.
    """
    with open( path, 'rt' ) as fin:
        original_text = fin.read().replace('\r\n','\n')
        newline = fin.newlines and fin.newlines[0] or '\n'
    if not original_text.startswith( LICENSE_BEGIN ):
        # No existing license found => prepend it
        new_text = BRIEF_LICENSE + original_text
    else:
        license_end_index = original_text.index( '\n\n' ) # search first blank line
        new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
    if original_text != new_text:
        if not dry_run:
            with open( path, 'wb' ) as fout:
                fout.write( new_text.replace('\n', newline ) )
        print 'Updated', path
        if show_diff:
            import difflib
            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
                                                   new_text.split('\n') ) )
        return True
    return False

def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
    """Updates license text in C++ source files found in directory source_dirs.
    Parameters:
      source_dirs: list of directories to scan for C++ sources. Directories
                   are scanned recursively.
      dry_run: if True, just print the path of the file that would be updated,
               but don't change it.
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file.
    """
    from devtools import antglob
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    for source_dir in source_dirs:
        cpp_sources = antglob.glob( source_dir,
            includes = '''**/*.h **/*.cpp **/*.inl''',
            prune_dirs = prune_dirs )
        for source in cpp_sources:
            update_license( source, dry_run, show_diff )

def main():
    usage = """%prog DIR [DIR2...]
Updates license text in the project sources found in the directories
specified on the command-line.

Example of call:
python devtools\licenseupdater.py include src -n --diff
=> Show the changes that would be made to the sources.

python devtools\licenseupdater.py include src
=> Update license statement on all sources in directories include/ and src/.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
        help="""Only show what files are updated, do not update the files""")
    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
        help="""On update, show change made to the file.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    update_license_in_source_directories( args, options.dry_run, options.show_diff )
    print 'Done'

if __name__ == '__main__':
    import sys
    import os.path
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    main()
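A preview-only sketch against one of the sources this commit touches:

# dry_run=True leaves the file untouched; show_diff=True prints a
# unified diff of the license header that would be written.
update_license( 'src/lib_json/json_value.cpp', dry_run=True, show_diff=True )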
"""Updates the license text in source file.
|
||||
"""
|
||||
|
||||
# An existing license is found if the file starts with the string below,
|
||||
# and ends with the first blank line.
|
||||
LICENSE_BEGIN = "// Copyright "
|
||||
|
||||
BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
""".replace('\r\n','\n')
|
||||
|
||||
def update_license( path, dry_run, show_diff ):
|
||||
"""Update the license statement in the specified file.
|
||||
Parameters:
|
||||
path: path of the C++ source file to update.
|
||||
dry_run: if True, just print the path of the file that would be updated,
|
||||
but don't change it.
|
||||
show_diff: if True, print the path of the file that would be modified,
|
||||
as well as the change made to the file.
|
||||
"""
|
||||
with open( path, 'rt' ) as fin:
|
||||
original_text = fin.read().replace('\r\n','\n')
|
||||
newline = fin.newlines and fin.newlines[0] or '\n'
|
||||
if not original_text.startswith( LICENSE_BEGIN ):
|
||||
# No existing license found => prepend it
|
||||
new_text = BRIEF_LICENSE + original_text
|
||||
else:
|
||||
license_end_index = original_text.index( '\n\n' ) # search first blank line
|
||||
new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
|
||||
if original_text != new_text:
|
||||
if not dry_run:
|
||||
with open( path, 'wb' ) as fout:
|
||||
fout.write( new_text.replace('\n', newline ) )
|
||||
print 'Updated', path
|
||||
if show_diff:
|
||||
import difflib
|
||||
print '\n'.join( difflib.unified_diff( original_text.split('\n'),
|
||||
new_text.split('\n') ) )
|
||||
return True
|
||||
return False
|
||||
|
||||
def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
|
||||
"""Updates license text in C++ source files found in directory source_dirs.
|
||||
Parameters:
|
||||
source_dirs: list of directory to scan for C++ sources. Directories are
|
||||
scanned recursively.
|
||||
dry_run: if True, just print the path of the file that would be updated,
|
||||
but don't change it.
|
||||
show_diff: if True, print the path of the file that would be modified,
|
||||
as well as the change made to the file.
|
||||
"""
|
||||
from devtools import antglob
|
||||
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
|
||||
for source_dir in source_dirs:
|
||||
cpp_sources = antglob.glob( source_dir,
|
||||
includes = '''**/*.h **/*.cpp **/*.inl''',
|
||||
prune_dirs = prune_dirs )
|
||||
for source in cpp_sources:
|
||||
update_license( source, dry_run, show_diff )
|
||||
|
||||
def main():
|
||||
usage = """%prog DIR [DIR2...]
|
||||
Updates license text in sources of the project in source files found
|
||||
in the directory specified on the command-line.
|
||||
|
||||
Example of call:
|
||||
python devtools\licenseupdater.py include src -n --diff
|
||||
=> Show change that would be made to the sources.
|
||||
|
||||
python devtools\licenseupdater.py include src
|
||||
=> Update license statement on all sources in directories include/ and src/.
|
||||
"""
|
||||
from optparse import OptionParser
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
|
||||
help="""Only show what files are updated, do not update the files""")
|
||||
parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
|
||||
help="""On update, show change made to the file.""")
|
||||
parser.enable_interspersed_args()
|
||||
options, args = parser.parse_args()
|
||||
update_license_in_source_directories( args, options.dry_run, options.show_diff )
|
||||
print 'Done'
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
import os.path
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
main()
|
||||
|
||||
|
@@ -1,53 +1,53 @@
import os.path
import gzip
import tarfile

TARGZ_DEFAULT_COMPRESSION_LEVEL = 9

def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
    """Parameters:
    tarball_path: output path of the .tar.gz file.
    sources: list of sources to include in the tarball, relative to the current directory.
    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is
        stripped from its path in the tarball.
    prefix_dir: all files stored in the tarball are placed under the sub-directory
        prefix_dir. Set to '' to make them children of the root.
    """
    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
    def archive_name( path ):
        """Makes path relative to base_dir."""
        path = os.path.normpath( os.path.abspath( path ) )
        common_path = os.path.commonprefix( (base_dir, path) )
        archive_name = path[len(common_path):]
        if os.path.isabs( archive_name ):
            archive_name = archive_name[1:]
        return os.path.join( prefix_dir, archive_name )
    def visit(tar, dirname, names):
        for name in names:
            path = os.path.join(dirname, name)
            if os.path.isfile(path):
                path_in_tar = archive_name(path)
                tar.add(path, path_in_tar )
    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
    tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
    try:
        for source in sources:
            source_path = source
            if os.path.isdir( source ):
                os.path.walk(source_path, visit, tar)
            else:
                path_in_tar = archive_name(source_path)
                tar.add(source_path, path_in_tar ) # filename, arcname
    finally:
        tar.close()

def decompress( tarball_path, base_dir ):
    """Decompress the gzipped tarball into directory base_dir.
    """
    # !!! This class method is not documented in the online doc
    # nor is bz2open!
    tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
    try:
        tar.extractall( base_dir )
    finally:
        tar.close()
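A sketch of packing and unpacking a release (paths hypothetical):

# Pack dist/ so that every entry inside the archive is rooted under
# the jsoncpp-src/ prefix; base_dir='dist' strips the dist/ component.
make_tarball( 'jsoncpp-src.tar.gz', ['dist'], 'dist', prefix_dir='jsoncpp-src' )
# The inverse: extract the gzipped tarball under build/ .
decompress( 'jsoncpp-src.tar.gz', 'build' )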
@@ -1,53 +1,53 @@
import fnmatch
import os

def generate( env ):
    def Glob( env, includes = None, excludes = None, dir = '.' ):
        """Adds a Glob( includes = Split( '*' ), excludes = None, dir = '.' )
        helper function to the environment.

        Globs the file-system files.

        includes: list of file name patterns included in the return list when matched.
        excludes: list of file name patterns excluded from the return list.

        Example:
        sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
        """
        def filterFilename(path):
            abs_path = os.path.join( dir, path )
            if not os.path.isfile(abs_path):
                return 0
            fn = os.path.basename(path)
            match = 0
            for include in includes:
                if fnmatch.fnmatchcase( fn, include ):
                    match = 1
                    break
            if match == 1 and not excludes is None:
                for exclude in excludes:
                    if fnmatch.fnmatchcase( fn, exclude ):
                        match = 0
                        break
            return match
        if includes is None:
            includes = ('*',)
        elif type(includes) in ( type(''), type(u'') ):
            includes = (includes,)
        if type(excludes) in ( type(''), type(u'') ):
            excludes = (excludes,)
        dir = env.Dir(dir).abspath
        paths = os.listdir( dir )
        def makeAbsFileNode( path ):
            return env.File( os.path.join( dir, path ) )
        nodes = filter( filterFilename, paths )
        return map( makeAbsFileNode, nodes )

    from SCons.Script import Environment
    Environment.Glob = Glob

def exists(env):
    """
    Tool always exists.
    """
    return True
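For context, SCons loads a tool module like this by name from a toolpath. A hypothetical SConstruct fragment, assuming the module is saved as scons-tools/globtool.py (SCons injects Environment into SConstruct files):

# Load the tool, which monkey-patches Glob onto the Environment class,
# then glob C++ sources under the #src directory.
env = Environment( tools = ['default', 'globtool'], toolpath = ['scons-tools'] )
sources = env.Glob( includes = ('*.cpp', '*.h'), dir = '#src' )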
@@ -1,3 +1,3 @@
Test suite from http://json.org/JSON_checker/.

If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files.
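That pass*/fail* naming convention is easy to script against. A minimal sketch using Python's own json module as a stand-in for the parser under test (the directory location is an assumption, and Python's json will not agree with JSON_checker on every edge case):

from glob import glob
import json
import os.path

for path in glob( 'test/jsonchecker/*.json' ):
    name = os.path.basename( path )
    try:
        json.load( open( path ) )
        accepted = True
    except ValueError:
        accepted = False
    # pass*.json files must be accepted, fail*.json files rejected.
    if accepted != name.startswith( 'pass' ):
        print 'UNEXPECTED:', name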
@@ -1,73 +1,73 @@
import sys
import os
import os.path
import subprocess
from glob import glob
import optparse

VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'

class TestProxy(object):
    def __init__( self, test_exe_path, use_valgrind=False ):
        self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
        self.use_valgrind = use_valgrind

    def run( self, options ):
        if self.use_valgrind:
            cmd = VALGRIND_CMD.split()
        else:
            cmd = []
        cmd.extend( [self.test_exe_path, '--test-auto'] + options )
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        stdout = process.communicate()[0]
        if process.returncode:
            return False, stdout
        return True, stdout

def runAllTests( exe_path, use_valgrind=False ):
    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
    status, test_names = test_proxy.run( ['--list-tests'] )
    if not status:
        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
        return 1
    test_names = [name.strip() for name in test_names.strip().split('\n')]
    failures = []
    for name in test_names:
        print 'TESTING %s:' % name,
        succeed, result = test_proxy.run( ['--test', name] )
        if succeed:
            print 'OK'
        else:
            failures.append( (name, result) )
            print 'FAILED'
    failed_count = len(failures)
    pass_count = len(test_names) - failed_count
    if failed_count:
        print
        for name, result in failures:
            print result
        print '%d/%d tests passed (%d failure(s))' % (
            pass_count, len(test_names), failed_count)
        return 1
    else:
        print 'All %d tests passed' % len(test_names)
        return 0

def main():
    from optparse import OptionParser
    parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
    parser.add_option("--valgrind",
        action="store_true", dest="valgrind", default=False,
        help="run all the tests using valgrind to detect memory leaks")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()

    if len(args) != 1:
        parser.error( 'Must provide the path to the test_lib_json executable.' )
        sys.exit( 1 )

    exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
    sys.exit( exit_code )

if __name__ == '__main__':
    main()
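The driver can also be used programmatically; a sketch with a placeholder executable path:

# runAllTests() asks the executable for its test list, then runs each
# test in its own child process, so one crash cannot abort the whole run.
exit_code = runAllTests( 'path/to/test_lib_json', use_valgrind=False )
print 'suite exit code:', exit_code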