#!/usr/bin/env python
# coding=utf-8

import os, re, argparse, sys
import posixpath
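
# Header include lint: walks the engine sources under the given root, checks every
# quoted '#include "..."' / '#import "..."' directive in C/C++/Objective-C files,
# and reports include paths that should instead be written relative to the source
# root. Pass -f to rewrite the offending files in place.
#
#   usage: <this-script> [-f|--fix] [root]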


class Path:
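    """Path helpers built on posixpath/os.path that normalize every result
    to forward-slash separators."""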

    @staticmethod
    def _forward_slash(p):
        return p.replace(os.path.sep, '/')

    @staticmethod
    def join(p, *paths):
        return Path._forward_slash(posixpath.join(p, *paths))

    @staticmethod
    def abspath(p):
        return Path._forward_slash(posixpath.abspath(p))

    @staticmethod
    def normpath(p):
        return Path._forward_slash(posixpath.normpath(p))

    @staticmethod
    def relpath(p, s):
        return Path._forward_slash(posixpath.relpath(p, s))

    @staticmethod
    def exists(p):
        return os.path.exists(p)

    @staticmethod
    def basename(p):
        return posixpath.basename(p)

    @staticmethod
    def extname(p):
        return posixpath.splitext(p)[1]

    @staticmethod
    def dirname(p):
        return posixpath.dirname(p)


class LintContext:
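    """Scans the tree under `root` for sources and headers, records the headers
    whose base filename is unique, and tracks error counters for lint()."""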

    def __init__(self, root, fix):
        self.exclude = [
            # exclude some platform-specific files
            'extensions/spine/Json.c',
            'extensions/spine/PathConstraint.h',
            'extensions/spine/SkeletonJson.c',
            'extensions/spine/SkeletonBinary.c',
            'extensions/spine/kvec.h'
        ]
        self.source_exts = ['.h', '.hpp', '.inl', '.c', '.cpp', '.m', '.mm']
        self.header_exts = ['.h', '.hpp', '.inl']
        self.root = root
        self.fix = fix
        self.errors = 0
        self.error_files = 0
        self._scan_source(root)
        self._scan_unique_headers(self.headers)

    def _scan_source(self, top):
        # find all sources and headers relative to self.root
        self.sources = []
        self.headers = []
        for root, dirnames, filenames in os.walk(top):
            for f in filenames:
                p = Path.relpath(Path.join(root, f), top)
                if self._source_to_lint(p):
                    self.sources.append(p)
                if self._is_header(p):
                    self.headers.append(p)

    def _source_to_lint(self, p):
        if p in self.exclude:
            return False
        ext = Path.extname(p)
        return ext in self.source_exts

    def _is_header(self, name):
        return Path.extname(name) in self.header_exts

    # find headers that have unique base filenames;
    # this is used to resolve included headers that live in other search paths
    def _scan_unique_headers(self, headers):
        known = {}
        for f in headers:
            name = Path.basename(f)
            if name in known:
                known[name].append(f)
            else:
                known[name] = [f]
        uniq = {}
        for k, v in known.items():
            if len(v) == 1:
                uniq[k] = v[0]
        self.uniq = uniq

    def in_search_path(self, filename):
        return Path.exists(Path.join(self.root, filename))

    def find_uniq(self, basename):
        return self.uniq[basename] if basename in self.uniq else None

    def get_include_path(self, original, directory):
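        """Resolve a quoted include path: first by unique base filename among the
        scanned headers, then relative to the including file's directory.
        Returns a root-relative path, or None if it cannot be resolved."""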
        # 1. try searching the unique cocos header names
        p = self.find_uniq(Path.basename(original))
        if not p:
            # 2. try searching the current header's directory
            p = Path.normpath(Path.join(directory, original))
            if not self.in_search_path(p):
                return None
        return p


def fix(match, cwd, ctx, fixed):
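    """re.sub callback used by lint_one: keeps the original '#include'/'#import'
    directive when its quoted path already resolves against the search root,
    otherwise returns a rewritten directive with a root-relative path and
    records the rewrite in `fixed`."""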
    h = match.group(2)
    # return original if already in search path (cocos directory)
    if ctx.in_search_path(h):
        return match.group(0)

    p = ctx.get_include_path(h, cwd)
    if not p:
        return match.group(0)

    ctx.errors += 1
    fix = '#%s "%s"' % (match.group(1), p)
    fixed[match.group(0)] = fix
    return fix


def lint_one(header, ctx):
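    """Lint one source file: rewrite its quoted includes in memory and, when
    ctx.fix is set, write the result back; otherwise report what should change."""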
    cwd = Path.dirname(header)
    if not cwd:
        return

    filename = Path.join(ctx.root, header)
    with open(filename, 'r') as f:
        content = f.read()
    fixed = {}
    # check all '#include "header"' (and '#import "header"') directives
    linted = re.sub(r'#\s*(include|import)\s*"(.*)"', lambda m: fix(m, cwd, ctx, fixed), content)
    if content != linted:
        ctx.error_files += 1
        if ctx.fix:
            with open(filename, 'w') as f:
                f.write(linted)
            print('%s: %d error(s) fixed' % (header, len(fixed)))
        else:
            print('%s:' % (header))
            for k, v in fixed.items():
                print('\t%s should be %s' % (k, v))


def lint(ctx):
    print('Checking headers in: %s' % ctx.root)
    for f in ctx.sources:
        lint_one(f, ctx)

    print('Total: %d errors in %d files' % (ctx.errors, ctx.error_files))
    if ctx.errors > 0:
        if ctx.fix:
            print('All fixed')
        else:
            print('Rerun this script with -f to fix these errors')
            sys.exit(1)


def main():
    default_root = Path.abspath(Path.join(Path.dirname(__file__), '..', '..'))
    parser = argparse.ArgumentParser(description='The axys headers lint script.')
    parser.add_argument('-f', '--fix', action='store_true', help='fix the headers while linting')
    parser.add_argument('root', nargs='?', default=default_root, help='path to the axys source root directory')
    args = parser.parse_args()

    lint(LintContext(Path.join(args.root, 'axys'), args.fix))


if __name__ == '__main__':
    main()