Merge pull request #2854 from theotherjimmy/find-duplicates

[Tools] Find and report duplicates
Sam Grove 2016-09-30 18:30:00 -05:00 committed by GitHub
commit 872363d809
5 changed files with 92 additions and 1 deletion


@@ -452,6 +452,8 @@ def build_project(src_paths, build_path, target, toolchain_name,
    # Link Program
    res, _ = toolchain.link_program(resources, build_path, name)
    resources.detect_duplicates(toolchain)
    if report != None:
        end = time()
        cur_result["elapsed_time"] = end - start
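The check runs immediately after linking, so collisions are reported once per build; build_project itself ignores the returned count. A minimal sketch of how a caller could escalate the report into a hard failure (hypothetical, not part of this PR):

    duplicate_count = resources.detect_duplicates(toolchain)
    if duplicate_count > 0:
        # Hypothetical strict mode: refuse to build with ambiguous names.
        raise Exception("%d ambiguous file name(s) found" % duplicate_count)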


@@ -0,0 +1,26 @@
from os import walk
from os.path import join, abspath, dirname, basename, splitext
import sys

ROOT = abspath(join(dirname(__file__), "..", ".."))
sys.path.insert(0, ROOT)

from tools.toolchains.gcc import GCC_ARM
from tools.targets import TARGET_MAP
from argparse import ArgumentParser

if __name__ == "__main__":
    parser = ArgumentParser(
        description="Find duplicate file names within a directory structure")
    parser.add_argument("dirs", help="Directories to search for duplicate file names",
                        nargs="*")
    parser.add_argument("--silent", help="Suppress printing of filenames, "
                        "just return number of duplicates", action="store_true")
    args = parser.parse_args()

    toolchain = GCC_ARM(TARGET_MAP["K64F"])
    resources = sum([toolchain.scan_resources(d) for d in args.dirs], None)
    scanned_files = {}
    exit(resources.detect_duplicates(toolchain))
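For reference, a hypothetical invocation; the diff does not name the new file, so the path below is an assumption:

    python tools/find_duplicates.py drivers/ hal/

The script instantiates a GCC_ARM/K64F toolchain purely to reuse its scan_resources machinery, and its exit status is the duplicate count returned by detect_duplicates, so any non-zero status signals at least one ambiguous file name.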


@@ -3,6 +3,7 @@ import sys
import os
from string import printable
from copy import deepcopy
from mock import MagicMock
from hypothesis import given
from hypothesis.strategies import text, lists, fixed_dictionaries
@@ -10,7 +11,8 @@ ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..",
".."))
sys.path.insert(0, ROOT)
from tools.toolchains import TOOLCHAIN_CLASSES, LEGACY_TOOLCHAIN_NAMES
from tools.toolchains import TOOLCHAIN_CLASSES, LEGACY_TOOLCHAIN_NAMES,\
    Resources
from tools.targets import TARGET_MAP
def test_instantiation():
@@ -96,3 +98,27 @@ def test_toolchain_profile_asm(profile, source_file):
        "Toolchain %s did not propagate arg %s" % (toolchain.name,
                                                   parameter)

    for name, Class in TOOLCHAIN_CLASSES.items():
        CLS = Class(TARGET_MAP["K64F"])
        assert name == CLS.name or name == LEGACY_TOOLCHAIN_NAMES[CLS.name]

@given(lists(text(alphabet=ALPHABET, min_size=1), min_size=1))
def test_detect_duplicates(filenames):
    c_sources = [os.path.join(name, "dupe.c") for name in filenames]
    s_sources = [os.path.join(name, "dupe.s") for name in filenames]
    cpp_sources = [os.path.join(name, "dupe.cpp") for name in filenames]
    with MagicMock() as notify:
        toolchain = TOOLCHAIN_CLASSES["ARM"](TARGET_MAP["K64F"], notify=notify)
        res = Resources()
        res.c_sources = c_sources
        res.s_sources = s_sources
        res.cpp_sources = cpp_sources
        assert res.detect_duplicates(toolchain) == 1,\
            "Not enough duplicates found"
        _, (notification, _), _ = notify.mock_calls[1]
        assert "dupe.o" in notification["message"]
        assert "dupe.s" in notification["message"]
        assert "dupe.c" in notification["message"]
        assert "dupe.cpp" in notification["message"]


@@ -120,6 +120,43 @@ class Resources:
        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # Record the full path, not the basename, so that the same
            # header name in different locations registers as a duplicate.
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.iteritems():
            if len(filenames) > 1:
                count += 1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.iteritems():
            if len(locations) > 1:
                count += 1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %
                    (headername, " ".join(locations)))
        return count

    def relative_to(self, base, dot=False):
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
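
Taken together, a minimal sketch of the new API in isolation; it assumes an importable mbed checkout, and FakeToolchain is a stand-in for the notification interface, not a real class:

    from tools.toolchains import Resources

    class FakeToolchain(object):
        """Stand-in exposing only the tool_error hook detect_duplicates uses."""
        def tool_error(self, message):
            print(message)

    res = Resources()
    res.c_sources = ["app/main.c", "lib/main.c"]  # both would compile to main.o
    assert res.detect_duplicates(FakeToolchain()) == 1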