mirror of https://github.com/ARMmbed/mbed-os.git
Deduplicate find-duplicate functionality
parent a85a384973
commit 9f7c82a37f
@@ -22,25 +22,5 @@ if __name__ == "__main__":
-    scanned_files = {}
-    for r in [resources] + resources.features.values():
-        for file in r.c_sources + r.s_sources + r.cpp_sources + r.objects + r.libraries + r.hex_files + r.bin_files:
-            scanned_files.setdefault(basename(file), [])
-            scanned_files[basename(file)].append(file)
-            filenameparts = splitext(file)
-            if filenameparts[-1] in ["c", "cpp", "s", "S"]:
-                filenameparts[-1] = "o"
-                file = ".".join(filenamparts)
-                scanned_files.setdefault(basename(file), [])
-                scanned_files[basename(file)].append(file)
-
-    count_dupe = 0
-    for key, value in scanned_files.iteritems():
-        if len(value) > 1:
-            count_dupe += 1
-            if not args.silent:
-                print("Multiple files found with name {}".format(key))
-                for file in value:
-                    print(" {}".format(file))
-
-    exit(count_dupe)
+    exit(resources.detect_duplicates())
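The block removed above built the duplicate map inline in the script: every scanned file was keyed by basename, each .c/.cpp/.s/.S source was additionally keyed by the object name it would compile to, and the number of clashing names became the exit code. A minimal, self-contained sketch of that idea (the count_duplicate_basenames helper and the example paths are illustrative, not part of the mbed-os script):

    from os.path import basename, splitext

    def count_duplicate_basenames(paths):
        # Map each basename (and the ".o" name a source would compile to) to the
        # paths that produce it, then count names claimed by more than one path.
        seen = {}
        for path in paths:
            seen.setdefault(basename(path), []).append(path)
            root, ext = splitext(path)
            if ext.lstrip(".") in ("c", "cpp", "s", "S"):
                seen.setdefault(basename(root) + ".o", []).append(path)
        duplicates = 0
        for name, origins in seen.items():
            if len(origins) > 1:
                duplicates += 1
                print("Multiple files found with name {}".format(name))
                for origin in origins:
                    print("  {}".format(origin))
        return duplicates

    if __name__ == "__main__":
        # Example input, purely illustrative.
        exit(count_duplicate_basenames(["a/main.c", "b/main.c", "b/main.o", "c/util.cpp"]))

After this commit the script no longer carries that logic itself; it delegates to Resources.detect_duplicates() and uses its return value as the exit status.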
@ -120,27 +120,35 @@ class Resources:
|
||||||
|
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def detect_duplicates(self):
|
def _collect_duplicates(self, dupe_dict, dupe_headers):
|
||||||
dupe_dict = dict()
|
|
||||||
for filename in self.s_sources + self.c_sources + self.cpp_sources:
|
for filename in self.s_sources + self.c_sources + self.cpp_sources:
|
||||||
objname, _ = splitext(basename(filename))
|
objname, _ = splitext(basename(filename))
|
||||||
dupe_dict.setdefault(objname, [])
|
dupe_dict.setdefault(objname, set())
|
||||||
dupe_dict[objname].append(filename)
|
dupe_dict[objname] |= set([filename])
|
||||||
|
for filename in self.headers:
|
||||||
|
headername = basename(filename)
|
||||||
|
dupe_headers.setdefault(headername, set())
|
||||||
|
dupe_headers[headername] |= set([headername])
|
||||||
|
for res in self.features.values():
|
||||||
|
res._collect_duplicates(dupe_dict, dupe_headers)
|
||||||
|
return dupe_dict, dupe_headers
|
||||||
|
|
||||||
|
def detect_duplicates(self):
|
||||||
|
count = 0
|
||||||
|
dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
|
||||||
for objname, filenames in dupe_dict.iteritems():
|
for objname, filenames in dupe_dict.iteritems():
|
||||||
if len(filenames) > 1:
|
if len(filenames) > 1:
|
||||||
|
count+=1
|
||||||
print "[ERROR] Object file %s.o is not unique!"\
|
print "[ERROR] Object file %s.o is not unique!"\
|
||||||
" It could be made from:" % objname
|
" It could be made from:" % objname
|
||||||
print columnate(filenames)
|
print columnate(filenames)
|
||||||
dupe_headers = dict()
|
|
||||||
for filename in self.headers:
|
|
||||||
headername = basename(filename)
|
|
||||||
dupe_headers.setdefault(headername, [])
|
|
||||||
dupe_headers[headername].append(headername)
|
|
||||||
for headername, locations in dupe_headers.iteritems():
|
for headername, locations in dupe_headers.iteritems():
|
||||||
if len(filenames) > 1:
|
if len(locations) > 1:
|
||||||
|
count+=1
|
||||||
print "[ERROR] Header file %s is not unique! It could be:" %\
|
print "[ERROR] Header file %s is not unique! It could be:" %\
|
||||||
headername
|
headername
|
||||||
print columnate(locations)
|
print columnate(locations)
|
||||||
|
return count
|
||||||
|
|
||||||
|
|
||||||
def relative_to(self, base, dot=False):
|
def relative_to(self, base, dot=False):
|
||||||
|
|
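The new shape separates collection from reporting: _collect_duplicates folds one resource tree, and recursively its features, into shared name-to-paths dictionaries, while detect_duplicates walks those dictionaries once and returns the number of clashes for the caller to use as an exit code. A rough stand-alone sketch of the same pattern (MiniResources and its fields are stand-ins, not the real Resources API; it avoids the Python 2-only iteritems/print-statement forms shown in the diff):

    from os.path import basename, splitext

    class MiniResources(object):
        # Just enough fields to illustrate the recursive collection.
        def __init__(self, c_sources=None, headers=None, features=None):
            self.c_sources = c_sources or []
            self.headers = headers or []
            self.features = features or {}  # feature name -> nested MiniResources

        def _collect_duplicates(self, dupe_dict, dupe_headers):
            # Sources clash when they would compile to the same object-file stem.
            for filename in self.c_sources:
                objname, _ = splitext(basename(filename))
                dupe_dict.setdefault(objname, set()).add(filename)
            # Headers clash when two paths share a basename.
            for filename in self.headers:
                dupe_headers.setdefault(basename(filename), set()).add(filename)
            # Recurse into feature-specific resources so everything lands in one map.
            for res in self.features.values():
                res._collect_duplicates(dupe_dict, dupe_headers)
            return dupe_dict, dupe_headers

        def detect_duplicates(self):
            count = 0
            dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
            for objname, filenames in dupe_dict.items():
                if len(filenames) > 1:
                    count += 1
                    print("[ERROR] Object file %s.o is not unique! It could be made from:" % objname)
                    print("\n".join(sorted(filenames)))
            for headername, locations in dupe_headers.items():
                if len(locations) > 1:
                    count += 1
                    print("[ERROR] Header file %s is not unique! It could be:" % headername)
                    print("\n".join(sorted(locations)))
            return count

    if __name__ == "__main__":
        # Illustrative example: two sources that would both produce main.o.
        res = MiniResources(
            c_sources=["targets/a/main.c", "targets/b/main.c"],
            features={"BLE": MiniResources(headers=["features/ble/config.h"])},
        )
        exit(res.detect_duplicates())  # non-zero exit status when duplicates exist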