Python2+3: working through many has_key exceptions

pull/5848/head
Jimmy Brisson 2018-01-11 15:54:47 -06:00
parent 68737f2762
commit 380ecb1b0e
5 changed files with 24 additions and 25 deletions

View File

@ -32,7 +32,7 @@ from jsonschema import Draft4Validator, RefResolver
from tools.utils import json_file_to_dict, intelhex_offset
from tools.arm_pack_manager import Cache
from tools.targets import CUMULATIVE_ATTRIBUTES, TARGET_MAP, \
generate_py_target, get_resolution_order
generate_py_target, get_resolution_order, Target
PATH_OVERRIDES = set(["target.bootloader_img"])
BOOTLOADER_OVERRIDES = set(["target.bootloader_img", "target.restrict_size",
@ -432,15 +432,14 @@ class Config(object):
self.lib_config_data = {}
# Make sure that each config is processed only once
self.processed_configs = {}
if isinstance(tgt, basestring):
if isinstance(tgt, Target):
self.target = tgt
else:
if tgt in TARGET_MAP:
self.target = TARGET_MAP[tgt]
else:
self.target = generate_py_target(
self.app_config_data.get("custom_targets", {}), tgt)
else:
self.target = tgt
self.target = deepcopy(self.target)
self.target_labels = self.target.labels
for override in BOOTLOADER_OVERRIDES:
@ -465,7 +464,7 @@ class Config(object):
continue
full_path = os.path.normpath(os.path.abspath(config_file))
# Check that we didn't already process this file
if self.processed_configs.has_key(full_path):
if full_path in self.processed_configs:
continue
self.processed_configs[full_path] = True
# Read the library configuration and add a "__full_config_path"
@ -496,7 +495,7 @@ class Config(object):
# If there's already a configuration for a module with the same
# name, exit with error
if self.lib_config_data.has_key(cfg["name"]):
if cfg["name"] in self.lib_config_data:
raise ConfigException(
"Library name '%s' is not unique (defined in '%s' and '%s')"
% (cfg["name"], full_path,
@ -729,7 +728,7 @@ class Config(object):
unit_kind,
label)))))
for cumulatives in self.cumulative_overrides.itervalues():
for cumulatives in self.cumulative_overrides.values():
cumulatives.update_target(self.target)
return params

View File

@ -30,14 +30,14 @@ def hook_tool(function):
return function(t_self, *args, **kwargs)
_RUNNING_HOOKS[tool] = True
# If this tool isn't hooked, return original function
if not _HOOKS.has_key(tool):
if tool not in _HOOKS:
res = function(t_self, *args, **kwargs)
_RUNNING_HOOKS[tool] = False
return res
tooldesc = _HOOKS[tool]
setattr(t_self, tool_flag, False)
# If there is a replace hook, execute the replacement instead
if tooldesc.has_key("replace"):
if "replace" in tooldesc:
res = tooldesc["replace"](t_self, *args, **kwargs)
# If the replacement has set the "done" flag, exit now
# Otherwise continue as usual
@ -45,12 +45,12 @@ def hook_tool(function):
_RUNNING_HOOKS[tool] = False
return res
# Execute pre-function before main function if specified
if tooldesc.has_key("pre"):
if "pre" in tooldesc:
tooldesc["pre"](t_self, *args, **kwargs)
# Execute the main function now
res = function(t_self, *args, **kwargs)
# Execute post-function after main function if specified
if tooldesc.has_key("post"):
if "post" in tooldesc:
post_res = tooldesc["post"](t_self, *args, **kwargs)
_RUNNING_HOOKS[tool] = False
return post_res or res
@ -173,7 +173,7 @@ class Hook(object):
hook_type - one of the _HOOK_TYPES
cmdline - the initial command line
"""
if self._cmdline_hooks.has_key(hook_type):
if hook_type in self._cmdline_hooks:
cmdline = self._cmdline_hooks[hook_type](
self.toolchain.__class__.__name__, cmdline)
return cmdline

View File

@ -246,7 +246,7 @@ if __name__ == '__main__':
search_path = TOOLCHAIN_PATHS[toolchain] or "No path set"
args_error(parser, "Could not find executable for %s.\n"
"Currently set search path: %s"
%(toolchain,search_path))
%(toolchain, search_path))
# Test
build_data_blob = {} if options.build_data else None

View File

@ -214,16 +214,16 @@ class Target(namedtuple("Target", "name json_data resolution_order resolution_or
# inheritance level, left to right order to figure out all the
# other classes that change the definition by adding or removing
# elements
for idx in xrange(self.resolution_order[def_idx][1] - 1, -1, -1):
for idx in range(self.resolution_order[def_idx][1] - 1, -1, -1):
same_level_targets = [tar[0] for tar in self.resolution_order
if tar[1] == idx]
for tar in same_level_targets:
data = tdata[tar]
# Do we have anything to add ?
if data.has_key(attrname + "_add"):
if (attrname + "_add") in data:
starting_value.extend(data[attrname + "_add"])
# Do we have anything to remove ?
if data.has_key(attrname + "_remove"):
if (attrname + "_remove") in data:
# Macros can be defined either without a value (MACRO)
# or with a value (MACRO=10). When removing, we specify
# only the name of the macro, without the value. So we
@ -335,7 +335,7 @@ class Target(namedtuple("Target", "name json_data resolution_order resolution_or
# "class_name" must refer to a class in this file, so check if the
# class exists
mdata = self.get_module_data()
if not mdata.has_key(class_name) or \
if class_name not in mdata or \
not inspect.isclass(mdata[class_name]):
raise HookError(
("Class '%s' required by '%s' in target '%s'"

View File

@ -590,7 +590,7 @@ class mbedToolchain:
if not d or not exists(d):
return True
if not self.stat_cache.has_key(d):
if d not in self.stat_cache:
self.stat_cache[d] = stat(d).st_mtime
if self.stat_cache[d] >= target_mod_time:
@ -801,7 +801,7 @@ class mbedToolchain:
files_paths.remove(source)
for source in files_paths:
if resources is not None and resources.file_basepath.has_key(source):
if resources is not None and source in resources.file_basepath:
relative_path = relpath(source, resources.file_basepath[source])
elif rel_path is not None:
relative_path = relpath(source, rel_path)
@ -831,7 +831,7 @@ class mbedToolchain:
def get_inc_file(self, includes):
include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
if not exists(include_file):
with open(include_file, "wb") as f:
with open(include_file, "w") as f:
cmd_list = []
for c in includes:
if c:
@ -892,7 +892,7 @@ class mbedToolchain:
# Sort include paths for consistency
inc_paths = sorted(set(inc_paths))
# Unique id of all include paths
self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()
self.inc_md5 = md5(' '.join(inc_paths).encode('utf-8')).hexdigest()
objects = []
queue = []
@ -968,7 +968,7 @@ class mbedToolchain:
sleep(0.01)
pending = 0
for r in results:
if r._ready is True:
if r.ready():
try:
result = r.get()
results.remove(r)
@ -1055,7 +1055,7 @@ class mbedToolchain:
buff[0] = re.sub('^(.*?)\: ', '', buff[0])
for line in buff:
filename = line.replace('\\\n', '').strip()
if file:
if filename:
filename = filename.replace('\\ ', '\a')
dependencies.extend(((self.CHROOT if self.CHROOT else '') +
f.replace('\a', ' '))
@ -1319,7 +1319,7 @@ class mbedToolchain:
@staticmethod
def _overwrite_when_not_equal(filename, content):
if not exists(filename) or content != open(filename).read():
with open(filename, "wb") as out:
with open(filename, "w") as out:
out.write(content)
@staticmethod