scripts: Fix risky uses of non-raw regex strings in Python scripts
Fixes pylint warnings like this one:

    doc/conf.py:325:0: W1401: Anomalous backslash in string: '\s'.
    String constant might be missing an r prefix.
    (anomalous-backslash-in-string)

The reason for this warning is that backslash escapes are interpreted in
non-raw (non-r-prefixed) strings. For example, '\a' and r'\a' are not the
same string (the first one has a single ASCII bell character, the second
one has two characters).

It just happens that there's no \s (or \., or \/) escape, for example,
and '\s' turns into two characters (as needed for a regex). It's risky to
rely on stuff like that in regexes though. Best to make them raw strings
unless they're super trivial.

Also note that '\s' and '\\s' turn into the same string.

Another tip: a literal ' can be put into a string with "blah'blah"
instead of 'blah\'blah'.

Signed-off-by: Ulf Magnusson <Ulf.Magnusson@nordicsemi.no>
This commit is contained in:
parent
3c45eb45d6
commit
a449c98db2
|
@ -322,7 +322,7 @@ latex_elements = {
|
|||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
'preamble': '\setcounter{tocdepth}{2}',
|
||||
'preamble': r'\setcounter{tocdepth}{2}',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#'figure_align': 'htbp',
|
||||
|
|
|
@ -14,7 +14,7 @@ from docutils.parsers.rst import directives
|
|||
# This could be as simple as generating a couple of sets of instructions, one
|
||||
# for Unix environments, and another for Windows.
|
||||
class ZephyrAppCommandsDirective(Directive):
|
||||
'''
|
||||
r'''
|
||||
This is a Zephyr directive for generating consistent documentation
|
||||
of the shell commands needed to manage (build, flash, etc.) an application.
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ def setup(app):
|
|||
|
||||
def autolink(pattern):
|
||||
def role(name, rawtext, text, lineno, inliner, options={}, content=[]):
|
||||
m = re.search('(.*)\s*<(.*)>', text) # noqa: W605 - regular expression
|
||||
m = re.search(r'(.*)\s*<(.*)>', text)
|
||||
if m:
|
||||
link_text = m.group(1)
|
||||
link = m.group(2)
|
||||
|
|
|
@ -81,7 +81,7 @@ def src_deps(zephyr_base, src_file, dest):
|
|||
# argument, which is a (relative) path to the additional
|
||||
# dependency file.
|
||||
directives = "|".join(DIRECTIVES)
|
||||
pattern = re.compile("\.\.\s+(?P<directive>%s)::\s+(?P<dep_rel>.*)" %
|
||||
pattern = re.compile(r"\.\.\s+(?P<directive>%s)::\s+(?P<dep_rel>.*)" %
|
||||
directives)
|
||||
deps = []
|
||||
for l in content:
|
||||
|
|
|
@ -22,10 +22,10 @@ import sys
|
|||
# to write a valid linker script that will fail this script, but we
|
||||
# don't have such a use case and one isn't forseen.
|
||||
|
||||
section_re = re.compile('(?x)' # (allow whitespace)
|
||||
'^([a-zA-Z0-9_\.]+) \s+' # name
|
||||
' (0x[0-9a-f]+) \s+' # addr
|
||||
' (0x[0-9a-f]+)\s*') # size
|
||||
section_re = re.compile(r'(?x)' # (allow whitespace)
|
||||
r'^([a-zA-Z0-9_\.]+) \s+' # name
|
||||
r' (0x[0-9a-f]+) \s+' # addr
|
||||
r' (0x[0-9a-f]+)\s*') # size
|
||||
|
||||
load_addr_re = re.compile('load address (0x[0-9a-f]+)')
|
||||
|
||||
|
|
|
@ -60,7 +60,7 @@ def main():
|
|||
for f in files:
|
||||
if f.endswith(".rst") or f.endswith(".png") or f.endswith(".jpg"):
|
||||
continue
|
||||
p = re.match("^boards\/[^/]+\/([^/]+)\/", f)
|
||||
p = re.match(r"^boards\/[^/]+\/([^/]+)\/", f)
|
||||
if p and p.groups():
|
||||
boards.add(p.group(1))
|
||||
|
||||
|
|
|
@ -43,7 +43,7 @@ exclude_regexs = []
|
|||
# first is a list of one or more comment lines
|
||||
# followed by a list of non-comments which describe a multiline regex
|
||||
config_regex = \
|
||||
b"(?P<comment>(^\s*#.*\n)+)" \
|
||||
b"(?P<comment>(^\\s*#.*\n)+)" \
|
||||
b"(?P<regex>(^[^#].*\n)+)"
|
||||
|
||||
|
||||
|
@ -87,7 +87,7 @@ def config_import_path(path):
|
|||
"""
|
||||
Imports regular expresions from any file *.conf in the given path
|
||||
"""
|
||||
file_regex = re.compile(".*\.conf$")
|
||||
file_regex = re.compile(r".*\.conf$")
|
||||
try:
|
||||
for dirpath, _, filenames in os.walk(path):
|
||||
for _filename in sorted(filenames):
|
||||
|
|
|
@ -126,7 +126,7 @@ def parse_obj_files(partitions):
|
|||
# Iterate over all object files to find partitions
|
||||
for dirpath, _, files in os.walk(args.directory):
|
||||
for filename in files:
|
||||
if re.match(".*\.obj$",filename):
|
||||
if re.match(r".*\.obj$",filename):
|
||||
fullname = os.path.join(dirpath, filename)
|
||||
find_obj_file_partitions(fullname, partitions)
|
||||
|
||||
|
|
|
@ -66,7 +66,7 @@ class SignedOffBy(CommitRule):
|
|||
flags |= re.IGNORECASE
|
||||
for line in commit.message.body:
|
||||
if line.lower().startswith("signed-off-by"):
|
||||
if not re.search('(^)Signed-off-by: ([-\'\w.]+) ([-\'\w.]+) (.*)', line, flags=flags):
|
||||
if not re.search(r"(^)Signed-off-by: ([-'\w.]+) ([-'\w.]+) (.*)", line, flags=flags):
|
||||
return [RuleViolation(self.id, "Signed-off-by: must have a full name", line_nr=1)]
|
||||
else:
|
||||
return
|
||||
|
@ -106,7 +106,7 @@ class MaxLineLengthExceptions(LineRule):
|
|||
|
||||
def validate(self, line, _commit):
|
||||
max_length = self.options['line-length'].value
|
||||
urls = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', line)
|
||||
urls = re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', line)
|
||||
if line.startswith('Signed-off-by'):
|
||||
return
|
||||
|
||||
|
|
|
@ -105,9 +105,9 @@ def search_config_in_file(tree, items, completelog, exclude):
|
|||
with open(os.path.join(dirName, fname), "r", encoding="utf-8", errors="ignore") as f:
|
||||
searchConf = f.readlines()
|
||||
for line in searchConf:
|
||||
if re.search('(^|[\s|(])CONFIG_([a-zA-Z0-9_]+)', line) :
|
||||
configName = re.search('(^|[\s|(])'
|
||||
+'CONFIG_([a-zA-Z0-9_]+)', line)
|
||||
if re.search(r'(^|[\s|(])CONFIG_([a-zA-Z0-9_]+)', line) :
|
||||
configName = re.search(r'(^|[\s|(])'
|
||||
+ r'CONFIG_([a-zA-Z0-9_]+)', line)
|
||||
configs = configs + 1
|
||||
if completelog:
|
||||
print('\n' + configName.group(2) + ' at '
|
||||
|
|
Loading…
Reference in a new issue