Skip to content

Commit

Permalink
Fixme should be triggered only in comments
Browse files Browse the repository at this point in the history
Closes #2321
  • Loading branch information
Rogalski, Lukasz authored and PCManticore committed Jan 20, 2019
1 parent 12dc8a9 commit 4564f6a
Show file tree
Hide file tree
Showing 2 changed files with 81 additions and 110 deletions.
103 changes: 41 additions & 62 deletions pylint/checkers/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,9 @@

# pylint: disable=W0511

import re
import tokenize

from pylint.interfaces import IRawChecker
from pylint.interfaces import IRawChecker, ITokenChecker
from pylint.checkers import BaseChecker
from pylint.utils import OPTION_RGX, MessagesHandlerMixIn

Expand Down Expand Up @@ -69,7 +69,7 @@ class EncodingChecker(BaseChecker):
* encoding issues.
"""

__implements__ = IRawChecker
__implements__ = (IRawChecker, ITokenChecker)

# configuration section name
name = "miscellaneous"
Expand Down Expand Up @@ -103,53 +103,6 @@ class EncodingChecker(BaseChecker):
),
)

def _check_note(self, notes, lineno, line, module_last_lineno):
    """Add the 'fixme' message when a configured note tag is found in *line*.

    :param notes: compiled regular expression matching any of the
        configured note tags (XXX, TODO, FIXME, ...) behind a '#'
    :type notes: re.Pattern
    :param lineno: 1-based number of the line being checked
    :type lineno: int
    :param line: text of the line to be checked
    :type line: str
    :param module_last_lineno: last line number of the module as parsed by
        astroid (may differ from the real last line number when commented
        lines exist at the end of the module)
    :type module_last_lineno: int
    """
    match = notes.search(line)
    if not match:
        return
    # When the module ends with commented lines, the astroid parser does
    # not take those lines into account, so:
    # - their line numbers are greater than the module's last line
    #   number (module.tolineno)
    # - the astroid module object cannot inform pylint of messages
    #   disabled on those trailing lines.
    # Therefore honour inline "# pylint: disable=..." options here manually.
    if lineno > module_last_lineno:
        disable_option_match = OPTION_RGX.search(line)
        if disable_option_match:
            try:
                _, value = disable_option_match.group(1).split("=", 1)
                values = [_val.strip().upper() for _val in value.split(",")]
                if set(values) & set(self.config.notes):
                    return
            except ValueError:
                self.add_message(
                    "bad-inline-option",
                    args=disable_option_match.group(1).strip(),
                    # BUG FIX: add_message expects an int line number here;
                    # the original passed the line's text (`line=line`).
                    line=lineno,
                )
                return
    self.add_message(
        "fixme",
        args=line[match.start(1):].rstrip(),
        line=lineno,
        col_offset=match.start(1),
    )

def _check_encoding(self, lineno, line, file_encoding):
try:
return line.decode(file_encoding)
Expand All @@ -167,25 +120,51 @@ def _check_encoding(self, lineno, line, file_encoding):
)

def process_module(self, module):
    """Inspect the source file to find encoding problems.

    Note handling ("fixme") moved to ``process_tokens`` so that note tags
    are only detected inside real comment tokens, not string literals.

    :param module: astroid module whose stream is read line by line; each
        line is validated against the module's declared encoding.
    """
    # presumably module.file_encoding comes from astroid's coding-cookie /
    # BOM detection — TODO confirm; fall back to ASCII when absent.
    if module.file_encoding:
        encoding = module.file_encoding
    else:
        encoding = "ascii"

    with module.stream() as stream:
        for lineno, line in enumerate(stream):
            # enumerate is 0-based; reported line numbers are 1-based
            self._check_encoding(lineno + 1, line, encoding)

def process_tokens(self, tokens):
    """Inspect the source's comment tokens to find "fixme" notes.

    Only COMMENT tokens are considered, so note tags occurring inside
    string literals no longer trigger the message (closes #2321).

    :param tokens: iterable of ``tokenize.TokenInfo`` for the module
    """
    if not self.config.notes:
        return
    for token_info in tokens:
        if token_info.type != tokenize.COMMENT:
            continue
        comment_text = token_info.string[1:].lstrip()  # trim '#' and whitespace

        # Honour inline "# pylint: disable=..." clauses that suppress some
        # of the configured note tags.
        disable_option_match = OPTION_RGX.search(comment_text)
        if disable_option_match:
            try:
                _, value = disable_option_match.group(1).split("=", 1)
                values = [_val.strip().upper() for _val in value.split(",")]
                if set(values) & set(self.config.notes):
                    # BUG FIX: the original `return` aborted processing of
                    # every remaining comment; only skip this comment.
                    continue
            except ValueError:
                self.add_message(
                    "bad-inline-option",
                    args=disable_option_match.group(1).strip(),
                    # BUG FIX: add_message expects an int line number, not
                    # the comment's text (`line=comment.string`).
                    line=token_info.start[0],
                )
                continue

        # Emit at most one 'fixme' per comment.
        for note in self.config.notes:
            if comment_text.lower().startswith(note.lower()):
                self.add_message(
                    "fixme",
                    args=comment_text,
                    line=token_info.start[0],
                    col_offset=token_info.string.lower().index(note.lower()),
                )
                break


def register(linter):
Expand Down
88 changes: 40 additions & 48 deletions pylint/test/unittest_checker_misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,85 +13,77 @@
"""Tests for the misc checker."""

from pylint.checkers import misc
from pylint.testutils import (
CheckerTestCase,
Message,
set_config,
_create_file_backed_module,
)
from pylint.testutils import CheckerTestCase, Message, set_config, _tokenize_str


class TestFixme(CheckerTestCase):
CHECKER_CLASS = misc.EncodingChecker

def test_fixme_with_message(self):
    """A '# FIXME <text>' comment reports 'fixme' carrying the full text."""
    code = """a = 1
# FIXME message
"""
    with self.assertAddsMessages(
        Message(msg_id="fixme", line=2, args="FIXME message")
    ):
        self.checker.process_tokens(_tokenize_str(code))

def test_todo_without_message(self):
    """A bare '# TODO' comment (no trailing text) still reports 'fixme'."""
    code = """a = 1
# TODO
"""
    with self.assertAddsMessages(Message(msg_id="fixme", line=2, args="TODO")):
        self.checker.process_tokens(_tokenize_str(code))

def test_xxx_without_space(self):
    """'#XXX' with no space after the hash still reports 'fixme'."""
    code = """a = 1
#XXX
"""
    with self.assertAddsMessages(Message(msg_id="fixme", line=2, args="XXX")):
        self.checker.process_tokens(_tokenize_str(code))

def test_xxx_middle(self):
    """A note tag in the middle of a comment does not trigger 'fixme'."""
    code = """a = 1
# midle XXX
"""
    with self.assertNoMessages():
        self.checker.process_tokens(_tokenize_str(code))

def test_without_space_fixme(self):
    """'#FIXME' with no space after the hash still reports 'fixme'."""
    code = """a = 1
#FIXME
"""
    with self.assertAddsMessages(Message(msg_id="fixme", line=2, args="FIXME")):
        self.checker.process_tokens(_tokenize_str(code))

@set_config(notes=[])
def test_absent_codetag(self):
    """With an empty notes list, no comment ever triggers 'fixme'."""
    code = """a = 1
# FIXME
# TODO
# XXX
"""
    with self.assertNoMessages():
        self.checker.process_tokens(_tokenize_str(code))

@set_config(notes=["CODETAG"])
def test_other_present_codetag(self):
    """Only the configured tag ('CODETAG') is reported; 'FIXME' is not."""
    code = """a = 1
# CODETAG
# FIXME
"""
    with self.assertAddsMessages(Message(msg_id="fixme", line=2, args="CODETAG")):
        self.checker.process_tokens(_tokenize_str(code))

def test_issue_2321_should_not_trigger(self):
    """Regression test for #2321: a note tag inside a string literal
    (not a comment token) must not trigger 'fixme'."""
    code = 'print("# TODO this should not trigger a fixme")'
    with self.assertNoMessages():
        self.checker.process_tokens(_tokenize_str(code))

def test_issue_2321_should_trigger(self):
    """Companion to #2321: the same text as a real comment DOES trigger."""
    code = "# TODO this should not trigger a fixme"
    with self.assertAddsMessages(
        Message(msg_id="fixme", line=1, args="TODO this should not trigger a fixme")
    ):
        self.checker.process_tokens(_tokenize_str(code))

0 comments on commit 4564f6a

Please sign in to comment.