
Commit 5d16e6e

Fix formatting and linting
1 parent b08d311

File tree: 2 files changed, +35 -28 lines

  pylint/checkers/misc.py
  tests/checkers/unittest_misc.py


pylint/checkers/misc.py

Lines changed: 16 additions & 10 deletions
@@ -96,17 +96,19 @@ class EncodingChecker(BaseTokenChecker, BaseRawFileChecker):
                 "type": "yn",
                 "metavar": "<y or n>",
                 "default": False,
-                "help": "Whether or not to search for fixme's in docstrings."
-            }
-        )
+                "help": "Whether or not to search for fixme's in docstrings.",
+            },
+        ),
     )
 
     def open(self) -> None:
         super().open()
 
         notes = "|".join(re.escape(note) for note in self.linter.config.notes)
         if self.linter.config.notes_rgx:
-            comment_regex = rf"#\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
+            comment_regex = (
+                rf"#\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
+            )
         self._comment_fixme_pattern = re.compile(comment_regex, re.I)
         if self.linter.config.check_fixme_in_docstring:
             docstring_regex = rf"((\"\"\")|(\'\'\'))\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
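
Note on the hunk above: the wrapped comment_regex is the same pattern as before, only reflowed. As a minimal sketch of what the compiled pattern matches, assume pylint's default notes ("FIXME", "XXX", "TODO") and a hypothetical notes_rgx value:

import re

# Sketch only: defaults stand in for self.linter.config.notes / notes_rgx.
notes = "|".join(re.escape(note) for note in ("FIXME", "XXX", "TODO"))
notes_rgx = "CODETAG"  # hypothetical user-configured pattern
comment_regex = rf"#\s*({notes}|{notes_rgx})(?=(:|\s|\Z))"
pattern = re.compile(comment_regex, re.I)

assert pattern.search("# TODO: refactor")      # note followed by ':'
assert pattern.search("#fixme tomorrow")       # case-insensitive, no space required
assert not pattern.search("# TODOS are fine")  # lookahead rejects longer words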
@@ -149,22 +151,27 @@ def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
             return
         for token_info in tokens:
             if token_info.type == tokenize.COMMENT:
-                comment_text = token_info.string[1:].lstrip()  # trim '#' and white-spaces
+                comment_text = token_info.string[
+                    1:
+                ].lstrip()  # trim '#' and white-spaces
                 if self._comment_fixme_pattern.search("#" + comment_text.lower()):
                     self.add_message(
                         "fixme",
                         col_offset=token_info.start[1] + 1,
                         args=comment_text,
                         line=token_info.start[0],
                     )
-            elif self.linter.config.check_fixme_in_docstring and self._is_docstring_comment(token_info):
+            elif (
+                self.linter.config.check_fixme_in_docstring
+                and self._is_docstring_comment(token_info)
+            ):
                 docstring_lines = token_info.string.split("\n")
                 for line_no, line in enumerate(docstring_lines):
                     # trim '"""' at beginning or end and whitespace
-                    if line.startswith('"""') or line.startswith("'''"):
+                    if line.startswith(('"""', "'''")):
                         line = line[3:]
                     line = line.lstrip()
-                    if line.endswith('"""') or line.endswith("'''"):
+                    if line.endswith(('"""', "'''")):
                         line = line[:-3]
                     if self._docstring_fixme_pattern.search(
                         '"""' + line.lower()
@@ -178,8 +185,7 @@ def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
 
     def _is_docstring_comment(self, token_info: tokenize.TokenInfo) -> bool:
         return token_info.type == tokenize.STRING and (
-            token_info.line.lstrip().startswith('"""')
-            or token_info.line.lstrip().startswith("'''")
+            token_info.line.lstrip().startswith(('"""', "'''"))
         )
 
 
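For context on the unchanged docstring_regex above: process_tokens lowercases each trimmed docstring line and re-prefixes '"""' before searching, so the pattern anchors on the quote characters. A minimal sketch, assuming the default notes and omitting notes_rgx:

import re

# Sketch only: default notes, notes_rgx left out for brevity.
notes = "|".join(re.escape(note) for note in ("FIXME", "XXX", "TODO"))
docstring_regex = rf"((\"\"\")|(\'\'\'))\s*({notes})(?=(:|\s|\Z))"
pattern = re.compile(docstring_regex, re.I)

line = "FIXME message"  # quotes and leading whitespace already trimmed
assert pattern.search('"""' + line.lower())     # matches after re-prefixing
assert not pattern.search('"""unrelated text')  # no note, no match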
tests/checkers/unittest_misc.py

Lines changed: 19 additions & 18 deletions
@@ -8,6 +8,7 @@
 from pylint.testutils import CheckerTestCase, MessageTest, _tokenize_str, set_config
 
 
+# pylint: disable=too-many-public-methods
 class TestFixme(CheckerTestCase):
     CHECKER_CLASS = misc.EncodingChecker
 
@@ -124,7 +125,6 @@ def test_docstring_with_message(self) -> None:
         """
         with self.assertAddsMessages(
             MessageTest(msg_id="fixme", line=2, args="FIXME message", col_offset=9)
-
         ):
             self.checker.process_tokens(_tokenize_str(code))
 
@@ -135,7 +135,6 @@ def test_docstring_with_message_single_quote(self) -> None:
         """
         with self.assertAddsMessages(
             MessageTest(msg_id="fixme", line=2, args="FIXME message", col_offset=9)
-
         ):
             self.checker.process_tokens(_tokenize_str(code))
 
@@ -173,7 +172,7 @@ def test_docstring_with_nl_message_multi(self) -> None:
         """
         with self.assertAddsMessages(
             MessageTest(msg_id="fixme", line=3, args="FIXME this", col_offset=9),
-            MessageTest(msg_id="fixme", line=4, args="TODO: that", col_offset=9)
+            MessageTest(msg_id="fixme", line=4, args="TODO: that", col_offset=9),
         ):
             self.checker.process_tokens(_tokenize_str(code))
 
@@ -189,7 +188,7 @@ def test_docstring_with_comment(self) -> None:
         with self.assertAddsMessages(
             MessageTest(msg_id="fixme", line=2, args="XXX message1", col_offset=9),
             MessageTest(msg_id="fixme", line=4, args="FIXME message2", col_offset=9),
-            MessageTest(msg_id="fixme", line=5, args="TODO message3", col_offset=9)
+            MessageTest(msg_id="fixme", line=5, args="TODO message3", col_offset=9),
         ):
             self.checker.process_tokens(_tokenize_str(code))
 
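The trailing commas added in this and the two previous hunks are meaningful to black: a "magic trailing comma" after the last element tells black to keep the brackets exploded, one element per line, while its absence lets black collapse the contents when they fit the line length. Roughly:

from pylint.testutils import MessageTest

# No trailing comma: black may join the contents onto one line if they fit.
expected = [
    MessageTest(msg_id="fixme", line=2, args="FIXME message", col_offset=9)
]

# Magic trailing comma: black keeps one element per line, even if they'd fit.
expected = [
    MessageTest(msg_id="fixme", line=3, args="FIXME this", col_offset=9),
    MessageTest(msg_id="fixme", line=4, args="TODO: that", col_offset=9),
]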
@@ -229,29 +228,27 @@ def test_docstring_todo_mult(self) -> None:
         \"\"\"
         """
         with self.assertAddsMessages(
-            MessageTest(msg_id="fixme", line=3, args="FIXME this TODO that", col_offset=9),
+            MessageTest(
+                msg_id="fixme", line=3, args="FIXME this TODO that", col_offset=9
+            ),
         ):
             self.checker.process_tokens(_tokenize_str(code))
-
-    @set_config(
-        check_fixme_in_docstring=True,
-        notes=["CODETAG"]
-    )
+
+    @set_config(check_fixme_in_docstring=True, notes=["CODETAG"])
     def test_docstring_custom_note(self) -> None:
         code = """
         \"\"\"
         CODETAG implement this
         \"\"\"
         """
         with self.assertAddsMessages(
-            MessageTest(msg_id="fixme", line=3, args="CODETAG implement this", col_offset=9),
+            MessageTest(
+                msg_id="fixme", line=3, args="CODETAG implement this", col_offset=9
+            ),
         ):
             self.checker.process_tokens(_tokenize_str(code))
-
-    @set_config(
-        check_fixme_in_docstring=True,
-        notes_rgx="FIX.*"
-    )
+
+    @set_config(check_fixme_in_docstring=True, notes_rgx="FIX.*")
     def test_docstring_custom_rgx(self) -> None:
         code = """
         \"\"\"
@@ -260,7 +257,11 @@ def test_docstring_custom_rgx(self) -> None:
         \"\"\"
         """
         with self.assertAddsMessages(
-            MessageTest(msg_id="fixme", line=3, args="FIXME implement this", col_offset=9),
-            MessageTest(msg_id="fixme", line=4, args="FIXTHIS also implement this", col_offset=9),
+            MessageTest(
+                msg_id="fixme", line=3, args="FIXME implement this", col_offset=9
+            ),
+            MessageTest(
+                msg_id="fixme", line=4, args="FIXTHIS also implement this", col_offset=9
+            ),
         ):
             self.checker.process_tokens(_tokenize_str(code))
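
All of the tests above share one pattern: build a snippet, tokenize it with _tokenize_str, feed it to the checker's process_tokens, and assert the exact messages inside assertAddsMessages. A minimal sketch of adding one more case in the same style (the "HACK" note and message text are hypothetical; the layout mirrors test_docstring_custom_note, hence line=3 and col_offset=9):

from pylint.checkers import misc
from pylint.testutils import CheckerTestCase, MessageTest, _tokenize_str, set_config


class TestFixmeDocstringSketch(CheckerTestCase):
    CHECKER_CLASS = misc.EncodingChecker

    # "HACK" is a hypothetical note name, not one used by this commit.
    @set_config(check_fixme_in_docstring=True, notes=["HACK"])
    def test_docstring_hypothetical_note(self) -> None:
        code = """
        \"\"\"
        HACK work around this
        \"\"\"
        """
        with self.assertAddsMessages(
            MessageTest(
                msg_id="fixme", line=3, args="HACK work around this", col_offset=9
            ),
        ):
            self.checker.process_tokens(_tokenize_str(code))

Running pytest tests/checkers/unittest_misc.py -k docstring should exercise the docstring-related cases by name.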
