Skip to content

Commit 23e4005

Browse files
authored
Merge pull request #1983 from PyCQA/py314
add support for t-strings
2 parents 6b6f3d5 + 019424b commit 23e4005

File tree

4 files changed

+57
-3
lines changed

4 files changed

+57
-3
lines changed

src/flake8/_compat.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,3 +9,10 @@
99
FSTRING_END = tokenize.FSTRING_END
1010
else: # pragma: <3.12 cover
1111
FSTRING_START = FSTRING_MIDDLE = FSTRING_END = -1
12+
13+
if sys.version_info >= (3, 14): # pragma: >=3.14 cover
14+
TSTRING_START = tokenize.TSTRING_START
15+
TSTRING_MIDDLE = tokenize.TSTRING_MIDDLE
16+
TSTRING_END = tokenize.TSTRING_END
17+
else: # pragma: <3.14 cover
18+
TSTRING_START = TSTRING_MIDDLE = TSTRING_END = -1

src/flake8/checker.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
from flake8 import processor
2020
from flake8 import utils
2121
from flake8._compat import FSTRING_START
22+
from flake8._compat import TSTRING_START
2223
from flake8.discover_files import expand_paths
2324
from flake8.options.parse_args import parse_args
2425
from flake8.plugins.finder import Checkers
@@ -554,6 +555,8 @@ def check_physical_eol(
554555
assert self.processor is not None
555556
if token.type == FSTRING_START: # pragma: >=3.12 cover
556557
self.processor.fstring_start(token.start[0])
558+
elif token.type == TSTRING_START: # pragma: >=3.14 cover
559+
self.processor.tstring_start(token.start[0])
557560
# a newline token ends a single physical line.
558561
elif processor.is_eol_token(token):
559562
# if the file does not end with a newline, the NEWLINE

src/flake8/processor.py

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313
from flake8 import utils
1414
from flake8._compat import FSTRING_END
1515
from flake8._compat import FSTRING_MIDDLE
16+
from flake8._compat import TSTRING_END
17+
from flake8._compat import TSTRING_MIDDLE
1618
from flake8.plugins.finder import LoadedPlugin
1719

1820
LOG = logging.getLogger(__name__)
@@ -113,7 +115,7 @@ def __init__(
113115
self.verbose = options.verbose
114116
#: Statistics dictionary
115117
self.statistics = {"logical lines": 0}
116-
self._fstring_start = -1
118+
self._fstring_start = self._tstring_start = -1
117119

118120
@functools.cached_property
119121
def file_tokens(self) -> list[tokenize.TokenInfo]:
@@ -125,10 +127,16 @@ def fstring_start(self, lineno: int) -> None: # pragma: >=3.12 cover
125127
"""Signal the beginning of an fstring."""
126128
self._fstring_start = lineno
127129

130+
def tstring_start(self, lineno: int) -> None: # pragma: >=3.14 cover
131+
"""Signal the beginning of a tstring."""
132+
self._tstring_start = lineno
133+
128134
def multiline_string(self, token: tokenize.TokenInfo) -> Generator[str]:
129135
"""Iterate through the lines of a multiline string."""
130136
if token.type == FSTRING_END: # pragma: >=3.12 cover
131137
start = self._fstring_start
138+
elif token.type == TSTRING_END: # pragma: >=3.14 cover
139+
start = self._tstring_start
132140
else:
133141
start = token.start[0]
134142

@@ -198,7 +206,10 @@ def build_logical_line_tokens(self) -> _Logical: # noqa: C901
198206
continue
199207
if token_type == tokenize.STRING:
200208
text = mutate_string(text)
201-
elif token_type == FSTRING_MIDDLE: # pragma: >=3.12 cover
209+
elif token_type in {
210+
FSTRING_MIDDLE,
211+
TSTRING_MIDDLE,
212+
}: # pragma: >=3.12 cover # noqa: E501
202213
# A curly brace in an FSTRING_MIDDLE or TSTRING_MIDDLE token must be an escaped
203214
# curly brace. Both 'text' and 'end' will account for the
204215
# escaped version of the token (i.e. a single brace) rather
@@ -382,7 +393,7 @@ def is_eol_token(token: tokenize.TokenInfo) -> bool:
382393

383394
def is_multiline_string(token: tokenize.TokenInfo) -> bool:
384395
"""Check if this is a multiline string."""
385-
return token.type == FSTRING_END or (
396+
return token.type in {FSTRING_END, TSTRING_END} or (
386397
token.type == tokenize.STRING and "\n" in token.string
387398
)
388399

tests/integration/test_plugins.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -296,3 +296,36 @@ def test_escaping_of_fstrings_in_string_redacter(tmpdir, capsys):
296296
"""
297297
out, err = capsys.readouterr()
298298
assert out == expected
299+
300+
301+
@pytest.mark.xfail(sys.version_info < (3, 14), reason="3.14+")
302+
def test_tstring_logical_line(tmpdir, capsys): # pragma: >=3.14 cover
303+
cfg_s = f"""\
304+
[flake8]
305+
extend-ignore = F
306+
[flake8:local-plugins]
307+
extension =
308+
T = {yields_logical_line.__module__}:{yields_logical_line.__name__}
309+
"""
310+
311+
cfg = tmpdir.join("tox.ini")
312+
cfg.write(cfg_s)
313+
314+
src = """\
315+
t'''
316+
hello {world}
317+
'''
318+
t'{{"{hello}": "{world}"}}'
319+
"""
320+
t_py = tmpdir.join("t.py")
321+
t_py.write_binary(src.encode())
322+
323+
with tmpdir.as_cwd():
324+
assert main(("t.py", "--config", str(cfg))) == 1
325+
326+
expected = """\
327+
t.py:1:1: T001 "t'''xxxxxxx{world}x'''"
328+
t.py:4:1: T001 "t'xxx{hello}xxxx{world}xxx'"
329+
"""
330+
out, err = capsys.readouterr()
331+
assert out == expected

0 commit comments

Comments
 (0)