@@ -13,6 +13,8 @@
 from flake8 import utils
 from flake8._compat import FSTRING_END
 from flake8._compat import FSTRING_MIDDLE
+from flake8._compat import TSTRING_END
+from flake8._compat import TSTRING_MIDDLE
 from flake8.plugins.finder import LoadedPlugin
 
 LOG = logging.getLogger(__name__)
@@ -113,7 +115,7 @@ def __init__(
         self.verbose = options.verbose
         #: Statistics dictionary
         self.statistics = {"logical lines": 0}
-        self._fstring_start = -1
+        self._fstring_start = self._tstring_start = -1
 
     @functools.cached_property
     def file_tokens(self) -> list[tokenize.TokenInfo]:
@@ -125,10 +127,16 @@ def fstring_start(self, lineno: int) -> None:  # pragma: >=3.12 cover
         """Signal the beginning of an fstring."""
         self._fstring_start = lineno
 
+    def tstring_start(self, lineno: int) -> None:  # pragma: >=3.14 cover
+ """Signal the beginning of an tstring."""
+        self._tstring_start = lineno
+
     def multiline_string(self, token: tokenize.TokenInfo) -> Generator[str]:
         """Iterate through the lines of a multiline string."""
         if token.type == FSTRING_END:  # pragma: >=3.12 cover
             start = self._fstring_start
+        elif token.type == TSTRING_END:  # pragma: >=3.14 cover
+            start = self._tstring_start
         else:
             start = token.start[0]
 
@@ -198,7 +206,10 @@ def build_logical_line_tokens(self) -> _Logical:  # noqa: C901
                 continue
             if token_type == tokenize.STRING:
                 text = mutate_string(text)
-            elif token_type == FSTRING_MIDDLE:  # pragma: >=3.12 cover
+            elif token_type in {
+                FSTRING_MIDDLE,
+                TSTRING_MIDDLE,
+            }:  # pragma: >=3.12 cover  # noqa: E501
                 # A curly brace in an FSTRING_MIDDLE token must be an escaped
                 # curly brace. Both 'text' and 'end' will account for the
                 # escaped version of the token (i.e. a single brace) rather
@@ -382,7 +393,7 @@ def is_eol_token(token: tokenize.TokenInfo) -> bool:
 
 def is_multiline_string(token: tokenize.TokenInfo) -> bool:
     """Check if this is a multiline string."""
-    return token.type == FSTRING_END or (
+    return token.type in {FSTRING_END, TSTRING_END} or (
         token.type == tokenize.STRING and "\n" in token.string
     )
 
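For context, the TSTRING_MIDDLE and TSTRING_END names imported above come from flake8._compat, which presumably resolves them the same way it already resolves the FSTRING_* token types: Python 3.14 adds TSTRING_START, TSTRING_MIDDLE, and TSTRING_END to the token module for PEP 750 template strings, so older interpreters need a sentinel that never matches a real token type. A minimal sketch of that fallback pattern (not flake8's actual _compat code) could look like:

# Sketch only: mirrors the FSTRING_* fallback pattern; flake8's real _compat
# module may differ in names and details.
import sys
import token

if sys.version_info >= (3, 14):  # t-string token types exist only on 3.14+
    TSTRING_MIDDLE = token.TSTRING_MIDDLE
    TSTRING_END = token.TSTRING_END
else:  # sentinel value that no real token.type will ever equal
    TSTRING_MIDDLE = TSTRING_END = -1

With that in place, the processor changes mirror the existing fstring logic: a TSTRING_END token carries the position of the closing quotes, so the line recorded by tstring_start() in self._tstring_start is what lets multiline_string() walk every physical line of a multiline t-string, and is_multiline_string() treats TSTRING_END like FSTRING_END.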