
Commit dbbd082

Author: Peter Dolak
Parent: 4a70cd4

Fix unintended lstrip_blocks behavior. Fixes pallets#1138

Introduced in pallets#858. Tests and performance-testing results will follow.

1 file changed: 8 additions, 5 deletions


src/jinja2/lexer.py
@@ -682,6 +682,7 @@ def tokeniter(self, source, name, filename=None, state=None):
         balancing_stack = []
         lstrip_unless_re = self.lstrip_unless_re
         newlines_stripped = 0
+        line_starting = True
 
         while 1:
             # tokenizer loop
@@ -731,11 +732,11 @@
                         ):
                             # The start of text between the last newline and the tag.
                             l_pos = text.rfind("\n") + 1
-
-                            # If there's only whitespace between the newline and the
-                            # tag, strip it.
-                            if not lstrip_unless_re.search(text, l_pos):
-                                groups = (text[:l_pos],) + groups[1:]
+                            if l_pos > 0 or line_starting:
+                                # If there's only whitespace between the newline and the
+                                # tag, strip it.
+                                if not lstrip_unless_re.search(text, l_pos):
+                                    groups = (text[:l_pos],) + groups[1:]
 
                     for idx, token in enumerate(tokens):
                         # failure group
@@ -794,6 +795,8 @@
                         yield lineno, tokens, data
                     lineno += data.count("\n")
 
+                line_starting = m.group()[-1:] == "\n"
+
                 # fetch new position into new variable so that we can check
                 # if there is a internal parsing error which would result
                 # in an infinite loop
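
For context, a minimal sketch (not part of the commit, and not its test suite) of the behavior this change restores, assuming the documented lstrip_blocks semantics: leading whitespace should only be stripped when a block tag is the first thing on its line. Before the fix, whitespace preceding a block tag was also stripped when the tag followed other output on the same line, because l_pos defaulted to 0 and there was no check that the text actually began at a line start. The expected outputs below are illustrative.

from jinja2 import Environment

env = Environment(lstrip_blocks=True)

# Block tag at the start of a line: the leading indentation is stripped,
# which is the documented lstrip_blocks behavior (the l_pos > 0 branch).
at_line_start = env.from_string("a\n    {% if True %}b{% endif %}")
print(repr(at_line_start.render()))        # expected: 'a\nb'

# Block tag preceded by other output on the same line: with this fix the
# spaces before {% if %} are preserved, because the tag does not start a
# line (l_pos == 0 and line_starting is False).
mid_line = env.from_string("{{ x }}    {% if True %}b{% endif %}")
print(repr(mid_line.render(x="a")))        # expected: 'a    b'

The new line_starting flag records whether the previous lexer match ended with a newline, so the l_pos == 0 case is only treated as a line start at the beginning of the template or immediately after a newline.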
