Skip to content

Commit 9fb80dd

Browse files
committed
Add support for `|` unions in argument type annotations suggested by dmypy suggest
1 parent 84e6a95 commit 9fb80dd

File tree

2 files changed

+10
-2
lines changed

2 files changed

+10
-2
lines changed

src/idlemypyextension/annotate.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -176,7 +176,7 @@ def tokenize(txt: str) -> list[Token]:
176176
return tokens
177177
if txt[0] in {" ", "\n"}:
178178
txt = txt[1:]
179-
elif txt[0] in "()[],*":
179+
elif txt[0] in "()[],*|":
180180
tokens.append(Separator(txt[0]))
181181
txt = txt[1:]
182182
elif txt[:2] == "->":
@@ -983,7 +983,7 @@ def get_line(line: int) -> str:
983983
return line_text
984984
raise EOFError
985985

986-
print(f"{get_annotation(annotation, get_line)!r}")
986+
print(f"{get_annotation(annotation, get_line)[0]}")
987987

988988

989989
if __name__ == "__main__": # pragma: nocover

tests/test_annotate.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -175,6 +175,14 @@ def test_list_or(items: Collection[str], result: str) -> None:
175175
annotate.End(),
176176
],
177177
),
178+
(
179+
"int | float",
180+
[
181+
annotate.DottedName("int"),
182+
annotate.Separator("|"),
183+
annotate.DottedName("float"),
184+
],
185+
),
178186
],
179187
)
180188
def test_tokenize(text: str, tokens: list[annotate.Token]) -> None:

0 commit comments

Comments
 (0)