
Commit 47e9734

Merge pull request #3017 from amykyta3/master
Improve python3 performance by adding __slots__
2 parents: c79b0fd + 6990edd

40 files changed: +124, -33 lines
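
Background note (illustrative, not part of the commit): declaring __slots__ on a class makes CPython reserve fixed storage for the listed attribute names instead of giving every instance a __dict__, which shrinks each instance and speeds up attribute access somewhat. That matters for the runtime classes below, which are instantiated in large numbers during lexing and parsing. A minimal sketch with hypothetical names:

    import sys

    class PointWithDict:
        def __init__(self, x, y):
            self.x = x
            self.y = y

    class PointWithSlots:
        # Only the names listed here can become instance attributes,
        # so instances carry no per-instance __dict__.
        __slots__ = ('x', 'y')

        def __init__(self, x, y):
            self.x = x
            self.y = y

    p = PointWithDict(1, 2)
    q = PointWithSlots(1, 2)
    print(hasattr(p, '__dict__'), hasattr(q, '__dict__'))  # True False
    # Rough size comparison; exact numbers vary by CPython version.
    print(sys.getsizeof(p) + sys.getsizeof(p.__dict__), sys.getsizeof(q))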

runtime/Python3/src/antlr4/BufferedTokenStream.py

Lines changed: 1 addition & 0 deletions
@@ -27,6 +27,7 @@ class TokenStream(object):
 
 
 class BufferedTokenStream(TokenStream):
+    __slots__ = ('tokenSource', 'tokens', 'index', 'fetchedEOF')
 
     def __init__(self, tokenSource:Lexer):
         # The {@link TokenSource} from which tokens for this stream are fetched.

runtime/Python3/src/antlr4/CommonTokenFactory.py

Lines changed: 3 additions & 1 deletion
@@ -15,6 +15,8 @@ class TokenFactory(object):
     pass
 
 class CommonTokenFactory(TokenFactory):
+    __slots__ = 'copyText'
+
     #
     # The default {@link CommonTokenFactory} instance.
     #
@@ -56,4 +58,4 @@ def createThin(self, type:int, text:str):
         t.text = text
         return t
 
-CommonTokenFactory.DEFAULT = CommonTokenFactory()
+CommonTokenFactory.DEFAULT = CommonTokenFactory()

runtime/Python3/src/antlr4/CommonTokenStream.py

Lines changed: 1 addition & 0 deletions
@@ -35,6 +35,7 @@
 
 
 class CommonTokenStream(BufferedTokenStream):
+    __slots__ = 'channel'
 
     def __init__(self, lexer:Lexer, channel:int=Token.DEFAULT_CHANNEL):
         super().__init__(lexer)
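
Side note on the subclassing pattern above (illustrative only, hypothetical names): CommonTokenStream lists only its own new attribute, 'channel', because the slots of BufferedTokenStream are inherited. A subclass that defines no __slots__ at all would give its instances a __dict__ again, losing the saving:

    class Base:
        __slots__ = ('a',)

    class SlottedChild(Base):
        # lists only its own new attribute; 'a' is inherited from Base's slots
        __slots__ = ('b',)

    class PlainChild(Base):
        # no __slots__ here, so instances regain a per-instance __dict__
        pass

    print(hasattr(SlottedChild(), '__dict__'))  # False
    print(hasattr(PlainChild(), '__dict__'))    # True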

runtime/Python3/src/antlr4/FileStream.py

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@
 
 
 class FileStream(InputStream):
+    __slots__ = 'fileName'
 
     def __init__(self, fileName:str, encoding:str='ascii', errors:str='strict'):
         super().__init__(self.readDataFrom(fileName, encoding, errors))

runtime/Python3/src/antlr4/InputStream.py

Lines changed: 1 addition & 0 deletions
@@ -12,6 +12,7 @@
 
 
 class InputStream (object):
+    __slots__ = ('name', 'strdata', '_index', 'data', '_size')
 
     def __init__(self, data: str):
         self.name = "<empty>"

runtime/Python3/src/antlr4/IntervalSet.py

Lines changed: 1 addition & 0 deletions
@@ -11,6 +11,7 @@
 IntervalSet = None
 
 class IntervalSet(object):
+    __slots__ = ('intervals', 'readOnly')
 
     def __init__(self):
         self.intervals = None

runtime/Python3/src/antlr4/LL1Analyzer.py

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@
 
 
 class LL1Analyzer (object):
+    __slots__ = 'atn'
 
     #* Special value added to the lookahead sets to indicate that we hit
     # a predicate during analysis if {@code seeThruPreds==false}.

runtime/Python3/src/antlr4/Lexer.py

Lines changed: 5 additions & 1 deletion
@@ -28,6 +28,11 @@ class TokenSource(object):
 
 
 class Lexer(Recognizer, TokenSource):
+    __slots__ = (
+        '_input', '_output', '_factory', '_tokenFactorySourcePair', '_token',
+        '_tokenStartCharIndex', '_tokenStartLine', '_tokenStartColumn',
+        '_hitEOF', '_channel', '_type', '_modeStack', '_mode', '_text'
+    )
 
     DEFAULT_MODE = 0
     MORE = -2
@@ -322,4 +327,3 @@ def recover(self, re:RecognitionException):
         else:
             # TODO: Do we lose character or line position information?
             self._input.consume()
-
runtime/Python3/src/antlr4/ListTokenSource.py

Lines changed: 2 additions & 1 deletion
@@ -18,6 +18,7 @@
 
 
 class ListTokenSource(TokenSource):
+    __slots__ = ('tokens', 'sourceName', 'pos', 'eofToken', '_factory')
 
     # Constructs a new {@link ListTokenSource} instance from the specified
     # collection of {@link Token} objects and source name.
@@ -140,4 +141,4 @@ def getSourceName(self):
         if inputStream is not None:
             return inputStream.getSourceName()
         else:
-            return "List"
+            return "List"

runtime/Python3/src/antlr4/Parser.py

Lines changed: 5 additions & 0 deletions
@@ -23,6 +23,7 @@
 from antlr4.tree.Tree import ParseTreeListener, TerminalNode, ErrorNode
 
 class TraceListener(ParseTreeListener):
+    __slots__ = '_parser'
 
     def __init__(self, parser):
         self._parser = parser
@@ -44,7 +45,11 @@ def exitEveryRule(self, ctx):
 
 # self is all the parsing support code essentially; most of it is error recovery stuff.#
 class Parser (Recognizer):
+    __slots__ = (
+        '_input', '_output', '_errHandler', '_precedenceStack', '_ctx',
+        'buildParseTrees', '_tracer', '_parseListeners', '_syntaxErrors'
 
+    )
     # self field maps from the serialized ATN string to the deserialized {@link ATN} with
     # bypass alternatives.
     #
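
One trade-off the change accepts (not spelled out in the commit; the example uses hypothetical names): a slotted class rejects assignment to any attribute name it does not declare, so every instance attribute the runtime sets must appear in the class's __slots__:

    class ExampleToken:
        __slots__ = ('type', 'text')

    t = ExampleToken()
    t.type = 1               # allowed: 'type' is declared in __slots__
    try:
        t.channelIndex = 0   # not declared, so assignment fails
    except AttributeError as err:
        print(err)           # no such slot, and no __dict__ to fall back on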

0 commit comments
