
Commit adafa22

add init method
1 parent d47ee78 commit adafa22

9 files changed: +86 −63 lines changed

examples/test.py → example.py

File renamed without changes.

port_CSharp/PythonLexerBase.cs (+19 −12)

@@ -33,35 +33,37 @@ THE SOFTWARE.
 public abstract class PythonLexerBase : Lexer
 {
     // A stack that keeps track of the indentation lengths
-    private Stack<int> _indentLengthStack = new Stack<int>();
+    private Stack<int> _indentLengthStack;
     // A list where tokens are waiting to be loaded into the token stream
-    private LinkedList<IToken> _pendingTokens = new LinkedList<IToken>();
+    private LinkedList<IToken> _pendingTokens;
     // last pending token types
-    private int _previousPendingTokenType = 0;
-    private int _lastPendingTokenTypeFromDefaultChannel = 0;
+    private int _previousPendingTokenType;
+    private int _lastPendingTokenTypeFromDefaultChannel;

     // The amount of opened parentheses, square brackets, or curly braces
-    private int _opened = 0;
+    private int _opened;

-    private bool _wasSpaceIndentation = false;
-    private bool _wasTabIndentation = false;
-    private bool _wasIndentationMixedWithSpacesAndTabs = false;
+    private bool _wasSpaceIndentation;
+    private bool _wasTabIndentation;
+    private bool _wasIndentationMixedWithSpacesAndTabs;
     private const int INVALID_LENGTH = -1;

-    private CommonToken _curToken = null!; // current (under processing) token
-    private IToken _ffgToken = null!; // following (look ahead) token
+    private CommonToken _curToken; // current (under processing) token
+    private IToken _ffgToken; // following (look ahead) token

     private const string _ERR_TXT = " ERROR: ";

     protected PythonLexerBase(ICharStream input) : base(input)
     {
+        Init();
     }

     protected PythonLexerBase(ICharStream input, TextWriter output, TextWriter errorOutput) : base(input, output, errorOutput)
     {
+        Init();
     }

-    public override void Reset()
+    private void Init()
     {
         _indentLengthStack = new Stack<int>();
         _pendingTokens = new LinkedList<IToken>();
@@ -73,7 +75,6 @@ public override void Reset()
         _wasIndentationMixedWithSpacesAndTabs = false;
         _curToken = null!;
         _ffgToken = null!;
-        base.Reset();
     }

     public override IToken NextToken() // reading the input stream until a return EOF
@@ -385,4 +386,10 @@ private void ReportError(string errMsg)
         // the ERROR_TOKEN will raise an error in the parser
         CreateAndAddPendingToken(PythonLexer.ERROR_TOKEN, TokenConstants.DefaultChannel, _ERR_TXT + errMsg, _ffgToken);
     }
+
+    public override void Reset()
+    {
+        Init();
+        base.Reset();
+    }
 }

port_CSharp/README.md (+3 −3)

@@ -1,7 +1,7 @@
 ### C#

 #### Command line example for Windows:
-- first create a C# project called grun_tokens then copy the two grammar files and test.py to this directory
+- first create a C# project called grun_tokens then copy the two grammar files and example.py to this directory
 ```bash
 dotnet new console -o . -n grun_tokens -f netcoreapp3.1
 del program.cs
@@ -10,14 +10,14 @@

 ```bash
 copy ..\*.g4
-copy ..\examples\test.py
+copy ..\example.py
 ```

 ```bash
 antlr4 -Dlanguage=CSharp PythonLexer.g4
 antlr4 -Dlanguage=CSharp PythonParser.g4
 dotnet build
-dotnet run test.py --no-build
+dotnet run example.py --no-build
 ```

 #### Related links:

port_CSharp/grun_tokens.cs (+1 −1)

@@ -31,7 +31,7 @@ private static string GetTokenMetaDataWithTokenName(PythonParser parser, IToken
             return metaData.Substring(0, lesserPos + 2) // modified format: [@TokenIndex,StartIndex:StopIndex='Text',<TokenName>,channel=Channel,Line:Column]
                 + parser.Vocabulary.GetSymbolicName(token.Type)
                 + metaData.Substring(greaterPos);
-
+
         }
     }
 }

port_Java/PythonLexerBase.java (+18 −13)

@@ -33,33 +33,33 @@ of this software and associated documentation files (the "Software"), to deal

 public abstract class PythonLexerBase extends Lexer {
     // A stack that keeps track of the indentation lengths
-    private Deque<Integer> _indentLengthStack = new ArrayDeque<>();
+    private Deque<Integer> _indentLengthStack;
     // A linked where tokens are waiting to be loaded into the token stream
-    private LinkedList<Token> _pendingTokens = new LinkedList<>();
+    private LinkedList<Token> _pendingTokens;

     // last pending token types
-    private int _previousPendingTokenType = 0;
-    private int _lastPendingTokenTypeFromDefaultChannel = 0;
+    private int _previousPendingTokenType;
+    private int _lastPendingTokenTypeFromDefaultChannel;

     // The amount of opened parentheses, square brackets or curly braces
-    private int _opened = 0;
+    private int _opened;

-    private boolean _wasSpaceIndentation = false;
-    private boolean _wasTabIndentation = false;
-    private boolean _wasIndentationMixedWithSpacesAndTabs = false;
+    private boolean _wasSpaceIndentation;
+    private boolean _wasTabIndentation;
+    private boolean _wasIndentationMixedWithSpacesAndTabs;
     private final int _INVALID_LENGTH = -1;

-    private CommonToken _curToken = null; // current (under processing) token
-    private Token _ffgToken = null; // following (look ahead) token
+    private CommonToken _curToken; // current (under processing) token
+    private Token _ffgToken; // following (look ahead) token

     private final String _ERR_TXT = " ERROR: ";

     protected PythonLexerBase(CharStream input) {
         super(input);
+        init();
     }

-    @Override
-    public void reset() {
+    private void init() {
         _indentLengthStack = new ArrayDeque<>();
         _pendingTokens = new LinkedList<>();
         _previousPendingTokenType = 0;
@@ -70,7 +70,6 @@ public void reset() {
         _wasIndentationMixedWithSpacesAndTabs = false;
         _curToken = null;
         _ffgToken = null;
-        super.reset();
     }

     @Override
@@ -316,4 +315,10 @@ private void reportError(final String errMsg) {
         // the ERROR_TOKEN will raise an error in the parser
         createAndAddPendingToken(PythonLexer.ERROR_TOKEN, Token.DEFAULT_CHANNEL, _ERR_TXT + errMsg, _ffgToken);
     }
+
+    @Override
+    public void reset() {
+        init();
+        super.reset();
+    }
 }

port_Java/README.md (+5 −5)

@@ -1,26 +1,26 @@
 ### Java 8

 #### Command line example:
-- first copy the two grammar files and the test.py to this directory
+- first copy the two grammar files and the example.py to this directory

 Unix:
 ```bash
 cp ../*.g4 .
-cp ../examples/test.py .
+cp ../example.py .
 ```

 Windows:
 ```bash
 copy ..\*.g4
-copy ..\examples\test.py
+copy ..\example.py
 ```

 ```bash
 antlr4 PythonLexer.g4
 antlr4 PythonParser.g4
 javac *.java
-grun Python file_input -tokens test.py
-grun Python file_input -gui test.py
+grun Python file_input -tokens example.py
+grun Python file_input -gui example.py
 ```

 #### Related link:

port_JavaScript/PythonLexerBase.js (+20 −14)

@@ -35,29 +35,31 @@ export default class PythonLexerBase extends antlr4.Lexer {
         super(input);

         // A stack that keeps track of the indentation lengths
-        this._indentLengthStack = [];
+        this._indentLengthStack;
         // A list where tokens are waiting to be loaded into the token stream
-        this._pendingTokens = [];
+        this._pendingTokens;

         // last pending token types
-        this._previousPendingTokenType = 0;
-        this._lastPendingTokenTypeFromDefaultChannel = 0;
+        this._previousPendingTokenType;
+        this._lastPendingTokenTypeFromDefaultChannel;

         // The amount of opened parentheses, square brackets or curly braces
-        this._opened = 0;
+        this._opened;

-        this._wasSpaceIndentation = false;
-        this._wasTabIndentation = false;
-        this._wasIndentationMixedWithSpacesAndTabs = false;
-        this._INVALID_LENGTH = -1;
+        this._wasSpaceIndentation;
+        this._wasTabIndentation;
+        this._wasIndentationMixedWithSpacesAndTabs;
+        this._INVALID_LENGTH;

-        this._curToken = null; // current (under processing) token
-        this._ffgToken = null; // following (look ahead) token
+        this._curToken; // current (under processing) token
+        this._ffgToken; // following (look ahead) token
+
+        this._ERR_TXT;

-        this._ERR_TXT = " ERROR: ";
+        this.init();
     }

-    reset() {
+    init() {
         this._indentLengthStack = [];
         this._pendingTokens = [];
         this._previousPendingTokenType = 0;
@@ -70,7 +72,6 @@ export default class PythonLexerBase extends antlr4.Lexer {
         this._curToken = null;
         this._ffgToken = null;
         this._ERR_TXT = " ERROR: ";
-        super.reset();
     }

     nextToken() { // reading the input stream until a return EOF
@@ -325,4 +326,9 @@ export default class PythonLexerBase extends antlr4.Lexer {
         // the ERROR_TOKEN will raise an error in the parser
         this.createAndAddPendingToken(PythonLexer.ERROR_TOKEN, Token.DEFAULT_CHANNEL, this._ERR_TXT + errMsg, this._ffgToken);
     }
+
+    reset() {
+        this.init();
+        super.reset();
+    }
 }

port_JavaScript/grun_tokens.js (+1 −1)

@@ -20,4 +20,4 @@ tokens.fill();
 for (const token of tokens.tokens) {
     console.log(getTokenMetaDataWithTokenName(token));
 }
-const tree = parser.file_input();
+const tree = parser.file_input();

port_Python3/PythonLexerBase.py (+19 −14)

@@ -33,29 +33,31 @@ def __init__(self, input: InputStream, output: TextIO = sys.stdout):
         super().__init__(input, output)

         # A stack that keeps track of the indentation lengths
-        self._indent_length_stack: Deque[int] = deque()
+        self._indent_length_stack: Deque[int]

         # A list where tokens are waiting to be loaded into the token stream
-        self._pending_tokens: list[CommonToken] = []
+        self._pending_tokens: list[CommonToken]

         # last pending token types
-        self._previous_pending_token_type: int = 0
-        self._last_pending_token_type_from_default_channel: int = 0
+        self._previous_pending_token_type: int
+        self._last_pending_token_type_from_default_channel: int

         # The amount of opened parentheses, square brackets or curly braces
-        self._opened: int = 0
+        self._opened: int

-        self._was_space_indentation: bool = False
-        self._was_tab_indentation: bool = False
-        self._was_indentation_mixed_with_spaces_and_tabs: bool = False
-        self._INVALID_LENGTH: int = -1
+        self._was_space_indentation: bool
+        self._was_tab_indentation: bool
+        self._was_indentation_mixed_with_spaces_and_tabs: bool
+        self._INVALID_LENGTH: int

-        self._cur_token: CommonToken = None # current (under processing) token
-        self._ffg_token: CommonToken = None # following (look ahead) token
+        self._cur_token: CommonToken # current (under processing) token
+        self._ffg_token: CommonToken # following (look ahead) token

-        self._ERR_TXT: str = " ERROR: "
+        self._ERR_TXT: str

-    def reset(self):
+        self.init()
+
+    def init(self):
         self._indent_length_stack = deque()
         self._pending_tokens = []
         self._previous_pending_token_type = 0
@@ -68,7 +70,6 @@ def reset(self):
         self._cur_token = None
         self._ffg_token = None
         self._ERR_TXT = " ERROR: "
-        super().reset()

     def nextToken(self) -> CommonToken: # reading the input stream until a return EOF
         self.check_next_token()
@@ -254,3 +255,7 @@ def report_error(self, err_msg):

         # the ERROR_TOKEN will raise an error in the parser
         self.create_and_add_pending_token(self.ERROR_TOKEN, Token.DEFAULT_CHANNEL, self._ERR_TXT + err_msg, self._ffg_token)
+
+    def reset(self):
+        self.init()
+        super().reset()
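
Every port in this commit applies the same refactoring: field initialization moves out of the constructor (and out of reset()) into a new init() method, the constructors call init(), and the overridden reset() now calls init() before delegating to the base class. The sketch below is not part of the commit; it is a minimal Python illustration of that resulting shape, using a hypothetical stand-in for the ANTLR Lexer base class and only a subset of the fields from the Python port above.

```python
from collections import deque


class Lexer:
    """Stand-in for the ANTLR runtime's Lexer base class, reduced to reset()."""
    def reset(self):
        # the real runtime rewinds the input stream and clears lexer state here
        pass


class PythonLexerBase(Lexer):
    def __init__(self):
        super().__init__()
        # all field assignment is delegated to init(), as in the commit
        self.init()

    def init(self):
        # every piece of mutable lexer state gets its starting value in one place
        self._indent_length_stack = deque()
        self._pending_tokens = []
        self._previous_pending_token_type = 0
        self._opened = 0
        self._cur_token = None
        self._ffg_token = None

    def reset(self):
        # reuse the same initialization on reset, then defer to the base class
        self.init()
        super().reset()


lexer = PythonLexerBase()
lexer._opened = 3      # pretend some input has been lexed
lexer.reset()          # brings the state back to its initial values
assert lexer._opened == 0
```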
