
Commit 385089e

comments and formatting
1 parent 13770e8 commit 385089e

File tree

1 file changed: +35 −34 lines changed

pkg/scanner/scanner.go

+35 −34
@@ -18,7 +18,6 @@ var Keywords = map[string]TokenType{
 	"cdr": CDR,
 	"nil": NIL,
 	"true": TRUE,
-	//"false": FALSE,
 	"and?": ANDQ,
 	"or?": ORQ,
 	"not?": NOTQ,
@@ -37,7 +36,6 @@ var KeywordsReverse = map[TokenType]string{
 	CDR: "cdr",
 	NIL: "nil",
 	TRUE: "true",
-	//FALSE: "false",
 	ANDQ: "and?",
 	ORQ: "or?",
 	NOTQ: "not?",
@@ -47,7 +45,6 @@ var KeywordsReverse = map[TokenType]string{
 	NILQ: "nil?",
 }
 
-
 type Scanner struct {
 	Source string
 	Tokens []Token
@@ -97,7 +94,6 @@ func (s *Scanner) addTokenWithTypeAndLiteral(thisType TokenType, literal interfa
 
 func (s *Scanner) ScanTokens() []Token {
 	// Driving loop
-	// Note that s.curr is not incremented in Number, String, or Identifier readers since they handle their own iteration
 	for !s.isAtEnd() {
 		s.Start = s.Curr
 		s.ScanToken()
@@ -111,15 +107,24 @@ func (s *Scanner) ScanTokens() []Token {
 func (s *Scanner) ScanToken() {
 	ch := s.advance()
 	switch ch {
-	case '(': s.addToken(LEFT_PAREN)
-	case ')': s.addToken(RIGHT_PAREN)
-	case '.': s.addToken(DOT)
-	case '-': s.addToken(MINUS)
-	case '+': s.addToken(PLUS)
-	case '*': s.addToken(STAR)
-	case '=': s.addToken(EQUAL)
-	case '<': s.addToken(LESS)
-	case '>': s.addToken(GREATER)
+	case '(':
+		s.addToken(LEFT_PAREN)
+	case ')':
+		s.addToken(RIGHT_PAREN)
+	case '.':
+		s.addToken(DOT)
+	case '-':
+		s.addToken(MINUS)
+	case '+':
+		s.addToken(PLUS)
+	case '*':
+		s.addToken(STAR)
+	case '=':
+		s.addToken(EQUAL)
+	case '<':
+		s.addToken(LESS)
+	case '>':
+		s.addToken(GREATER)
 	case '/':
 		if s.match('/') {
 			for !s.isAtEnd() && s.peek() != '\n' {
@@ -129,19 +134,14 @@ func (s *Scanner) ScanToken() {
 		} else {
 			s.addToken(SLASH)
 		}
-	// Hanle comments
-	// case ';':
-	// for !s.isAtEnd() && s.peek() != '\n' {
-	// s.advance()
-	// }
-	// Handle whitespace
-	case ' ':
-	case '\r':
-	case '\t':
+	case ' ':
+	case '\r':
+	case '\t':
 	case '\n':
 		s.Line++
 	// Handle strings
-	case '"': s.tokenizeString()
+	case '"':
+		s.tokenizeString()
 	default:
 		if unicode.IsDigit(rune(ch)) {
 			s.tokenizeNumber()
@@ -152,7 +152,7 @@ func (s *Scanner) ScanToken() {
 			LoxError(s.Line, errorStr)
 		}
 	}
-
+
 }
 
 func (s *Scanner) match(expected rune) bool {
@@ -163,7 +163,7 @@ func (s *Scanner) match(expected rune) bool {
 		return false
 	}
 	s.Curr++
-	return true;
+	return true
 }
 
 func (s *Scanner) tokenizeString() {
@@ -198,18 +198,19 @@ func (s *Scanner) tokenizeString() {
 		LoxError(s.Line, errorStr)
 	} else {
 		// Return token using substring created from initial and current positions
-		s.addTokenWithTypeAndLiteral(STRING, s.Source[s.Start+1 : s.Curr-1])
+		s.addTokenWithTypeAndLiteral(STRING, s.Source[s.Start+1:s.Curr-1])
 	}
 
 	// Return token using substring created from initial and current positions
}
 
 // Number reader for Scanner
 func (s *Scanner) tokenizeNumber() {
-	// Track initial position and whether or not a dot has been found
+	// Track initial position and whether a dot has been found
 	foundDot := false
 
 	// Iterate until end of number or end of file
+	// If s.Curr has not overflowed, and the current character is a digit or a dot
 	for s.Curr < len(s.Source) && (unicode.IsDigit(rune(s.Source[s.Curr])) || s.Source[s.Curr] == '.') {
 
 		// Check for dot
@@ -229,9 +230,9 @@ func (s *Scanner) tokenizeNumber() {
 	floatVal, err := strconv.ParseFloat(s.Source[s.Start:s.Curr], 64)
 
 	if err != nil {
-		errorStr := fmt.Sprintf("Invalid number at line %d", s.Line)
+		errorStr := fmt.Sprintf("Invalid number at line %d", s.Line)
 		LoxError(s.Line, errorStr)
-	}
+	}
 	// Return token using substring created from initial and current positions
 	s.addTokenWithTypeAndLiteral(NUMBER, floatVal)
 }
@@ -247,9 +248,9 @@ func (s *Scanner) tokenizeSymbol() {
 	// Check for existing keyword
 	symbol := s.Source[s.Start:s.Curr]
 	if tokentype, exists := Keywords[symbol]; exists {
-		s.addToken(tokentype)
-	} else {
-		// Set to default value if the key is not found
-		s.addToken(SYMBOL)
-	}
+		s.addToken(tokentype)
+	} else {
+		// Set to default value if the key is not found
+		s.addToken(SYMBOL)
+	}
 }
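
For context, a minimal usage sketch of the scanner as it stands after this commit. The module import path and the input string are assumptions for illustration only; the exported Scanner fields and the ScanTokens signature are taken from the diff above.

package main

import (
	"fmt"

	"example.com/lox/pkg/scanner" // hypothetical module path, not from this repository
)

func main() {
	// Zero values for Start, Curr, and Line are assumed to be a valid starting state.
	s := &scanner.Scanner{Source: "(+ 1 2) // trailing comment"}
	for _, tok := range s.ScanTokens() {
		fmt.Println(tok)
	}
}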

0 commit comments
