// SPDX-License-Identifier: Apache-2.0

package firrtl

import org.antlr.v4.runtime.{CommonToken, Token}

import scala.annotation.tailrec
import scala.collection.mutable

import firrtl.antlr.FIRRTLParser

/*
 * ANTLR4 helper to handle indentation tokens in Lexer
 * code adapted from: "https://github.com/yshavit/antlr-denter" (Yuval Shavit, MIT license)
 */
abstract class LexerHelper {
  import FIRRTLParser.{DEDENT, INDENT, NEWLINE}

  private val tokenBuffer = mutable.Queue.empty[Token]
  private val indentations = mutable.Stack[Int]()
  private var reachedEof = false

  private def eofHandler(t: Token): Token = {
    // when we reach EOF, unwind all indentations. If there aren't any, insert a NEWLINE. This lets the grammar treat
    // un-indented expressions as just being NEWLINE-terminated, rather than NEWLINE|EOF.
    val ret =
      if (indentations.isEmpty)
        createToken(NEWLINE, t)
      else
        unwindTo(0, t)
    tokenBuffer.enqueue(t)
    reachedEof = true
    ret
  }

  def nextToken(): Token = {
    // first run
    if (indentations.isEmpty) {
      indentations.push(0)

      @tailrec
      def findFirstRead(): Token = {
        val t = pullToken()
        if (t.getType != NEWLINE) t else findFirstRead()
      }

      val firstRealToken = findFirstRead()
      if (firstRealToken.getCharPositionInLine > 0) {
        indentations.push(firstRealToken.getCharPositionInLine)
        tokenBuffer.enqueue(createToken(INDENT, firstRealToken))
      }
      tokenBuffer.enqueue(firstRealToken)
    }
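
    // A NEWLINE can be followed by more NEWLINEs (blank lines). They are collapsed here, and the indentation
    // carried by the last one decides whether to pass the NEWLINE through, emit an INDENT, or emit DEDENTs.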
    def handleNewlineToken(token: Token): Token = {
      @tailrec
      def nonNewline(token: Token): (Token, Token) = {
        val nextNext = pullToken()
        if (nextNext.getType == NEWLINE)
          nonNewline(nextNext)
        else
          (token, nextNext)
      }
      val (nxtToken, nextNext) = nonNewline(token)

      if (nextNext.getType == Token.EOF)
        eofHandler(nextNext)
      else {
        val nlText = nxtToken.getText
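        // The lexer's NEWLINE rule is assumed to match the line break plus the next line's leading spaces,
        // so the indentation width is the token text length minus the length of the break ("\r\n" or "\n").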
        val indent =
          if (nlText.length > 0 && nlText.charAt(0) == '\r')
            nlText.length - 2
          else
            nlText.length - 1
        val prevIndent = indentations.head
        val retToken =
          if (indent == prevIndent)
            nxtToken
          else if (indent > prevIndent) {
            indentations.push(indent)
            createToken(INDENT, nxtToken)
          } else {
            unwindTo(indent, nxtToken)
          }
        tokenBuffer.enqueue(nextNext)
        retToken
      }
    }

    val t =
      if (tokenBuffer.isEmpty)
        pullToken()
      else
        tokenBuffer.dequeue()

    if (reachedEof)
      t
    else if (t.getType == NEWLINE)
      handleNewlineToken(t)
    else if (t.getType == Token.EOF)
      eofHandler(t)
    else
      t
  }
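
  // Illustrative example (a sketch, not part of the implementation): assuming the NEWLINE rule described above,
  // an input such as
  //
  //   circuit Top :
  //     module Top :
  //       skip
  //
  // comes out of nextToken() roughly as
  //
  //   circuit  Top  :  <INDENT>  module  Top  :  <INDENT>  skip  <NEWLINE>  <DEDENT>  <DEDENT>  <EOF>
  //
  // i.e. a NEWLINE that increases the indentation is replaced by an INDENT, one that keeps it is passed through,
  // and one that decreases it is emitted followed by the matching number of DEDENTs (see unwindTo below).
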
  // will be overridden to FIRRTLLexer.super.nextToken() in the g4 file
  protected def pullToken(): Token
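
  // Creates a synthetic NEWLINE/INDENT/DEDENT token, copying source position information from `copyFrom` so that
  // downstream error messages still point at the right place in the input.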
  private def createToken(tokenType: Int, copyFrom: Token): Token =
    new CommonToken(copyFrom) {
      setType(tokenType)
      tokenType match {
        case `NEWLINE` => setText("<NEWLINE>")
        case `INDENT`  => setText("<INDENT>")
        case `DEDENT`  => setText("<DEDENT>")
      }
    }

  /**
    * Returns a NEWLINE token and queues up the DEDENTs (or, if `targetIndent` does not match any indentation on the
    * stack, a single INDENT) needed to reach `targetIndent`.
    *
    * @param targetIndent the "size" of the indentation (number of spaces) by the end
    * @param copyFrom     the triggering token
    * @return the synthetic NEWLINE token; any DEDENTs remain queued in `tokenBuffer`
    */
  private def unwindTo(targetIndent: Int, copyFrom: Token): Token = {
    assert(tokenBuffer.isEmpty, tokenBuffer)
    tokenBuffer.enqueue(createToken(NEWLINE, copyFrom))
    // To make things easier, we'll queue up ALL of the dedents, and then pop off the first one.
    // For example, here's how some text is analyzed:
    //
    //  Text          :  Indentation  :  Action     :  Indents Deque
    //  [ baseline ]  :  0            :  nothing    :  [0]
    //  [   foo    ]  :  2            :  INDENT     :  [0, 2]
    //  [    bar   ]  :  3            :  INDENT     :  [0, 2, 3]
    //  [ baz      ]  :  0            :  DEDENT x2  :  [0]

    @tailrec
    def doPop(): Unit = {
      val prevIndent = indentations.pop()
      if (prevIndent < targetIndent) {
        indentations.push(prevIndent)
        tokenBuffer.enqueue(createToken(INDENT, copyFrom))
      } else if (prevIndent > targetIndent) {
        tokenBuffer.enqueue(createToken(DEDENT, copyFrom))
        doPop()
      }
    }

    doPop()
    indentations.push(targetIndent)
    tokenBuffer.dequeue()
  }
}
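
// A minimal sketch of the intended wiring (assumed from the pullToken comment above, not taken verbatim from
// FIRRTL.g4): the generated lexer embeds a LexerHelper whose pullToken() calls the original
// FIRRTLLexer.super.nextToken(), and the lexer's nextToken() is routed through the helper, e.g.
//
//   @lexer::members {
//     private val lexerHelper = new LexerHelper {
//       override def pullToken(): Token = FIRRTLLexer.super.nextToken()
//     }
//     override def nextToken(): Token = lexerHelper.nextToken()
//   }
//
// so the parser only ever sees the indentation-aware stream of NEWLINE/INDENT/DEDENT tokens.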