Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 31 additions & 9 deletions hdlparse/minilexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

'''Minimalistic lexer engine inspired by the PyPigments RegexLexer'''

__version__ = '1.0.5'
__version__ = '1.0.7'

class MiniLexer(object):
'''Simple lexer state machine with regex matching rules'''
Expand All @@ -20,7 +20,7 @@ def __init__(self, tokens, flags=re.MULTILINE):
flags (int): Optional regex flags
'''
self.tokens = {}

self.flags = flags
# Pre-process the state definitions
for state, patterns in tokens.iteritems():
full_patterns = []
Expand All @@ -38,8 +38,27 @@ def __init__(self, tokens, flags=re.MULTILINE):

full_patterns.append((pat, action, new_state))
self.tokens[state] = full_patterns


#print("[minilexer] [state = {}] type(state)={}".format(state,type(state)) )


def insert_new_token(self, state, new_token):
    '''Append additional matching rules to an existing lexer state.

    Each rule is compiled with the same regex flags that were used for
    the original rule set (``self.flags``) so the added patterns behave
    consistently with the pre-processed ones.

    Args:
      state (str): Name of an existing state in ``self.tokens`` to extend.
      new_token (iterable): Iterable of ``(pattern, action)`` pairs where
        ``pattern`` is a regex string and ``action`` is the token/action
        object yielded on a match. Appended rules never trigger a state
        change (the new-state slot is always ``None``).

    Raises:
      KeyError: If ``state`` is not a known state.
      re.error: If a pattern string fails to compile.
    '''
    for spec in new_token:
        # Index rather than unpack so tuples with trailing extra
        # elements are tolerated, matching the original behavior.
        pat = re.compile(spec[0], self.flags)
        self.tokens[state].append((pat, spec[1], None))

def delete_last_token(self, state):
    '''Discard the most recently added rule of a lexer state.

    Args:
      state (str): Name of the state whose last rule is removed.

    Raises:
      KeyError: If ``state`` is not a known state.
      IndexError: If the state has no rules left to remove.
    '''
    del self.tokens[state][-1]




def run(self, text):
'''Run lexer rules against a source text

Expand All @@ -53,16 +72,19 @@ def run(self, text):
stack = ['root']
pos = 0

patterns = self.tokens[stack[-1]]
self.patterns = self.tokens[stack[-1]]

while True:
for pat, action, new_state in patterns:
for pat, action, new_state in self.patterns:
#print("[minilexer] action = {}".format(action))

m = pat.match(text, pos)
if m:
#print("[minilexer] se encontro {}".format(action))
if action:
#print('## MATCH: {} -> {}'.format(m.group(), action))
yield (pos, m.end()-1), action, m.groups()

pos = m.end()

if new_state:
Expand All @@ -72,7 +94,7 @@ def run(self, text):
stack.append(new_state)

#print('## CHANGE STATE:', pos, new_state, stack)
patterns = self.tokens[stack[-1]]
self.patterns = self.tokens[stack[-1]]

break

Expand All @@ -84,4 +106,4 @@ def run(self, text):
pos += 1
except IndexError:
break

Loading