Here’s my PLY lexer for VHDL:
<code>import ply.lex as lex
reserved = {
    'abs': 'ABS',
    'access': 'ACCESS',
    'after': 'AFTER',
    'alias': 'ALIAS',
    'all': 'ALL',
    'and': 'AND',
    'architecture': 'ARCHITECTURE',
    'array': 'ARRAY',
    'assert': 'ASSERT',
    'attribute': 'ATTRIBUTE',
    'begin': 'BEGIN',
    'block': 'BLOCK',
    'body': 'BODY',
    'buffer': 'BUFFER',
    'bus': 'BUS',
    'case': 'CASE',
    'component': 'COMPONENT',
    'configuration': 'CONFIGURATION',
    'constant': 'CONSTANT',
    'disconnect': 'DISCONNECT',
    'downto': 'DOWNTO',
    'else': 'ELSE',
    'elsif': 'ELSIF',
    'end': 'END',
    'entity': 'ENTITY',
    'exit': 'EXIT',
    'file': 'FILE',
    'for': 'FOR',
    'function': 'FUNCTION',
    'generate': 'GENERATE',
    'generic': 'GENERIC',
    'group': 'GROUP',
    'guarded': 'GUARDED',
    'if': 'IF',
    'impure': 'IMPURE',
    'in': 'IN',
    'inertial': 'INERTIAL',
    'inout': 'INOUT',
    'is': 'IS',
    'label': 'LABEL',
    'library': 'LIBRARY',
    'linkage': 'LINKAGE',
    'literal': 'LITERAL',
    'loop': 'LOOP',
    'map': 'MAP',
    'mod': 'MOD',
    'nand': 'NAND',
    'new': 'NEW',
    'next': 'NEXT',
    'nor': 'NOR',
    'not': 'NOT',
    'null': 'NULL',
    'of': 'OF',
    'on': 'ON',
    'open': 'OPEN',
    'or': 'OR',
    'others': 'OTHERS',
    'out': 'OUT',
    'package': 'PACKAGE',
    'port': 'PORT',
    'postponed': 'POSTPONED',
    'procedure': 'PROCEDURE',
    'process': 'PROCESS',
    'pure': 'PURE',
    'range': 'RANGE',
    'record': 'RECORD',
    'register': 'REGISTER',
    'reject': 'REJECT',
    'rem': 'REM',
    'report': 'REPORT',
    'return': 'RETURN',
    'rol': 'ROL',
    'ror': 'ROR',
    'select': 'SELECT',
    'severity': 'SEVERITY',
    'signal': 'SIGNAL',
    'shared': 'SHARED',
    'sla': 'SLA',
    'sll': 'SLL',
    'sra': 'SRA',
    'srl': 'SRL',
    'subtype': 'SUBTYPE',
    'then': 'THEN',
    'to': 'TO',
    'transport': 'TRANSPORT',
    'type': 'TYPE',
    'unaffected': 'UNAFFECTED',
    'units': 'UNITS',
    'until': 'UNTIL',
    'use': 'USE',
    'variable': 'VARIABLE',
    'wait': 'WAIT',
    'when': 'WHEN',
    'while': 'WHILE',
    'with': 'WITH',
    'xnor': 'XNOR',
    'xor': 'XOR'
}
# List of token names based on the VHDL grammar
tokens = (
    'IDENTIFIER',
    'NUMBER',
    'STRING_LITERAL',
    'PLUS',
    'MINUS',
    'STAR',
    'SLASH',
    'EQ',
    'NEQ',
    'LT',
    'LE',
    'GT',
    'GE',
    'LPAREN',
    'RPAREN',
    'COMMA',
    'SEMICOLON',
    'COLON',
    'DOT',
    'ARROW',
    'LBRACKET',
    'RBRACKET',
    'LCURLY',
    'RCURLY',
    'COMMENT',
) + tuple(reserved.values())
# Regular expression rules for simple tokens
t_PLUS = r'\+'
t_MINUS = r'-'
t_STAR = r'\*'
t_SLASH = r'/'
t_EQ = r'='
t_NEQ = r'/='
t_LT = r'<'
t_LE = r'<='
t_GT = r'>'
t_GE = r'>='
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_COMMA = r','
t_SEMICOLON = r';'
t_COLON = r':'
t_DOT = r'\.'
t_ARROW = r'=>'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_LCURLY = r'\{'
t_RCURLY = r'\}'
# Regular expression rules with action code
def t_IDENTIFIER(t):
    r'[a-zA-Z_][a-zA-Z0-9_]*'
    t.type = reserved.get(t.value, 'IDENTIFIER')  # Check for reserved words
    return t

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_STRING_LITERAL(t):
    r'"([^\\\n]|(\\.))*?"'
    return t

def t_COMMENT(t):
    r'--.*'
    pass  # No return value. Token discarded.

# Define a rule so we can track line numbers
def t_newline(t):
    r'\n+'
    t.lexer.lineno += len(t.value)

# A string containing ignored characters (spaces and tabs)
t_ignore = ' \t'

# Error handling rule
def t_error(t):
    print(f"Illegal character '{t.value[0]}'")
    t.lexer.skip(1)
# Build the lexer
lexer = lex.lex()
# Test it out
data = '''
-- This is a comment
entity MyEntity is
    port(
        clk : in std_logic;
        rst : in std_logic
    );
end MyEntity;
'''
# Give the lexer some input
lexer.input(data)
# Tokenize
for tok in lexer:
    print(tok)
</code>
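In case it helps, here's the quick sanity check I run on the lexer by itself (a throwaway helper, not part of the module above) to make sure the multi-character operators come out as single tokens:
<code># Throwaway check: tokenize a snippet and collect (type, value) pairs.
def dump_tokens(source):
    lexer.input(source)
    return [(tok.type, tok.value) for tok in lexer]

# '<=' should lex as one LE token rather than LT followed by EQ,
# since PLY tries longer string-defined rules first.
print(dump_tokens('q <= d;'))
</code>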
And here’s my PLY parser for VHDL:
<code>import ply.yacc as yacc
from vhdl_lexer import tokens # Import token definitions
def p_design_file(p):
    '''design_file : entity_declaration'''
    print("p_design_file", p[1])
    p[0] = p[1]

def p_entity_declaration(p):
    '''entity_declaration : ENTITY IDENTIFIER IS port_clause END IDENTIFIER SEMICOLON'''
    print("p_entity_declaration", p[2], p[4])
    p[0] = ('entity_declaration', p[2], p[4])

def p_port_clause(p):
    '''port_clause : PORT LPAREN port_list RPAREN SEMICOLON
                   | empty'''
    print("p_port_clause", p[1])
    p[0] = ('port_clause', p[3] if len(p) > 2 else None)

def p_port_list(p):
    '''port_list : port_list COMMA port
                 | port'''
    print("p_port_list", p[1:])
    if len(p) == 4:
        p[0] = p[1] + [p[3]]
    else:
        p[0] = [p[1]]

def p_port(p):
    '''port : IDENTIFIER COLON direction type'''
    print("p_port", p[1], p[3], p[4])
    p[0] = ('port', p[1], p[3], p[4])

def p_direction(p):
    '''direction : IN
                 | OUT
                 | INOUT'''
    print("p_direction", p[1])
    p[0] = p[1]

def p_type(p):
    '''type : IDENTIFIER'''
    print("p_type", p[1])
    p[0] = p[1]

def p_empty(p):
    '''empty :'''
    pass

def p_error(p):
    if p:
        print(f"Syntax error at '{p.value}', line {p.lineno}, position {p.lexpos}")
    else:
        print("Syntax error at EOF")

# Set the start rule for the parser
parser = yacc.yacc(start='design_file', debug=True)
</code>
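For more detail I also ran a parse with yacc's runtime trace turned on; PLY's parse() accepts a logging.Logger through its debug argument. This is just a debugging snippet, assuming the two modules are saved as vhdl_lexer.py and vhdl_parser.py as in my imports:
<code>import logging
from vhdl_lexer import lexer
from vhdl_parser import parser

# Send the shift/reduce trace to a file so it doesn't drown the console.
logging.basicConfig(filename='parse_trace.log', filemode='w', level=logging.DEBUG)
log = logging.getLogger()

# A minimal entity with no port clause; the trace logs every
# shift/reduce step and the lookahead token at each point.
result = parser.parse('entity E is end E;', lexer=lexer, debug=log)
print(result)
</code>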
Now when I try to use this parser/lexer together, it gets as far as the direction rule and then bails out with a syntax error. Here’s my test program:
<code>from vhdl_lexer import lexer
from vhdl_parser import parser
# Test cases
test_cases = [
'''
entity MyEntity is
port(
clk : in std_logic;
rst : in std_logic
);
end MyEntity;
'''
]
# Function to process each test case
def process_test_case(i, data):
    print(f"\n=> Test Case {i}:")
    print(data)
    print("\n-> parse")
    result = parser.parse(data, lexer=lexer)
    print("Result:", result)
    print("\n" + "=" * 80 + "\n")

# Function to run every test case through the parser
def test_parser(test_cases):
    for i, data in enumerate(test_cases, 1):
        process_test_case(i, data)

# Run the tests
print("START")
test_parser(test_cases)
print("END")
</code>
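To rule out the lexer I also pushed the source through a throwaway wrapper that prints each token as yacc pulls it (yacc only needs the lexer object to provide input() and token()):
<code>from vhdl_lexer import lexer
from vhdl_parser import parser

class TracingLexer:
    """Forwards to the real lexer, printing each token yacc requests."""
    def __init__(self, inner):
        self.inner = inner

    def input(self, data):
        self.inner.input(data)

    def token(self):
        tok = self.inner.token()
        print('parser pulled:', tok)
        return tok

# One-line version of the failing test case; this reproduces the
# same syntax error at the first ';' inside the port list.
src = 'entity E is port( clk : in std_logic; rst : in std_logic ); end E;'
result = parser.parse(src, lexer=TracingLexer(lexer))
</code>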
and the output looks like this (the LexToken dump at the top comes from the test code at the bottom of vhdl_lexer.py, which runs on import):
<code>user1@Home-Pc:~/vpar$ python3 vpar.py
LexToken(ENTITY,'entity',3,22)
LexToken(IDENTIFIER,'MyEntity',3,29)
LexToken(IS,'is',3,38)
LexToken(PORT,'port',4,45)
LexToken(LPAREN,'(',4,49)
LexToken(IDENTIFIER,'clk',5,59)
LexToken(COLON,':',5,63)
LexToken(IN,'in',5,65)
LexToken(IDENTIFIER,'std_logic',5,68)
LexToken(SEMICOLON,';',5,77)
LexToken(IDENTIFIER,'rst',6,87)
LexToken(COLON,':',6,91)
LexToken(IN,'in',6,93)
LexToken(IDENTIFIER,'std_logic',6,96)
LexToken(RPAREN,')',7,110)
LexToken(SEMICOLON,';',7,111)
LexToken(END,'end',8,113)
LexToken(IDENTIFIER,'MyEntity',8,117)
LexToken(SEMICOLON,';',8,125)
</code>
<code>START
=> Test Case 1:
entity MyEntity is
port(
clk : in std_logic;
rst : in std_logic
);
end MyEntity;
-> parse
p_direction in
Syntax error at ';', line 12, position 68
Result: None
================================================================================
END
</code>
As you can see, it calls p_direction and then errors out on the ';'… I’m not sure why it didn’t start with p_design_file first.