From 802d56d135bd5de4a7a2d127dd8ed9f0038da227 Mon Sep 17 00:00:00 2001
From: Jussi Pakkanen
Date: Sun, 23 Dec 2012 17:02:39 +0200
Subject: [PATCH] Tokenize strings.

---
 builder.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/builder.py b/builder.py
index 03e55b6e4..923b381a9 100755
--- a/builder.py
+++ b/builder.py
@@ -23,7 +23,8 @@ tokens = ['LPAREN',
           'COMMENT',
           'EQUALS',
           'COMMA',
-          'DOT']
+          'DOT',
+          'STRING']
 
 t_EQUALS = '='
 t_LPAREN = '\(
@@ -32,6 +33,7 @@ t_VARIABLE = '[a-zA-Z][_0-9a-zA-Z]*'
 t_COMMENT = '\#[^\n]*'
 t_COMMA = ','
 t_DOT = '\.'
+t_STRING = "'[^']*'"
 
 t_ignore = ' \t\n'
 
@@ -42,7 +44,7 @@ def t_error(t):
 def test_lexer():
     s = """hello = (something) # this = (that)
 function(h)
-    obj.method(lll)
+    obj.method(lll, 'string')
 """
     lexer = lex.lex()
     lexer.input(s)
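
A minimal, self-contained sketch of what the new rule does, assuming the ply package is installed; the reduced token set and the sample input are illustrative only, not part of the patch. With t_STRING defined, a single-quoted literal now comes back as one STRING token instead of falling through to t_error:

# Illustrative sketch, not the patched builder.py: a reduced ply lexer
# using the same STRING rule the patch adds.
import ply.lex as lex

tokens = ['VARIABLE', 'EQUALS', 'STRING']

t_EQUALS = r'='
t_VARIABLE = r'[a-zA-Z][_0-9a-zA-Z]*'
t_STRING = r"'[^']*'"   # the rule from the patch: single-quoted, no escapes
t_ignore = ' \t\n'

def t_error(t):
    # Skip any character no rule matches.
    t.lexer.skip(1)

lexer = lex.lex()
lexer.input("name = 'string'")
for tok in lexer:
    print(tok.type, tok.value)
# VARIABLE name
# EQUALS =
# STRING 'string'

Note that the regex accepts any characters except a single quote, so escaped quotes inside strings are not handled by this rule.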