Merge "tokenizer: do not try to decode strings on Python 3"
This commit is contained in:
commit
119f96f293
@ -82,12 +82,16 @@ def SearchTokenizer():
|
||||
|
||||
def t_SSTRING(t):
    r"'([^\\']+|\\'|\\\\)*'"
    # NOTE: the raw-string docstring above is the PLY token regex for a
    # single-quoted string literal — it must remain the first statement.
    #
    # Strip the surrounding quotes first. On Python 3 the lexer already
    # yields text (str), which has no .decode() and for which the
    # "string-escape" codec does not exist, so only decode backslash
    # escapes when the value is still a byte string (Python 2).
    t.value = t.value[1:-1]
    if not isinstance(t.value, six.text_type):
        t.value = t.value.decode('string-escape')
    return t
|
||||
|
||||
def t_DSTRING(t):
    r'"([^\\"]+|\\"|\\\\)*"'
    # NOTE: the raw-string docstring above is the PLY token regex for a
    # double-quoted string literal — it must remain the first statement.
    #
    # Strip the surrounding quotes first. On Python 3 the lexer already
    # yields text (str), which has no .decode() and for which the
    # "string-escape" codec does not exist, so only decode backslash
    # escapes when the value is still a byte string (Python 2).
    t.value = t.value[1:-1]
    if not isinstance(t.value, six.text_type):
        t.value = t.value.decode('string-escape')
    return t
|
||||
|
||||
def t_AND(t):
|
||||
|
Loading…
Reference in New Issue
Block a user