Merge "tokenizer: do not try to decode strings on Python 3"
This commit is contained in:
commit
119f96f293
@ -82,12 +82,16 @@ def SearchTokenizer():
|
|||||||
|
|
||||||
def t_SSTRING(t):
    r"'([^\\']+|\\'|\\\\)*'"
    # PLY uses the docstring above as the token's regex: a single-quoted
    # literal allowing escaped quotes (\') and escaped backslashes (\\).
    # Strip the surrounding quotes from the matched text.
    t.value = t.value[1:-1]
    # Python 2 hands the lexer byte strings, which still need their
    # backslash escapes resolved via the Python 2-only "string-escape"
    # codec.  On Python 3 the value is already text (six.text_type is
    # str), so the decode — which would raise LookupError there — is
    # skipped entirely.
    if not isinstance(t.value, six.text_type):
        t.value = t.value.decode('string-escape')
    return t
|
||||||
|
|
||||||
def t_DSTRING(t):
    r'"([^\\"]+|\\"|\\\\)*"'
    # PLY uses the docstring above as the token's regex: a double-quoted
    # literal allowing escaped quotes (\") and escaped backslashes (\\).
    # Strip the surrounding quotes from the matched text.
    t.value = t.value[1:-1]
    # Mirror of t_SSTRING: only Python 2 byte strings need the
    # "string-escape" decode; Python 3 text values are left untouched
    # because that codec does not exist on Python 3.
    if not isinstance(t.value, six.text_type):
        t.value = t.value.decode('string-escape')
    return t
|
||||||
|
|
||||||
def t_AND(t):
|
def t_AND(t):
|
||||||
|
Loading…
x
Reference in New Issue
Block a user