From ee9cd7a16c2d73401fc5ac7877dea00a3e43d1fc Mon Sep 17 00:00:00 2001
From: Dmitry Tantsur
Date: Wed, 23 Sep 2020 15:46:00 +0200
Subject: [PATCH] tokenizer: do not try to decode strings on Python 3

Change-Id: I37af7e9495fa3ddc0355f0184f93ed5c3ea87bd3
Story: #2007951
Task: #40427
---
 gertty/search/tokenizer.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/gertty/search/tokenizer.py b/gertty/search/tokenizer.py
index 88573db..6f6a415 100644
--- a/gertty/search/tokenizer.py
+++ b/gertty/search/tokenizer.py
@@ -82,12 +82,16 @@ def SearchTokenizer():
 
     def t_SSTRING(t):
         r"'([^\\']+|\\'|\\\\)*'"
-        t.value=t.value[1:-1].decode("string-escape")
+        t.value = t.value[1:-1]
+        if not isinstance(t.value, six.text_type):
+            t.value = t.value.decode('string-escape')
         return t
 
     def t_DSTRING(t):
         r'"([^\\"]+|\\"|\\\\)*"'
-        t.value=t.value[1:-1].decode("string-escape")
+        t.value = t.value[1:-1]
+        if not isinstance(t.value, six.text_type):
+            t.value = t.value.decode('string-escape')
         return t
 
     def t_AND(t):