from tokenize import *
from keyword import *
from string import *

class Tokenizer:
    """
    Simple class to create a list of token-tuples like:

        (type, string, first, last)

    where `type` is the token-type name from tokenize.tok_name (or "KEY"
    for Python keywords), `string` is the token text, and `first`/`last`
    are the start/end column offsets on the line.

    Example:
        t = Tokenizer('def hallo(du): # juchee')
        t.tokens()
    """

    def __init__(self, text):
        # `readline` hands `text` to the tokenizer exactly once (see below),
        # so this class is intended for single-line input.
        self.text = text
        self.toks = []
        try:
            tokenize(self.readline, self.get)
        except TokenError:
            # Incomplete source (e.g. an open bracket) is expected while
            # editing; keep whatever tokens were collected so far.
            pass

    def tokens(self):
        """Return the list of (type, string, first, last) tuples."""
        return self.toks

    def get(self, type, string, begin, end, l):
        """
        Token-eater callback invoked by tokenize() for each token.

        begin/end are (row, col) pairs; only the columns are kept since
        the input is a single line.  `l` (the raw logical line) is part of
        the callback signature but unused.
        """
        _, b = begin
        _, e = end
        tname = tok_name[type]
        if iskeyword(string):
            # Report keywords under a single pseudo-type instead of NAME.
            tname = "KEY"
        # Fix: append ONE 4-tuple (list.append takes a single argument;
        # the original passed four and raised TypeError on the first token).
        self.toks.append((tname, string, b, e))

    def readline(self):
        """
        readline-style callback for tokenize(): return the stored text on
        the first call and "" (EOF) on every call after that.
        """
        t = self.text
        self.text = ""
        return t

    def line(self):
        """
        Re-assemble the tokens into a normalized source line: a space
        between consecutive names/keywords, a trailing space after "," and
        ":", and no space before ".", "," or "(".
        """
        pre = ""
        out = ""
        for type, string, begin, end in self.toks:
            # Separate a name/keyword from what follows, except before
            # tokens that attach directly to it.
            if (pre in ["NAME","KEY"]) and (not string in [".",",","("]):
                out = out + " "

            if type in ["NAME","KEY"]:
                out = out + string
            elif type=="OP":
                if string in [",",":"]:
                    out = out + string + " "
                else:
                    out = out + string
            else:
                out = out + string
            pre = type
        return out