Searched refs:tokenizer (Results 1 – 3 of 3) sorted by relevance
508   def _search(self, tokenizer):   argument
535       for i, tok in enumerate(tokenizer.tokens):
571       s = str(tokenizer)
574       yield start, len(tokenizer.tokens)
585       tokenizer = source
588       tokenizer = CTokenizer(source)
591       for start, end in self._search(tokenizer):
592       new_tokenizer = CTokenizer(tokenizer.tokens[start:end + 1])
618       tokenizer = source
621       tokenizer = CTokenizer(source)
[all …]
24 C tokenizer
51 tokenizer = CTokenizer(data["source"])
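A minimal, self-contained sketch of the usage pattern these hits suggest: a source is wrapped in a CTokenizer, a _search generator walks tokenizer.tokens and yields (start, end) spans, and each matched slice is re-wrapped in a new CTokenizer. The CTokenizer stand-in, the Searcher class, the needle-matching logic, and the demo input are assumptions for illustration only; just the calls visible in the results (CTokenizer(source), tokenizer.tokens, str(tokenizer), the span slice) come from the hits above.

# Hypothetical stand-in for the project's C tokenizer; only the attributes
# seen in the search hits are modeled here.
class CTokenizer:
    def __init__(self, source):
        # Accept either raw source text or an already-built token slice.
        self.tokens = source.split() if isinstance(source, str) else list(source)

    def __str__(self):
        # Rejoin tokens; the real class likely reconstructs the source text.
        return " ".join(self.tokens)


class Searcher:
    def __init__(self, needle):
        self.needle = needle

    def _search(self, tokenizer):
        # Yield (start, end) index pairs for matching spans (assumed:
        # single-token matches; the real logic is more involved).
        for i, tok in enumerate(tokenizer.tokens):
            if tok == self.needle:
                yield i, i

    def matches(self, source):
        # Accept either an existing tokenizer or raw source, as the hits
        # at 585/588 and 618/621 suggest.
        if isinstance(source, CTokenizer):
            tokenizer = source
        else:
            tokenizer = CTokenizer(source)
        for start, end in self._search(tokenizer):
            # Re-wrap the matched token slice in its own tokenizer (hit 592).
            yield CTokenizer(tokenizer.tokens[start:end + 1])


if __name__ == "__main__":
    for match in Searcher("tokenizer").matches("the C tokenizer emits tokens"):
        print(str(match))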