24 references to EncodedToken
Microsoft.ML.Tokenizers (23)
Model\CodeGenTokenizer.cs (6)
379: tokens.Add(new EncodedToken(BeginningOfSentenceId.Value, BeginningOfSentenceToken!, (0, 0)));
1593: tokens.Add(new EncodedToken(tokensToAdd[0].Id, tokensToAdd[0].Value, (offset == 0 ? tokensToAdd[0].Offset.Index : tokensToAdd[0].Offset.Index + offset - 1, offset == 0 ? tokensToAdd[0].Offset.Length - 1 : tokensToAdd[0].Offset.Length)));
1597: tokens.Add(new EncodedToken(tokensToAdd[i].Id, tokensToAdd[i].Value, (tokensToAdd[i].Offset.Index + offset - 1, tokensToAdd[i].Offset.Length)));
1605: tokens.Add(new EncodedToken(t.Id, t.Value, (t.Offset.Index + offset, t.Offset.Length)));
1625: return new List<EncodedToken> { new EncodedToken(_vocab[new StringSpanOrdinalKey(tokenValue)].Id, tokenValue, (mapping[0], 1)) };
1699: return new EncodedToken(id, token, (tokenStartIndex, tokenLength));
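All six CodeGenTokenizer.cs call sites construct EncodedToken from an id, a token string, and an (Index, Length) offset, and the block at lines 1593-1605 re-bases those offsets onto the original text. A minimal sketch of that re-basing pattern follows; the EncodedToken record here is a stand-in declared only for illustration (not the library's declaration), and RebaseOffsets is a hypothetical helper:

using System.Collections.Generic;
using System.Linq;

// Stand-in for the library type, reduced to the three members these call sites use:
// an integer Id, the token text Value, and an (Index, Length) offset into the input.
public readonly record struct EncodedToken(int Id, string Value, (int Index, int Length) Offset);

public static class OffsetRebasing
{
    // Shifts each token's offset by the position of its text fragment within the
    // original input, mirroring the "t.Offset.Index + offset" adjustment at line 1605.
    public static List<EncodedToken> RebaseOffsets(IEnumerable<EncodedToken> tokens, int offset)
        => tokens
            .Select(t => new EncodedToken(t.Id, t.Value, (t.Offset.Index + offset, t.Offset.Length)))
            .ToList();
}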
Model\EnglishRobertaTokenizer.cs (4)
328: tokens.Add(new EncodedToken(t.Id, t.Value, (split.Offset + t.Offset.Index, t.Offset.Length)));
918: list.Add(new EncodedToken(tokens[j].Id, tokens[j].Value, (indexMapping[index], tokens[j].Value.Length)));
950: return new List<EncodedToken> { new EncodedToken(_vocab[new StringSpanOrdinalKey(tokenValue)], tokenValue, (indexMapping[0], 1)) };
1039: tokens.Add(new EncodedToken(_vocab[new StringSpanOrdinalKey(w)], w, (indexMapping[index], w.Length)));
Model\SentencePieceBpeTokenizer.cs (9)
275: tokens.Add(new EncodedToken(BeginningOfSentenceId, BeginningOfSentenceToken, (0, 0)));
289: tokens.Add(new EncodedToken(id, _specialTokensReverse![id], (Offset, Length)));
302: tokens.Add(new EncodedToken(EndOfSentenceId, EndOfSentenceToken, (text.Length, 0)));
322: tokens.Add(new EncodedToken(BeginningOfSentenceId, BeginningOfSentenceToken, (0, 0)));
352: tokens.Add(new EncodedToken(
367: tokens.Add(new EncodedToken(EndOfSentenceId, EndOfSentenceToken, (text.Length, 0)));
384: tokens.Add(new EncodedToken(id, token, (index + i, 1)));
408: tokens.Add(new EncodedToken(id, token, (index + i, length)));
436: tokens.Add(new EncodedToken(id.Id, text.Slice(pieceSpan.Index, pieceSpan.Length).ToString(), (pieceSpan.Index, pieceSpan.Length)));
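Four of the SentencePieceBpeTokenizer.cs call sites (lines 275, 302, 322, 367) add the beginning-of-sentence and end-of-sentence markers as zero-length tokens anchored at offset (0, 0) and (text.Length, 0) respectively. A sketch of that convention, reusing the stand-in EncodedToken record from the previous sketch; the method and parameter names are assumptions:

using System.Collections.Generic;

public static class SentinelTokens
{
    // Wraps an encoded sequence with zero-width BOS/EOS markers, as the call sites at
    // lines 275/322 (BOS at (0, 0)) and 302/367 (EOS at (text.Length, 0)) do.
    public static List<EncodedToken> WithSentinels(
        IReadOnlyList<EncodedToken> tokens, string text,
        int bosId, string bosToken, int eosId, string eosToken)
    {
        var result = new List<EncodedToken>(tokens.Count + 2)
        {
            new EncodedToken(bosId, bosToken, (0, 0)) // zero-length token at the start of the text
        };
        result.AddRange(tokens);
        result.Add(new EncodedToken(eosId, eosToken, (text.Length, 0))); // zero-length token at the end
        return result;
    }
}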
Model\TiktokenTokenizer.cs (3)
306: tokens.Add(new EncodedToken(
318: tokens.Add(new EncodedToken(mappedId.Id, mappedId.Token, (offset, mappedId.Token.Length)));
347: tokens.Add(new EncodedToken(
Model\Word.cs (1)
299: tokens.Add(new EncodedToken(_symbols[i].C, vocabReverse[_symbols[i].C], (index + offset, _symbols[i].Len)));
Microsoft.ML.Tokenizers.Tests (1)
TokenizerTests.cs (1)
115: tokens.Add(new EncodedToken(c - 'a', c.ToString(), (count, 1)));
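The single test reference builds a toy tokenizer whose encode loop maps each lowercase letter to the id c - 'a' with a one-character offset. A self-contained sketch of that loop, again using the stand-in EncodedToken record; the enclosing class and method names are assumptions:

using System.Collections.Generic;

public static class ToyCharacterTokenizer
{
    // Encodes lowercase ASCII input one character at a time: id = c - 'a',
    // value = the character itself, offset = (position, 1), as at TokenizerTests.cs line 115.
    public static List<EncodedToken> Encode(string text)
    {
        var tokens = new List<EncodedToken>();
        int count = 0;
        foreach (char c in text)
        {
            tokens.Add(new EncodedToken(c - 'a', c.ToString(), (count, 1)));
            count++;
        }
        return tokens;
    }
}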