29 references to EncodeToTokens
Microsoft.ML.Tokenizers.Tests (26)
BpeTests.cs (3)
257: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(sentence, out _);
376: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
429: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
CodeGenTests.cs (5)
252: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
347: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
549: IReadOnlyList<EncodedToken> encoding = codeGenTokenizer.EncodeToTokens(text, out _);
682: encoding = codeGenTokenizer.EncodeToTokens(text, out _);
815: encoding = codeGenTokenizer.EncodeToTokens(text, out _);
EnglishRobertaTests.cs (4)
181: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
249: encoding = tokenizer.EncodeToTokens((string)p[0], out _);
256: encoding = tokenizer.EncodeToTokens((string)p[0], out _);
263: encoding = tokenizer.EncodeToTokens((string)p[0], out _);
LlamaTests.cs (4)
244: IReadOnlyList<EncodedToken> result = llamaTokenizer.EncodeToTokens(input, out _);
338: Assert.Equal([], llamaTokenizer.EncodeToTokens((string)null!, out _));
500: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
664: encodedTokens = tokenizer.EncodeToTokens(kvp.Key, out normalizedString);
NormalizerTests.cs (1)
65: IReadOnlyList<EncodedToken> tokens = tokenizer.EncodeToTokens(text, out string? normalizedString);
PreTokenizerTests.cs (1)
59: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
TitokenTests.cs (8)
142: IReadOnlyList<EncodedToken> result = tokenizer.EncodeToTokens(text, out string? normalizedString);
195: IReadOnlyList<EncodedToken> result = GPT4.EncodeToTokens(text, out string? normalizedString);
238: IReadOnlyList<EncodedToken> result = GPT4.EncodeToTokens(text, out string? normalizedString);
257: IReadOnlyList<EncodedToken> result = GPT4.EncodeToTokens(text, out string? normalizedString);
273: IReadOnlyList<EncodedToken> result = GPT4.EncodeToTokens(text, out string? normalizedString);
307: IReadOnlyList<EncodedToken> result = GPT4o.EncodeToTokens(text, out string? normalizedString);
564: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(text, out _);
679: IReadOnlyList<EncodedToken> result = GPT4.EncodeToTokens(text, out _);
Microsoft.ML.TorchSharp (3)
NasBert\NerTrainer.cs (2)
170: IReadOnlyList<EncodedToken> encoding = Tokenizer.EncodeToTokens(sentence, out string normalizedString);
380: IReadOnlyList<EncodedToken> encoding = tokenizer.EncodeToTokens(sentence, out string normalizedString);
Roberta\QATrainer.cs (1)
404: var contextTokens = Tokenizer.EncodeToTokens(contextString, out string normalized);
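
Every reference above follows the same call shape: pass a string to EncodeToTokens, receive an IReadOnlyList<EncodedToken>, and get any normalized form of the input through the out parameter (or discard it with out _). The following is a minimal sketch of that pattern, not code from the files listed; it assumes the public Microsoft.ML.Tokenizers package, its TiktokenTokenizer.CreateForModel factory, and Id/Value properties on EncodedToken, none of which appear in the listing itself.

    // Sketch only: illustrates the EncodeToTokens call shape seen in the
    // references above. TiktokenTokenizer.CreateForModel and the Id/Value
    // properties on EncodedToken are assumptions, not taken from the listing.
    using System;
    using System.Collections.Generic;
    using Microsoft.ML.Tokenizers;

    class EncodeToTokensExample
    {
        static void Main()
        {
            // Any Tokenizer-derived type exposes EncodeToTokens; a Tiktoken
            // tokenizer is used here purely for illustration.
            Tokenizer tokenizer = TiktokenTokenizer.CreateForModel("gpt-4");

            string text = "Hello, world!";
            IReadOnlyList<EncodedToken> encoding =
                tokenizer.EncodeToTokens(text, out string? normalizedString);

            // normalizedString is null when no normalization was applied.
            Console.WriteLine($"Normalized: {normalizedString ?? text}");
            foreach (EncodedToken token in encoding)
            {
                Console.WriteLine($"{token.Id}\t{token.Value}");
            }
        }
    }

The test files discard the normalized string with out _ when only the token list matters; NerTrainer.cs and QATrainer.cs keep it, since offsets into the normalized text are needed downstream.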