7 types derived from Tokenizer
Microsoft.ML.Tokenizers (6)
Microsoft.ML.Tokenizers.Tests (1)
115 references to Tokenizer
Microsoft.Extensions.AI.Integration.Tests (3)
Microsoft.ML.GenAI.Core (8)
Microsoft.ML.GenAI.Core.Tests (2)
Microsoft.ML.GenAI.LLaMA (8)
Microsoft.ML.GenAI.Mistral (2)
Microsoft.ML.GenAI.Phi (10)
Microsoft.ML.GenAI.Phi.Tests (3)
Microsoft.ML.GenAI.Samples (2)
Microsoft.ML.Tokenizers (5)
Microsoft.ML.Tokenizers.Tests (55)
TiktokenTests.cs (16)
32: public static Tokenizer GPT4 { get; } = TiktokenTokenizer.CreateForModel("gpt-4", _specialTokens);
33: public static Tokenizer GPT2 { get; } = TiktokenTokenizer.CreateForModel("gpt2");
34: public static Tokenizer P50kBase { get; } = TiktokenTokenizer.CreateForModel("text-davinci-003");
35: public static Tokenizer R50kBase { get; } = TiktokenTokenizer.CreateForModel("ada");
36: public static Tokenizer P50kEdit { get; } = TiktokenTokenizer.CreateForModel("text-davinci-edit-001");
37: public static Tokenizer GPT4o { get; } = TiktokenTokenizer.CreateForModel("gpt-4o");
38: public static Tokenizer Phi4 { get; } = TiktokenTokenizer.CreateForModel("phi-4");
62: Tokenizer tokenizer = TiktokenTokenizer.Create(tokenizerDataFileName, GPT4.PreTokenizer, null, specialTokens);
112: public async Task TestTokenizerUsingExternalVocab(Tokenizer tokenizer, string url)
138: private void TestGPT4TokenizationEncoding(Tokenizer tokenizer)
213: private void TestGPT4Tokenizer(Tokenizer gpt4Tokenizer)
452: Tokenizer tokenizer = TiktokenTokenizer.CreateForModel(modelName);
465: Tokenizer tokenizer = TiktokenTokenizer.CreateForEncoding(encodingName);
479: Tokenizer tokenizer1 = TiktokenTokenizer.CreateForModel(modelName);
521: Tokenizer tokenizer = TiktokenTokenizer.CreateForModel(name);
580: private void TestTokenizerEncodingForTokenizer(Tokenizer tokenizer, string text, string[] expectedTokens, (int Index, int Length)[] expectedOffsets, int[] expectedIds)
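The TiktokenTests.cs hits above exercise the TiktokenTokenizer factory methods (CreateForModel, CreateForEncoding, Create) and assign the results to the abstract Tokenizer type. A minimal sketch of that same pattern follows, assuming only that the Microsoft.ML.Tokenizers package is referenced; the sample text and class name are invented for illustration and are not taken from the tests.

using System;
using System.Collections.Generic;
using Microsoft.ML.Tokenizers;

class TiktokenRoundTrip
{
    static void Main()
    {
        // Same factory the tests call; "gpt-4" maps to the cl100k_base encoding.
        Tokenizer tokenizer = TiktokenTokenizer.CreateForModel("gpt-4");

        string text = "Hello, Tokenizer!";

        // Encode to ids, count tokens, then decode the ids back to text.
        IReadOnlyList<int> ids = tokenizer.EncodeToIds(text);
        int count = tokenizer.CountTokens(text);
        string? decoded = tokenizer.Decode(ids);

        Console.WriteLine($"{count} tokens: [{string.Join(", ", ids)}]");
        Console.WriteLine(decoded);
    }
}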
Microsoft.ML.TorchSharp (17)
NasBert\NasBertTrainer.cs (3)
178: public Tokenizer Tokenizer;
582: private IList<int> PrepInputTokens(ref ReadOnlyMemory<char> sentence1, ref ReadOnlyMemory<char> sentence2, ref ValueGetter<ReadOnlyMemory<char>> getSentence1, ref ValueGetter<ReadOnlyMemory<char>> getSentence2, Tokenizer tokenizer)
612: private protected void UpdateCacheIfNeeded(long position, TensorCacher outputCache, ref ReadOnlyMemory<char> sentence1, ref ReadOnlyMemory<char> sentence2, ref ValueGetter<ReadOnlyMemory<char>> getSentence1, ref ValueGetter<ReadOnlyMemory<char>> getSentence2, Tokenizer tokenizer)
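The NasBertTrainer.cs hits show Microsoft.ML.TorchSharp programming against the abstract Tokenizer base type rather than a concrete tokenizer. The sketch below is illustrative only and is not the NasBertTrainer implementation: a hypothetical SentencePairEncoder helper that, like PrepInputTokens above, accepts any Tokenizer and turns a sentence pair into a single id list, so the caller can plug in whichever concrete tokenizer it was constructed with.

using System;
using System.Collections.Generic;
using Microsoft.ML.Tokenizers;

static class SentencePairEncoder
{
    // Encode sentence1, then append sentence2's ids if it is non-empty.
    public static List<int> Encode(ReadOnlyMemory<char> sentence1, ReadOnlyMemory<char> sentence2, Tokenizer tokenizer)
    {
        var ids = new List<int>(tokenizer.EncodeToIds(sentence1.Span));
        if (!sentence2.IsEmpty)
            ids.AddRange(tokenizer.EncodeToIds(sentence2.Span));
        return ids;
    }
}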