1 instantiation of LlamaTokenizer
  Microsoft.ML.Tokenizers (1)

43 references to LlamaTokenizer
  Microsoft.ML.GenAI.Core.Tests (1)
  Microsoft.ML.GenAI.Mistral (4)
  Microsoft.ML.GenAI.Mistral.Tests (1)
  Microsoft.ML.GenAI.Phi (3)
  Microsoft.ML.GenAI.Phi.Tests (1)
  Microsoft.ML.GenAI.Samples (14)
    Mistral\Mistral_7B_Instruct.cs (6)
      43: var tokenizer = MistralTokenizerHelper.FromPretrained(originalWeightFolder);
      46: var pipeline = new CausalLMPipeline<LlamaTokenizer, MistralForCausalLM>(tokenizer, model, device);
      74: var tokenizer = MistralTokenizerHelper.FromPretrained(originalWeightFolder, modelName: "tokenizer.model");
      81: var pipeline = new CausalLMPipeline<LlamaTokenizer, MistralModel>(tokenizer, model, device);
      128: var tokenizer = MistralTokenizerHelper.FromPretrained(originalWeightFolder);
      131: var pipeline = new CausalLMPipeline<LlamaTokenizer, MistralForCausalLM>(tokenizer, model, device);
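
The Mistral_7B_Instruct.cs lines above show the whole wiring: MistralTokenizerHelper.FromPretrained loads the SentencePiece tokenizer.model from the weight folder as a LlamaTokenizer, which is then handed together with the model and device to a CausalLMPipeline. Below is a minimal sketch of that wiring only; the MistralForCausalLM model and the device are assumed to be created elsewhere as in the sample (their setup is elided), the device parameter is assumed to be a device-name string, and the using directives are assumptions based on the project names.

```csharp
using Microsoft.ML.GenAI.Core;
using Microsoft.ML.GenAI.Mistral;
using Microsoft.ML.Tokenizers;

internal static class MistralPipelineSketch
{
    // Mirrors the wiring at lines 43/46 above. Model loading and device selection
    // are assumed to happen elsewhere, as in Mistral_7B_Instruct.cs; the device is
    // assumed to be a device name string such as "cuda" or "cpu".
    public static CausalLMPipeline<LlamaTokenizer, MistralForCausalLM> Build(
        string originalWeightFolder, MistralForCausalLM model, string device)
    {
        // Load the SentencePiece tokenizer.model that ships with the weights.
        LlamaTokenizer tokenizer = MistralTokenizerHelper.FromPretrained(originalWeightFolder);

        // Bind tokenizer, model, and device into a causal LM pipeline.
        return new CausalLMPipeline<LlamaTokenizer, MistralForCausalLM>(tokenizer, model, device);
    }
}
```
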
  Microsoft.ML.Tokenizers (1)
  Microsoft.ML.Tokenizers.Tests (18)
    LlamaTests.cs (18)
      33: return LlamaTokenizer.Create(remoteStream);
      40: return LlamaTokenizer.Create(remoteStream);
      47: LlamaTokenizer tokenizer = LlamaTokenizer.Create(remoteStream, addBeginOfSentence: true, addEndOfSentence: false,
      241: LlamaTokenizer bpe = (llamaTokenizer as LlamaTokenizer)!;
      298: private void TestDecodingWithSpan(LlamaTokenizer tokenizer, int[] ids, string expectedDecoded)
      354: LlamaTokenizer? bpe = llamaTokenizer as LlamaTokenizer;
      384: public void TestDecodeSpecialTokenWithSmallId(LlamaTokenizer llamaTokenizer)
      433: normalizer = new SentencePieceNormalizer(removeExtraWhiteSpaces: false, addDummyPrefix: true, escapeWhiteSpaces: true, treatWhitespaceAsSuffix: false, specialTokens: (_llamaPhi3Tokenizer as LlamaTokenizer)!.SpecialTokens);
      441: normalizer = new SentencePieceNormalizer(removeExtraWhiteSpaces: false, addDummyPrefix: true, escapeWhiteSpaces: true, treatWhitespaceAsSuffix: true, specialTokens: (_llamaPhi3Tokenizer as LlamaTokenizer)!.SpecialTokens);
      653: LlamaTokenizer tokenizer = (_llamaPhi3Tokenizer as LlamaTokenizer)!;
      834: LlamaTokenizer tokenizer = (_llamaPhi3Tokenizer as LlamaTokenizer)!;
      872: LlamaTokenizer tokenizerWithSuffix = (_llamaPhi3TokenizerWithTreatSpaceSuffix as LlamaTokenizer)!;
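
LlamaTests.cs exercises the public factory seen at lines 33, 40, and 47: LlamaTokenizer.Create takes a SentencePiece model stream plus flags that control the begin-of-sentence and end-of-sentence tokens. A minimal stand-alone sketch of the same call follows, assuming a local tokenizer.model file (the tests read the model from a remote stream instead); the encode/decode round trip uses the base Tokenizer API from Microsoft.ML.Tokenizers.

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.ML.Tokenizers;

// Assumes a local SentencePiece model file; LlamaTests.cs reads it from a
// remote stream instead (lines 33, 40, 47 above).
using Stream modelStream = File.OpenRead("tokenizer.model");

// Same factory and flags as the test at line 47 (remaining parameters left at defaults).
LlamaTokenizer tokenizer = LlamaTokenizer.Create(
    modelStream, addBeginOfSentence: true, addEndOfSentence: false);

// Round-trip a sample string through the base Tokenizer encode/decode API.
IReadOnlyList<int> ids = tokenizer.EncodeToIds("Hello world");
Console.WriteLine(string.Join(", ", ids));
Console.WriteLine(tokenizer.Decode(ids));
```
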