1 write to UnknownTokenId
Microsoft.ML.Tokenizers (1)
Model\WordPieceTokenizer.cs (1)
68: UnknownTokenId = id;
12 references to UnknownTokenId
Microsoft.ML.Tokenizers (8)
Model\BertTokenizer.cs (4)
442: mask.Add(id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0);
449: mask.Add(id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0);
529: destination[valuesWritten++] = id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0;
541: destination[valuesWritten++] = id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0;
Model\WordPieceTokenizer.cs (4)
319: tokens.Add(new EncodedToken(UnknownTokenId, UnknownToken, new Range(offset, offset + text.Length)));
373: tokens.Add(new EncodedToken(UnknownTokenId, UnknownToken, new Range(offset, offset + textLength)));
458: accumulatedIds?.Add(UnknownTokenId);
522: accumulatedIds?.Add(UnknownTokenId);
Microsoft.ML.Tokenizers.Tests (4)
BertTokenizerTests.cs (3)
48: Assert.Equal(1, tokenizer.UnknownTokenId);
119: Assert.Equal(1, tokenizer.UnknownTokenId);
191: Assert.Equal(1, tokenizer.UnknownTokenId);
WordPieceTests.cs (1)
39: Assert.Equal(0, tokenizer.UnknownTokenId);