1 write to SeparatorTokenId
Microsoft.ML.Tokenizers (1)
Model\BertTokenizer.cs (1)
    39: SeparatorTokenId = vocab[new StringSpanOrdinalKey(options.SeparatorToken)];
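
The single write above happens at construction: the configured separator token string (typically "[SEP]") is resolved to its vocabulary id once and cached as SeparatorTokenId. A minimal sketch of that pattern, using a plain dictionary in place of the library's internal StringSpanOrdinalKey-keyed vocabulary (the type and parameter names below are illustrative, not the library's internals):

using System.Collections.Generic;

// Illustrative only: Dictionary<string, int> stands in for the internal vocab map,
// and the options object is reduced to a single separatorToken parameter.
sealed class SeparatorLookupSketch
{
    public int SeparatorTokenId { get; }

    public SeparatorLookupSketch(Dictionary<string, int> vocab, string separatorToken = "[SEP]")
    {
        // Same idea as line 39 above: resolve the separator token text to its id once,
        // at construction time, and cache it for all later encode calls.
        SeparatorTokenId = vocab[separatorToken];
    }
}
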
39 references to SeparatorTokenId
Microsoft.ML.Tokenizers (10)
Model\BertTokenizer.cs (10)
    251: list.Add(SeparatorTokenId);
    273: list.Add(SeparatorTokenId);
    317: ids.Add(SeparatorTokenId);
    322: ids.Add(SeparatorTokenId);
    369: destination[valuesWritten++] = SeparatorTokenId;
    388: destination[valuesWritten++] = SeparatorTokenId;
    442: mask.Add(id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0);
    449: mask.Add(id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0);
    529: destination[valuesWritten++] = id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0;
    541: destination[valuesWritten++] = id == ClassificationTokenId || id == SeparatorTokenId || id == PaddingTokenId || id == MaskingTokenId || id == UnknownTokenId ? 1 : 0;
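
The references above fall into two patterns: lines 251-388 append SeparatorTokenId when composing "[CLS] A [SEP]" style inputs (list-based and span-based variants), and lines 442-541 flag it, together with the other special ids, when building a special-tokens mask. A minimal sketch of both patterns with illustrative names (these helpers are not the library's methods):

using System.Collections.Generic;
using System.Linq;

static class BertSpecialTokensSketch
{
    // Compose "[CLS] A [SEP]" for a single sequence, as the list.Add(SeparatorTokenId)
    // call sites above do in the list-based overloads.
    public static List<int> WithSpecialTokens(IEnumerable<int> sequenceIds, int clsId, int sepId)
    {
        var list = new List<int> { clsId };
        list.AddRange(sequenceIds);
        list.Add(sepId);
        return list;
    }

    // 1 marks a special token ([CLS], [SEP], [PAD], [MASK], [UNK]), 0 a regular token,
    // mirroring the ternary checks at lines 442-541 above.
    public static List<int> SpecialTokensMask(IEnumerable<int> ids, HashSet<int> specialIds)
        => ids.Select(id => specialIds.Contains(id) ? 1 : 0).ToList();
}
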
Microsoft.ML.Tokenizers.Tests (29)
BertTokenizerTests.cs (29)
    71: Assert.Equal([tokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 13, 7, tokenizer.SeparatorTokenId], ids);
    93: Assert.Equal([tokenizer.ClassificationTokenId, tokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 13, 7, tokenizer.SeparatorTokenId, tokenizer.SeparatorTokenId], ids);
    143: Assert.Equal([tokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, tokenizer.SeparatorTokenId], ids);
    164: Assert.Equal([tokenizer.ClassificationTokenId, tokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, tokenizer.SeparatorTokenId, tokenizer.SeparatorTokenId], ids);
    211: Assert.Equal([tokenizer.ClassificationTokenId, 1, 6, 1, 11, 12, 7, tokenizer.SeparatorTokenId], ids);
    369: Assert.Equal([bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId], ids1);
    372: Assert.Equal([bertTokenizer.ClassificationTokenId, 13, 14, 15, 5, bertTokenizer.SeparatorTokenId], ids2);
    375: [bertTokenizer.ClassificationTokenId, bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId, bertTokenizer.SeparatorTokenId],
    387: Assert.Equal(new int[] { bertTokenizer.ClassificationTokenId, bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId, bertTokenizer.SeparatorTokenId }, ids1Span.ToArray());
    390: [bertTokenizer.ClassificationTokenId, bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId, bertTokenizer.SeparatorTokenId, bertTokenizer.ClassificationTokenId, 13, 14, 15, 5, bertTokenizer.SeparatorTokenId, bertTokenizer.SeparatorTokenId],
    403: new int[] { bertTokenizer.ClassificationTokenId, bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId, bertTokenizer.SeparatorTokenId, bertTokenizer.ClassificationTokenId, 13, 14, 15, 5, bertTokenizer.SeparatorTokenId, bertTokenizer.SeparatorTokenId },
    413: [bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId],
    426: new int[] { bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId },
    430: [bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId, 13, 14, 15, 5, bertTokenizer.SeparatorTokenId],
    443: new int[] { bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId, 13, 14, 15, 5, bertTokenizer.SeparatorTokenId },
    469: Assert.Equal([bertTokenizer.ClassificationTokenId, 8, 6, 10, 11, 12, 7, bertTokenizer.SeparatorTokenId], ids1);
    472: Assert.Equal([bertTokenizer.ClassificationTokenId, 13, 14, 15, 5, bertTokenizer.SeparatorTokenId], ids2);
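
The test expectations above all share one shape: the encoded ids open with ClassificationTokenId ([CLS]) and close with SeparatorTokenId ([SEP]). A usage sketch of that shape (assumptions: a BertTokenizer.Create overload that accepts a vocabulary file path with default options, and special tokens being added by default; "vocab.txt" and the input text are placeholders):

using System;
using System.Collections.Generic;
using Microsoft.ML.Tokenizers;

class Program
{
    static void Main()
    {
        // Assumption: Create here takes a WordPiece vocabulary file and default options.
        BertTokenizer tokenizer = BertTokenizer.Create("vocab.txt");

        IReadOnlyList<int> ids = tokenizer.EncodeToIds("hello world!");

        // As in the asserts above, [CLS] opens and [SEP] closes the encoded sequence.
        Console.WriteLine(ids[0] == tokenizer.ClassificationTokenId);        // True
        Console.WriteLine(ids[ids.Count - 1] == tokenizer.SeparatorTokenId); // True
    }
}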