2 references to
Microsoft.ML.Tokenizers (2)
Model\EnglishRobertaTokenizer.cs (2)
Line 119: this(vocabularyPath is null ? throw new ArgumentNullException(nameof(vocabularyPath)) : File.OpenRead(vocabularyPath), …
Line 136: this(vocabularyStream, mergeStream, highestOccurrenceMappingStream, preTokenizer, normalizer, filterUnsupportedChars, disposeStream: false)
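Both references chain to a core stream-based constructor. The path overload at line 119 validates its argument with a throw expression and opens the file itself, so it presumably asks the chained constructor to dispose the streams it created; the stream overload at line 136 passes disposeStream: false, leaving ownership with the caller. A minimal sketch of that ownership-tracking pattern, using hypothetical names (StreamOwningReader, path, stream) rather than the actual EnglishRobertaTokenizer signature:

```csharp
using System;
using System.IO;

public class StreamOwningReader : IDisposable
{
    private readonly Stream _stream;
    private readonly bool _disposeStream;

    // Path overload: validate the argument inline with a throw
    // expression, open the stream, and mark it as owned here.
    public StreamOwningReader(string path)
        : this(path is null
                   ? throw new ArgumentNullException(nameof(path))
                   : File.OpenRead(path),
               disposeStream: true)
    {
    }

    // Stream overload: the caller keeps ownership of the stream.
    public StreamOwningReader(Stream stream)
        : this(stream ?? throw new ArgumentNullException(nameof(stream)),
               disposeStream: false)
    {
    }

    // Core constructor that both public overloads chain to.
    private StreamOwningReader(Stream stream, bool disposeStream)
    {
        _stream = stream;
        _disposeStream = disposeStream;
    }

    // Dispose the stream only if this instance opened it.
    public void Dispose()
    {
        if (_disposeStream)
        {
            _stream.Dispose();
        }
    }
}
```

The design point is that only the overload that opened the stream disposes it, which is why the stream-accepting reference above explicitly forwards disposeStream: false.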