2 references to
Microsoft.ML.Tokenizers (2)
Model\EnglishRobertaTokenizer.cs (2)
    107: this(vocabularyPath is null ? throw new ArgumentNullException(nameof(vocabularyPath)) : File.OpenRead(vocabularyPath),
    124: this(vocabularyStream, mergeStream, highestOccurrenceMappingStream, preTokenizer, normalizer, filterUnsupportedChars, disposeStream: false)
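Both call sites chain to another constructor overload: the path-based overload at line 107 opens a FileStream itself, while the stream-based overload at line 124 forwards the caller's streams with disposeStream: false, signaling that the tokenizer must not dispose streams it does not own. Below is a minimal sketch of that ownership pattern, using a hypothetical ExampleTokenizer with a single vocabulary stream; the real EnglishRobertaTokenizer takes merge and occurrence-mapping streams as well, and since the disposeStream argument of the line-107 call is cut off in the listing, passing disposeStream: true there is an assumption.

using System;
using System.IO;

public sealed class ExampleTokenizer : IDisposable
{
    private readonly Stream _vocabularyStream;
    private readonly bool _disposeStream;

    // Path-based overload: the tokenizer opens the stream itself, so it
    // takes ownership and must dispose it (assumed disposeStream: true).
    public ExampleTokenizer(string vocabularyPath)
        : this(vocabularyPath is null
                  ? throw new ArgumentNullException(nameof(vocabularyPath))
                  : File.OpenRead(vocabularyPath),
               disposeStream: true)
    {
    }

    // Stream-based overload: the caller owns the stream, so the
    // tokenizer must not dispose it.
    public ExampleTokenizer(Stream vocabularyStream)
        : this(vocabularyStream, disposeStream: false)
    {
    }

    // Shared chained constructor: all overloads funnel here, with
    // ownership made explicit by the disposeStream flag.
    private ExampleTokenizer(Stream vocabularyStream, bool disposeStream)
    {
        _vocabularyStream = vocabularyStream ?? throw new ArgumentNullException(nameof(vocabularyStream));
        _disposeStream = disposeStream;
    }

    public void Dispose()
    {
        if (_disposeStream)
        {
            _vocabularyStream.Dispose();
        }
    }
}

Funneling both entry points through one private constructor keeps the ownership decision in a single place, which avoids double-dispose or leaked-handle bugs when the path-based and stream-based overloads evolve independently.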