1 write to Host
Microsoft.ML.Data (1)
Training\TrainerEstimatorBase.cs (1)
64: Host = host;

369 references to Host
Microsoft.ML.Data (11)
Training\TrainerEstimatorBase.cs (11)
65: Host.CheckParam(feature.IsValid, nameof(feature), "not initialized properly");
81: Host.CheckValue(inputSchema, nameof(inputSchema));
99: Host.CheckValue(context, nameof(context));
101: Host.Check(pred != null, "Training did not return a predictor.");
109: throw Host.ExceptSchemaMismatch(nameof(inputSchema), "feature", FeatureColumn.Name);
111: throw Host.ExceptSchemaMismatch(nameof(inputSchema), "feature", FeatureColumn.Name,
117: throw Host.ExceptSchemaMismatch(nameof(inputSchema), "weight", WeightColumn.Name);
119: throw Host.ExceptSchemaMismatch(nameof(inputSchema), "weight", WeightColumn.Name,
128: throw Host.ExceptSchemaMismatch(nameof(inputSchema), "label", LabelColumn.Name);
136: Host.Assert(LabelColumn.IsValid);
139: throw Host.ExceptSchemaMismatch(nameof(labelCol), "label", LabelColumn.Name,
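The TrainerEstimatorBase references above are the validation pattern that the rest of this listing repeats: inputs are checked through the stored Host, and schema problems surface as ExceptSchemaMismatch. The fragment below is an illustrative sketch of that pattern only, assuming the internal IHost/Contracts helpers from Microsoft.ML.Runtime; the trainer type (MySketchTrainer) and its members are hypothetical and not taken from the listing.

    // Illustrative sketch, not ML.NET source. Assumes the internal IHost /
    // Contracts helpers (CheckValue, CheckParam, ExceptSchemaMismatch) shown above.
    using Microsoft.ML;
    using Microsoft.ML.Runtime;

    internal sealed class MySketchTrainer   // hypothetical type
    {
        private readonly IHost Host;                       // the single write above: Host = host;
        private readonly SchemaShape.Column FeatureColumn;

        private MySketchTrainer(IHost host, SchemaShape.Column feature)
        {
            Host = host;
            Host.CheckParam(feature.IsValid, nameof(feature), "not initialized properly");
            FeatureColumn = feature;
        }

        private void CheckInputSchema(SchemaShape inputSchema)
        {
            Host.CheckValue(inputSchema, nameof(inputSchema));
            if (!inputSchema.TryFindColumn(FeatureColumn.Name, out var col))
                throw Host.ExceptSchemaMismatch(nameof(inputSchema), "feature", FeatureColumn.Name);
        }
    }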
Microsoft.ML.FastTree (75)
BoostingFastTree.cs (2)
72: FastTreeTrainerOptions.Bias, Host);
138: earlyStoppingRule = FastTreeTrainerOptions.EarlyStoppingRuleFactory.CreateComponent(Host, lowerIsBetter);
FastTree.cs (13)
94: private protected string InnerOptions => CmdParser.GetSettings(Host, FastTreeTrainerOptions, new TOptions());
146: Host.CheckValue(options, nameof(options));
192: var instanceConverter = new ExamplesToFastTreeBins(Host, FastTreeTrainerOptions.MaximumBinCountPerFeature, useTranspose, !FastTreeTrainerOptions.FeatureFlocks, FastTreeTrainerOptions.MinimumExampleCountPerLeaf, GetMaxLabel());
204: Host.AssertValue(data);
205: Host.Assert(data.Schema.Feature.HasValue);
646: using (var pch = Host.StartProgressChannel("FastTree training"))
652: Host.CheckAlive();
693: Host.CheckAlive();
745: Host.CheckAlive();
758: Host.CheckAlive();
761: Host.CheckAlive();
764: Host.CheckAlive();
818: ch.Trace("CommandLine = {0}", CmdParser.GetSettings(Host, FastTreeTrainerOptions, new TOptions()));
FastTreeClassification.cs (6)
186: Host.CheckValue(context, nameof(context));
191: using (var ch = Host.Start("Training"))
206: var pred = new FastTreeBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
212: var cali = new PlattCalibrator(Host, -1 * _sigmoidParameter, 0);
213: return new FeatureWeightsCalibratedModelParameters<FastTreeBinaryModelParameters, PlattCalibrator>(Host, pred, cali);
303: => new BinaryPredictionTransformer<CalibratedModelParametersBase<FastTreeBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
FastTreeRanking.cs (9)
107: Host.CheckNonEmpty(rowGroupColumnName, nameof(rowGroupColumnName));
125: () => throw Host.ExceptSchemaMismatch(nameof(labelCol), "label", labelCol.Name, "Single or Key", labelCol.GetTypeString());
140: Host.CheckValue(context, nameof(context));
145: using (var ch = Host.Start("Training"))
152: return new FastTreeRankingModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
159: Host.AssertValue(FastTreeTrainerOptions.CustomGains);
165: throw Host.Except(ex, "Error in the format of custom gains. Inner exception is {0}", ex.Message);
223: Host.Assert(FastTreeTrainerOptions.BaggingSize > 0);
476: => new RankingPredictionTransformer<FastTreeRankingModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
FastTreeRegression.cs (4)
110: Host.CheckValue(context, nameof(context));
115: using (var ch = Host.Start("Training"))
124: return new FastTreeRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
192: => new RegressionPredictionTransformer<FastTreeRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
FastTreeTweedie.cs (7)
100: Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
101: Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
119: Host.CheckValue(context, nameof(context));
124: using (var ch = Host.Start("Training"))
134: return new FastTreeTweedieModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
207: Host.CheckUserArg(1 <= FastTreeTrainerOptions.Index && FastTreeTrainerOptions.Index <= 2, nameof(FastTreeTrainerOptions.Index), "Must be in the range [1, 2]");
351: => new RegressionPredictionTransformer<FastTreeTweedieModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
GamClassification.cs (4)
140: var predictor = new GamBinaryModelParameters(Host,
142: var calibrator = new PlattCalibrator(Host, -1.0 * _sigmoidParameter, 0);
143: return new ValueMapperCalibratedModelParameters<GamBinaryModelParameters, PlattCalibrator>(Host, predictor, calibrator);
174: => new BinaryPredictionTransformer<CalibratedModelParametersBase<GamBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
GamRegression.cs (2)
109: return new GamRegressionModelParameters(Host, BinUpperBounds, BinEffects, MeanEffect, InputLength, FeatureMap);
126: => new RegressionPredictionTransformer<GamRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
GamTrainer.cs (14)
201: Host.CheckValue(options, nameof(options));
203: Host.CheckParam(options.LearningRate > 0, nameof(options.LearningRate), "Must be positive.");
204: Host.CheckParam(options.NumberOfThreads == null || options.NumberOfThreads > 0, nameof(options.NumberOfThreads), "Must be positive.");
205: Host.CheckParam(0 <= options.EntropyCoefficient && options.EntropyCoefficient <= 1, nameof(options.EntropyCoefficient), "Must be in [0, 1].");
206: Host.CheckParam(0 <= options.GainConfidenceLevel && options.GainConfidenceLevel < 1, nameof(options.GainConfidenceLevel), "Must be in [0, 1).");
207: Host.CheckParam(0 < options.MaximumBinCountPerFeature, nameof(options.MaximumBinCountPerFeature), "Must be positive.");
208: Host.CheckParam(0 < options.NumberOfIterations, nameof(options.NumberOfIterations), "Must be positive.");
209: Host.CheckParam(0 < options.MinimumExampleCountPerLeaf, nameof(options.MinimumExampleCountPerLeaf), "Must be positive.");
222: using (var ch = Host.Start("Training"))
257: var instanceConverter = new ExamplesToFastTreeBins(Host, GamTrainerOptions.MaximumBinCountPerFeature, useTranspose, !GamTrainerOptions.FeatureFlocks, GamTrainerOptions.MinimumExampleCountPerLeaf, float.PositiveInfinity);
264: Host.Assert(FeatureMap == null || FeatureMap.Length == TrainSet.NumFeatures);
269: Host.AssertValue(data);
270: Host.Assert(data.Schema.Feature.HasValue);
301: using (var pch = Host.StartProgressChannel("GAM training"))
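The GamTrainer.cs block is representative of how trainer constructors validate options through Host before any training work starts: CheckValue for null checks, CheckParam and CheckUserArg for range checks. A hedged sketch of that shape, using a hypothetical options type:

    // Sketch only; MySketchOptions and its properties are hypothetical stand-ins.
    // Host.CheckValue / CheckParam are the internal contract helpers that appear
    // throughout the listing above.
    private void ValidateOptions(MySketchOptions options)
    {
        Host.CheckValue(options, nameof(options));
        Host.CheckParam(options.LearningRate > 0, nameof(options.LearningRate), "Must be positive.");
        Host.CheckParam(options.NumberOfThreads == null || options.NumberOfThreads > 0,
            nameof(options.NumberOfThreads), "Must be positive.");
        Host.CheckParam(0 <= options.EntropyCoefficient && options.EntropyCoefficient <= 1,
            nameof(options.EntropyCoefficient), "Must be in [0, 1].");
    }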
RandomForest.cs (2)
44: Host.CheckValue(ch, nameof(ch));
72: FastTreeTrainerOptions.Bias, Host);
RandomForestClassification.cs (6)
197: Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
198: Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
213: Host.CheckValue(context, nameof(context));
218: using (var ch = Host.Start("Training"))
251: return new FastForestBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
366: => new BinaryPredictionTransformer<FastForestBinaryModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
RandomForestRegression.cs (6)
336: Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
337: Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
352: Host.CheckValue(context, nameof(context));
357: using (var ch = Host.Start("Training"))
384: return new FastForestRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions, FastTreeTrainerOptions.NumberOfQuantileSamples);
495: => new RegressionPredictionTransformer<FastForestRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Microsoft.ML.KMeansClustering (15)
KMeansPlusPlusTrainer.cs (15)
173: Host.CheckValue(options, nameof(options));
174: Host.CheckUserArg(options.NumberOfClusters > 0, nameof(options.NumberOfClusters), "Must be positive");
180: Host.CheckUserArg(options.MaximumNumberOfIterations > 0, nameof(options.MaximumNumberOfIterations), "Must be positive");
183: Host.CheckUserArg(options.OptimizationTolerance > 0, nameof(options.OptimizationTolerance), "Tolerance must be positive");
186: Host.CheckUserArg(options.AccelerationMemoryBudgetMb > 0, nameof(options.AccelerationMemoryBudgetMb), "Must be positive");
191: Host.CheckUserArg(!options.NumberOfThreads.HasValue || options.NumberOfThreads > 0, nameof(options.NumberOfThreads),
199: Host.CheckValue(context, nameof(context));
205: using (var ch = Host.Start("Training"))
213: Host.AssertValue(ch);
241: KMeansPlusPlusInit.Initialize(Host, _numThreads, ch, cursorFactory, _k, dimensionality,
246: KMeansRandomInit.Initialize(Host, _numThreads, ch, cursorFactory, _k,
252: KMeansBarBarInitialization.Initialize(Host, _numThreads, ch, cursorFactory, _k, dimensionality,
260: Host, _numThreads, ch, cursorFactory, totalTrainingInstances, _k, dimensionality, _maxIterations,
271: return new KMeansModelParameters(Host, _k, centroids, copyIn: true);
322: => new ClusteringPredictionTransformer<KMeansModelParameters>(Host, model, trainSchema, _featureColumn);
Microsoft.ML.LightGbm (52)
LightGbmBinaryTrainer.cs (6)
255: Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete");
257: var pred = new LightGbmBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs);
258: var cali = new PlattCalibrator(Host, -LightGbmTrainerOptions.Sigmoid, 0);
259: return new FeatureWeightsCalibratedModelParameters<LightGbmBinaryModelParameters, PlattCalibrator>(Host, pred, cali);
264: Host.AssertValue(ch);
294: => new BinaryPredictionTransformer<CalibratedModelParametersBase<LightGbmBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
LightGbmMulticlassTrainer.cs (14)
208: return new LightGbmBinaryModelParameters(Host, GetBinaryEnsemble(classID), FeatureCount, innerArgs);
213: Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete.");
215: Host.Assert(_numberOfClassesIncludingNan > 1, "Must know the number of classes before creating a predictor.");
216: Host.Assert(TrainedEnsemble.NumTrees % _numberOfClassesIncludingNan == 0, "Number of trees should be a multiple of number of classes.");
223: var cali = new PlattCalibrator(Host, -LightGbmTrainerOptions.Sigmoid, 0);
224: predictors[i] = new FeatureWeightsCalibratedModelParameters<LightGbmBinaryModelParameters, PlattCalibrator>(Host, pred, cali);
228: return OneVersusAllModelParameters.Create(Host, OneVersusAllModelParameters.OutputFormula.Softmax, predictors);
230: return OneVersusAllModelParameters.Create(Host, predictors);
235: Host.AssertValue(ch);
296: _numberOfClassesIncludingNan = keyType.GetCountAsInt32(Host) + 1;
298: _numberOfClassesIncludingNan = keyType.GetCountAsInt32(Host);
299: _numberOfClasses = keyType.GetCountAsInt32(Host);
336: Host.AssertValue(ch);
376: => new MulticlassPredictionTransformer<OneVersusAllModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
LightGbmRankingTrainer.cs (7)
216: Host.CheckNonEmpty(rowGroupIdColumnName, nameof(rowGroupIdColumnName));
241: Host.AssertValue(ch);
273: () => throw Host.ExceptSchemaMismatch(nameof(labelCol), "label", labelCol.Name, "Single or Key", labelCol.GetTypeString());
283: Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete");
285: return new LightGbmRankingModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs);
290: Host.AssertValue(ch);
307: => new RankingPredictionTransformer<LightGbmRankingModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
LightGbmRegressionTrainer.cs (4)
218: Host.Check(TrainedEnsemble != null,
221: return new LightGbmRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs);
226: Host.AssertValue(ch);
255: => new RegressionPredictionTransformer<LightGbmRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
LightGbmTrainerBase.cs (21)
370: Host.CheckValue(options, nameof(options));
379: ParallelTraining = LightGbmTrainerOptions.ParallelTrainer != null ? LightGbmTrainerOptions.ParallelTrainer.CreateComponent(Host) : new SingleTrainer();
380: GbmOptions = LightGbmTrainerOptions.ToDictionary(Host);
388: Host.CheckValue(context, nameof(context));
397: using (var ch = Host.Start("Loading LightGBM model file"))
462: using (var ch = Host.Start("Loading data for LightGBM"))
464: using (var pch = Host.StartProgressChannel("Loading data for LightGBM"))
471: using (var ch = Host.Start("Training with LightGBM"))
473: using (var pch = Host.StartProgressChannel("Training with LightGBM"))
669: Host.AssertValue(ch);
700: Host.AssertValue(ch);
721: Host.AssertValue(ch);
722: Host.AssertValue(pch);
723: Host.AssertValue(dtrain);
724: Host.AssertValueOrNull(dvalid);
725: Host.CheckAlive();
735: using (Booster bst = WrappedLightGbmTraining.Train(Host, ch, pch, GbmOptions, dtrain,
938: Host.AssertValue(ch);
942: var rand = Host.Rand;
1027: Host.AssertValue(ch);
1032: var rand = Host.Rand;
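LightGbmTrainerBase.cs also shows the channel pattern that recurs across trainers: Host.Start opens a named logging channel, Host.StartProgressChannel opens a progress channel, and Host.CheckAlive is polled inside the training loop so cancellation is honored. A minimal sketch of that shape; the method name and iteration count are hypothetical:

    // Pattern sketch only; TrainCore and 'iterations' are hypothetical.
    // The channel objects come from the internal Microsoft.ML.Runtime contracts.
    private void TrainCore(int iterations)
    {
        using (var ch = Host.Start("Training with LightGBM"))
        using (var pch = Host.StartProgressChannel("Training with LightGBM"))
        {
            for (int i = 0; i < iterations; i++)
            {
                Host.CheckAlive();                      // stop promptly if the host was canceled
                // ... one boosting iteration ...
                ch.Trace("Finished iteration {0}", i);  // ch.Trace usage mirrors FastTree.cs:818
            }
        }
    }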
Microsoft.ML.Mkl.Components (23)
OlsLinearRegression.cs (9)
124: Host.CheckValue(options, nameof(options));
125: Host.CheckUserArg(options.L2Regularization >= 0, nameof(options.L2Regularization), "L2 regularization term cannot be negative");
132: => new RegressionPredictionTransformer<OlsModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
153: using (var ch = Host.Start("Training"))
415: Host.AssertValue(ch);
452: return new OlsModelParameters(Host, in weights, bias);
459: IValueMapper lrPredictor = new LinearRegressionModelParameters(Host, in weights, bias);
488: return new OlsModelParameters(Host, in weights, bias, rSquared: rSquared, rSquaredAdjusted: rSquaredAdjusted);
533: return new OlsModelParameters(Host, in weights, bias, standardErrors, tValues, pValues, rSquared, rSquaredAdjusted);
SymSgdClassificationTrainer.cs (14)
200: idvToFeedTrain = new RowShufflingTransformer(Host, shuffleArgs, idvToShuffle);
219: Host.CheckValue(context, nameof(context));
220: using (var ch = Host.Start("Training"))
227: Host.CheckParam(initPred == null || linearInitPred != null, nameof(context),
244: Host.CheckValue(options, nameof(options));
245: options.Check(Host);
253: Host.CheckParam(weights.Length > 0, nameof(weights));
258: var predictor = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias);
259: return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, predictor, new PlattCalibrator(Host, -1, 0));
263: => new BinaryPredictionTransformer<TPredictor>(Host, model, trainSchema, FeatureColumn.Name);
745: using (var pch = Host.StartProgressChannel("Preprocessing"))
751: using (var pch = Host.StartProgressChannel("Training"))
791: inputDataManager.RestartLoading(_options.Shuffle, Host);
Microsoft.ML.PCA (11)
PcaTrainer.cs (11)
169: _seed = options.Seed ?? Host.Rand.Next();
176: _seed = seed ?? Host.Rand.Next();
181: Host.CheckUserArg(_rank > 0, nameof(_rank), "Rank must be positive");
182: Host.CheckUserArg(_oversampling >= 0, nameof(_oversampling), "Oversampling must be non-negative");
188: Host.CheckValue(context, nameof(context));
192: using (var ch = Host.Start("Training"))
213: Host.AssertValue(ch);
236: Project(Host, cursorFactory, ref mean, omega, y, out numBad);
254: Project(Host, cursorFactory, ref mean, q, b, out numBad);
269: return new PcaModelParameters(Host, _rank, b, in mean);
391: => new AnomalyPredictionTransformer<PcaModelParameters>(Host, model, trainSchema, _featureColumn);
Microsoft.ML.StandardTrainers (165)
LdSvm\LdSvmTrainer.cs (27)
170: Host.CheckValue(options, nameof(options));
171: CheckOptions(Host, options);
191: Host.CheckValue(trainContext, nameof(trainContext));
192: using (var ch = Host.Start("Training"))
236: var sample = data.SampleForGammaUpdate(Host.Rand);
240: int thetaIdx = Host.Rand.Next(numLeaf - 1);
313: var sample = data.SampleExamples(Host.Rand);
393: return new LdSvmModelParameters(Host, w, thetaPrime, theta, _options.Sigma, biasW, biasTheta,
408: Host.Assert(Utils.Size(tempW) == count);
409: Host.Assert(Utils.Size(w) == count);
410: Host.Assert(Utils.Size(theta) == half);
411: Host.Assert(Utils.Size(thetaPrime) == count);
412: Host.Assert(Utils.Size(biasW) == count);
413: Host.Assert(Utils.Size(biasTheta) == half);
414: Host.Assert(Utils.Size(biasThetaPrime) == count);
415: Host.Assert(Utils.Size(tempThetaPrime) == count);
416: Host.Assert(Utils.Size(tempTheta) == half);
417: Host.Assert(Utils.Size(tempBiasW) == count);
418: Host.Assert(Utils.Size(tempBiasTheta) == half);
419: Host.Assert(Utils.Size(tempBiasThetaPrime) == count);
430: wInit.Values[j] = 2 * Host.Rand.NextSingle() - 1;
431: thetaPrimeInit.Values[j] = 2 * Host.Rand.NextSingle() - 1;
433: thetaInit.Values[j] = 2 * Host.Rand.NextSingle() - 1;
443: float bW = 2 * Host.Rand.NextSingle() - 1;
446: float bTP = 2 * Host.Rand.NextSingle() - 1;
459: float bT = 2 * Host.Rand.NextSingle() - 1;
666: => new BinaryPredictionTransformer<LdSvmModelParameters>(Host, model, trainSchema, _options.FeatureColumnName);
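The LdSvm block illustrates the third recurring use of Host: its Rand member is the trainer's source of randomness (here, weights drawn uniformly from [-1, 1)), which is what keeps runs reproducible when the enclosing MLContext is created with a fixed seed. A minimal sketch of that initialization; the field name is hypothetical:

    // Sketch only; 'weights' is a hypothetical array. The 2 * NextSingle() - 1
    // expression mirrors LdSvmTrainer.cs:430-459 above.
    private void InitializeWeights(float[] weights)
    {
        for (int j = 0; j < weights.Length; j++)
            weights[j] = 2 * Host.Rand.NextSingle() - 1;   // uniform in [-1, 1)
    }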
Standard\LogisticRegression\LbfgsPredictorBase.cs (23)
235: Host.CheckValue(options, nameof(options));
244: Host.CheckUserArg(!LbfgsTrainerOptions.UseThreads || LbfgsTrainerOptions.NumberOfThreads > 0 || LbfgsTrainerOptions.NumberOfThreads == null,
246: Host.CheckUserArg(LbfgsTrainerOptions.L2Regularization >= 0, nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative");
247: Host.CheckUserArg(LbfgsTrainerOptions.L1Regularization >= 0, nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative");
248: Host.CheckUserArg(LbfgsTrainerOptions.OptimizationTolerance > 0, nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive");
249: Host.CheckUserArg(LbfgsTrainerOptions.HistorySize > 0, nameof(LbfgsTrainerOptions.HistorySize), "Must be positive");
250: Host.CheckUserArg(LbfgsTrainerOptions.MaximumNumberOfIterations > 0, nameof(LbfgsTrainerOptions.MaximumNumberOfIterations), "Must be positive");
251: Host.CheckUserArg(LbfgsTrainerOptions.StochasticGradientDescentInitilaizationTolerance >= 0, nameof(LbfgsTrainerOptions.StochasticGradientDescentInitilaizationTolerance), "Must be non-negative");
252: Host.CheckUserArg(LbfgsTrainerOptions.NumberOfThreads == null || LbfgsTrainerOptions.NumberOfThreads.Value >= 0, nameof(LbfgsTrainerOptions.NumberOfThreads), "Must be non-negative");
254: Host.CheckParam(!(LbfgsTrainerOptions.L2Regularization < 0), nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative, if provided.");
255: Host.CheckParam(!(LbfgsTrainerOptions.L1Regularization < 0), nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative, if provided");
256: Host.CheckParam(!(LbfgsTrainerOptions.OptimizationTolerance <= 0), nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive, if provided.");
257: Host.CheckParam(!(LbfgsTrainerOptions.HistorySize <= 0), nameof(LbfgsTrainerOptions.HistorySize), "Must be positive, if provided.");
275: using (var ch = Host.Start("Initialization"))
321: ? new L1Optimizer(Host, BiasCount, L1Weight / NumGoodRows, MemorySize, DenseOptimizer, null, EnforceNonNegativity)
322: : new Optimizer(Host, MemorySize, DenseOptimizer, null, EnforceNonNegativity);
331: initWeights[j] = InitWtsDiameter * (Host.Rand.NextSingle() - 0.5f);
433: Host.CheckParam(context.InitialPredictor == null || context.InitialPredictor is IPredictor, nameof(context.InitialPredictor));
449: using (var ch = Host.Start("Training"))
466: Host.AssertValue(ch);
565: Host.AssertValue(ch);
600: using (var pch = Host.StartProgressChannel("LBFGS data prep"))
611: Host.CheckAlive();
Standard\LogisticRegression\LogisticRegression.cs (9)
144: Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
145: Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
181: => new BinaryPredictionTransformer<CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
274: Host.Assert(namesSpans.Length == featureLength);
312: _stats = new ModelStatisticsBase(Host, NumGoodRows, numParams, deviance, nullDeviance);
414: _stats = new LinearModelParameterStatistics(Host, NumGoodRows, numParams, deviance, nullDeviance, std, weightsOnly, bias);
456: return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host,
457: new LinearBinaryModelParameters(Host, in weights, bias, _stats),
458: new PlattCalibrator(Host, -1, 0));
Standard\LogisticRegression\MulticlassLogisticRegression.cs (6)
152: Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
153: Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
299: using (var ch = Host.Start("Creating Predictor"))
305: return new MaximumEntropyModelParameters(Host, in CurrentWeights, _numClasses, NumFeatures, _labelNames, _stats);
359: _stats = new ModelStatisticsBase(Host, NumGoodRows, numParams, deviance, nullDeviance);
384: => new MulticlassPredictionTransformer<MaximumEntropyModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
Standard\MulticlassClassification\MulticlassNaiveBayesTrainer.cs (12)
100: Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
101: Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
111: Host.CheckValue(options, nameof(options));
130: => new MulticlassPredictionTransformer<NaiveBayesMulticlassModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
134: Host.CheckValue(context, nameof(context));
136: Host.Check(data.Schema.Label.HasValue, "Missing Label column");
138: Host.Check(labelCol.Type == NumberDataViewType.Single || labelCol.Type is KeyDataViewType,
141: Host.Check(data.Schema.Feature.HasValue, "Missing Feature column");
146: labelCount = labelKeyType.GetCountAsInt32(Host);
150: using (var pch = Host.StartProgressChannel("Multi Class Naive Bayes training"))
151: using (var ch = Host.Start("Training"))
201: return new NaiveBayesMulticlassModelParameters(Host, labelHistogram, featureHistogram, featureCount);
Standard\Online\AveragedPerceptron.cs (1)
218: => new BinaryPredictionTransformer<LinearBinaryModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\Online\LinearSvm.cs (1)
338: => new BinaryPredictionTransformer<LinearBinaryModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\Online\OnlineGradientDescent.cs (1)
203: => new RegressionPredictionTransformer<LinearRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\Online\OnlineLinear.cs (7)
132: ParentHost = parent.Host;
165: weightValues[i] = parent.OnlineLinearTrainerOptions.InitialWeightsDiameter * (parent.Host.Rand.NextSingle() - (float)0.5);
167: Bias = parent.OnlineLinearTrainerOptions.InitialWeightsDiameter * (parent.Host.Rand.NextSingle() - (float)0.5);
276: Host.CheckValue(context, nameof(context));
284: Host.CheckParam(initPredictor == null || initLinearPred != null, nameof(context),
292: using (var ch = Host.Start("Training"))
322: var rand = shuffle ? Host.Rand : null;
Standard\PoissonRegression\PoissonRegression.cs (4)
99: Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
100: Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
128: => new RegressionPredictionTransformer<PoissonRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
195: return new PoissonRegressionModelParameters(Host, in weights, bias);
Standard\SdcaBinary.cs (41)
71: Host.CheckValue(context, nameof(context));
72: using (var ch = Host.Start("Training"))
78: Host.CheckParam(context.InitialPredictor == null || linInitPred != null, nameof(context),
111: idvToFeedTrain = new RowShufflingTransformer(Host, shuffleArgs, idvToShuffle);
339: Host.CheckUserArg(numThreads > 0, nameof(OptionsBase.NumberOfThreads), "The number of threads must be either null or a positive integer.");
382: using (var pch = Host.StartProgressChannel("SDCA preprocessing"))
387: Host.CheckAlive();
432: using (var pch = Host.StartProgressChannel("SDCA preprocessing with lookup"))
563: rands[i] = RandomUtils.Create(Host.Rand.Next());
572: using (var pch = Host.StartProgressChannel("SDCA invariants initialization"))
578: Host.CheckAlive();
600: using (var pch = Host.StartProgressChannel("SDCA training"))
814: Host.CheckAlive();
978: Host.CheckAlive();
989: Host.Assert(idToIdx == null || row == duals.Length);
1504: Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
1505: Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
1527: Host.CheckParam(Utils.Size(weights) == 1, nameof(weights));
1528: Host.CheckParam(Utils.Size(bias) == 1, nameof(bias));
1529: Host.CheckParam(weights[0].Length > 0, nameof(weights));
1536: return new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias[0]);
1551: => new BinaryPredictionTransformer<TModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
1616: var calibrator = new PlattCalibrator(Host, -1, 0);
1617: return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, linearModel, calibrator);
1815: Host.CheckParam(Utils.Size(weights) == 1, nameof(weights));
1816: Host.CheckParam(Utils.Size(bias) == 1, nameof(bias));
1817: Host.CheckParam(weights[0].Length > 0, nameof(weights));
1824: var predictor = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias[0]);
1827: return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, predictor, new PlattCalibrator(Host, -1, 0));
1976: Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
1977: Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
2008: => new BinaryPredictionTransformer<TModel>(Host, model, trainSchema, FeatureColumn.Name);
2115: rands[e - 1] = RandomUtils.Create(Host.Rand.Next());
2165: using (var pch = Host.StartProgressChannel("SGD Training"))
2219: Host.CheckParam(weights.Length > 0, nameof(weights));
2225: return new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias);
2318: var calibrator = new PlattCalibrator(Host, -1, 0);
2320: return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, subModel, calibrator);
2461: var calibrator = new PlattCalibrator(Host, -1, 0);
2463: return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, subModel, calibrator);
Standard\SdcaMulticlass.cs (19)
137: Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
138: Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
147: Host.CheckValue(labelColumn, nameof(labelColumn));
148: Host.CheckValue(featureColumn, nameof(featureColumn));
203: Host.CheckAlive();
379: Host.CheckAlive();
408: Host.Assert(idToIdx == null || row * numClasses == duals.Length);
556: Host.CheckValue(weights, nameof(weights));
557: Host.CheckValue(bias, nameof(bias));
558: Host.CheckParam(weights.Length > 0, nameof(weights));
559: Host.CheckParam(weights.Length == bias.Length, nameof(weights));
561: return new MaximumEntropyModelParameters(Host, weights, bias, bias.Length, weights[0].Length, null, stats: null);
566: new MulticlassPredictionTransformer<MaximumEntropyModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
653: Host.CheckValue(weights, nameof(weights));
654: Host.CheckValue(bias, nameof(bias));
655: Host.CheckParam(weights.Length > 0, nameof(weights));
656: Host.CheckParam(weights.Length == bias.Length, nameof(weights));
658: return new LinearMulticlassModelParameters(Host, weights, bias, bias.Length, weights[0].Length, null, stats: null);
663: new MulticlassPredictionTransformer<LinearMulticlassModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
Standard\SdcaRegression.cs (10)
122: Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
123: Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
131: Host.CheckValue(labelColumn, nameof(labelColumn));
132: Host.CheckValue(featureColumn, nameof(featureColumn));
133: Host.CheckValueOrNull(weightColumn);
146: Host.CheckParam(Utils.Size(weights) == 1, nameof(weights));
147: Host.CheckParam(Utils.Size(bias) == 1, nameof(bias));
148: Host.CheckParam(weights[0].Length > 0, nameof(weights));
155: return new LinearRegressionModelParameters(Host, in maybeSparseWeights, bias[0]);
202: => new RegressionPredictionTransformer<LinearRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\StochasticTrainerBase.cs (4)
32: Host.CheckValue(context, nameof(context));
33: using (var ch = Host.Start("Training"))
41: Host.CheckParam(context.InitialPredictor == null || linInitPred != null, nameof(context),
75: idvToFeedTrain = new RowShufflingTransformer(Host, shuffleArgs, idvToShuffle);
Microsoft.ML.Vision (17)
ImageClassificationTrainer.cs (17)
531: Host.CheckValue(options, nameof(options));
532: Host.CheckNonEmpty(options.FeatureColumnName, nameof(options.FeatureColumnName));
533: Host.CheckNonEmpty(options.LabelColumnName, nameof(options.LabelColumnName));
534: Host.CheckNonEmpty(options.ScoreColumnName, nameof(options.ScoreColumnName));
535: Host.CheckNonEmpty(options.PredictedLabelColumnName, nameof(options.PredictedLabelColumnName));
564: var logger = Host.Start(nameof(ImageClassificationTrainer));
605: throw Host.ExceptSchemaMismatch(nameof(input.Schema), "label", (string)labelColumn.Name, "Key",
614: _session = LoadTensorFlowSessionFromMetaGraph(Host, _options.Arch).Session;
651: => new MulticlassPredictionTransformer<ImageClassificationModelParameters>(Host, model, trainSchema,
725: return new ImageClassificationModelParameters(Host, session, _classCount, _jpegDataTensorName,
731: Host.CheckNonWhiteSpace(options.LabelColumnName, nameof(options.LabelColumnName));
735: throw Host.ExceptParam(nameof(_labelTensor.name), $"'{_labelTensor.name}' does not" +
742: throw Host.ExceptParam(nameof(options.EarlyStoppingCriteria), $"Early stopping enabled but unable to" +
808: throw Host.ExceptSchemaMismatch(nameof(labelColumn), "Label",
847: featurizedImages = featurizedImages.OrderBy(x => Host.Rand.Next(0, metrics.Bottleneck.Index)).ToList();
1123: Host.CheckAlive();
1199: _session = LoadTFSessionByModelFilePath(Host, frozenModelPath, false);