1 write to Host
Microsoft.ML.Data (1)
Training\TrainerEstimatorBase.cs (1)
64    Host = host;
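
For context, the single write above is the constructor assignment in TrainerEstimatorBase; the sketch below illustrates that assign-once pattern with simplified, hypothetical types (it is not the actual ML.NET source).

    using System;

    // Sketch: the host channel provider is captured once in the base trainer's
    // constructor and is read-only everywhere else (the 369 references below).
    // IHostSketch is a hypothetical stand-in, not the real ML.NET IHost.
    public interface IHostSketch { }

    public abstract class TrainerEstimatorBaseSketch
    {
        private protected IHostSketch Host { get; }

        private protected TrainerEstimatorBaseSketch(IHostSketch host)
        {
            // Hypothetical guard; the real constructor also validates its arguments.
            Host = host ?? throw new ArgumentNullException(nameof(host));
        }
    }
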
369 references to Host
Microsoft.ML.Data (11)
Training\TrainerEstimatorBase.cs (11)
65    Host.CheckParam(feature.IsValid, nameof(feature), "not initialized properly");
81    Host.CheckValue(inputSchema, nameof(inputSchema));
99    Host.CheckValue(context, nameof(context));
101    Host.Check(pred != null, "Training did not return a predictor.");
109    throw Host.ExceptSchemaMismatch(nameof(inputSchema), "feature", FeatureColumn.Name);
111    throw Host.ExceptSchemaMismatch(nameof(inputSchema), "feature", FeatureColumn.Name,
117    throw Host.ExceptSchemaMismatch(nameof(inputSchema), "weight", WeightColumn.Name);
119    throw Host.ExceptSchemaMismatch(nameof(inputSchema), "weight", WeightColumn.Name,
128    throw Host.ExceptSchemaMismatch(nameof(inputSchema), "label", LabelColumn.Name);
136    Host.Assert(LabelColumn.IsValid);
139    throw Host.ExceptSchemaMismatch(nameof(labelCol), "label", LabelColumn.Name,
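
All of the references above are argument checks and schema-mismatch exceptions routed through the host. A minimal sketch of that check/throw pattern, using hypothetical stand-in types shaped after the calls listed (not the real ML.NET contracts), follows.

    using System;

    // Sketch of the Host.CheckValue / throw Host.ExceptSchemaMismatch pattern.
    public sealed class HostSketch
    {
        public void CheckValue(object value, string paramName)
        {
            if (value == null)
                throw new ArgumentNullException(paramName);
        }

        public Exception ExceptSchemaMismatch(string paramName, string role, string columnName)
            => new ArgumentOutOfRangeException(paramName, $"Could not find {role} column '{columnName}'.");
    }

    public sealed class SchemaCheckSketch
    {
        private readonly HostSketch _host = new HostSketch();

        public void CheckInputSchema(string[] inputColumns, string featureColumnName)
        {
            _host.CheckValue(inputColumns, nameof(inputColumns));

            // Mirrors the "throw Host.ExceptSchemaMismatch(...)" entries above:
            // the host constructs the exception, the caller throws it.
            if (Array.IndexOf(inputColumns, featureColumnName) < 0)
                throw _host.ExceptSchemaMismatch(nameof(inputColumns), "feature", featureColumnName);
        }
    }
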
Microsoft.ML.FastTree (75)
BoostingFastTree.cs (2)
72    FastTreeTrainerOptions.Bias, Host);
138    earlyStoppingRule = FastTreeTrainerOptions.EarlyStoppingRuleFactory.CreateComponent(Host, lowerIsBetter);
FastTree.cs (13)
94    private protected string InnerOptions => CmdParser.GetSettings(Host, FastTreeTrainerOptions, new TOptions());
146    Host.CheckValue(options, nameof(options));
192    var instanceConverter = new ExamplesToFastTreeBins(Host, FastTreeTrainerOptions.MaximumBinCountPerFeature, useTranspose, !FastTreeTrainerOptions.FeatureFlocks, FastTreeTrainerOptions.MinimumExampleCountPerLeaf, GetMaxLabel());
204    Host.AssertValue(data);
205    Host.Assert(data.Schema.Feature.HasValue);
646    using (var pch = Host.StartProgressChannel("FastTree training"))
652    Host.CheckAlive();
693    Host.CheckAlive();
745    Host.CheckAlive();
758    Host.CheckAlive();
761    Host.CheckAlive();
764    Host.CheckAlive();
818    ch.Trace("CommandLine = {0}", CmdParser.GetSettings(Host, FastTreeTrainerOptions, new TOptions()));
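
The FastTree entries show the second major pattern: a progress channel wraps the training loop and Host.CheckAlive() is polled so cancellation can interrupt long-running work. A self-contained sketch (hypothetical types, not the FastTree implementation) is below.

    using System;
    using System.Threading;

    // Sketch of the StartProgressChannel / CheckAlive pattern.
    public sealed class ProgressHostSketch
    {
        private readonly CancellationToken _token;
        public ProgressHostSketch(CancellationToken token) => _token = token;

        public IDisposable StartProgressChannel(string name) => new Channel(name);

        // Throws if the host environment has been cancelled.
        public void CheckAlive() => _token.ThrowIfCancellationRequested();

        private sealed class Channel : IDisposable
        {
            private readonly string _name;
            public Channel(string name) { _name = name; Console.WriteLine($"[{_name}] started"); }
            public void Dispose() => Console.WriteLine($"[{_name}] finished");
        }
    }

    public static class TrainingLoopSketch
    {
        public static void Train(ProgressHostSketch host, int iterations)
        {
            using (host.StartProgressChannel("FastTree training"))
            {
                for (int i = 0; i < iterations; i++)
                {
                    host.CheckAlive(); // bail out promptly if cancellation was requested
                    // ... one boosting iteration would run here ...
                }
            }
        }
    }
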
FastTreeClassification.cs (6)
186    Host.CheckValue(context, nameof(context));
191    using (var ch = Host.Start("Training"))
206    var pred = new FastTreeBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
212    var cali = new PlattCalibrator(Host, -1 * _sigmoidParameter, 0);
213    return new FeatureWeightsCalibratedModelParameters<FastTreeBinaryModelParameters, PlattCalibrator>(Host, pred, cali);
303    => new BinaryPredictionTransformer<CalibratedModelParametersBase<FastTreeBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
FastTreeRanking.cs (9)
107    Host.CheckNonEmpty(rowGroupColumnName, nameof(rowGroupColumnName));
125    () => throw Host.ExceptSchemaMismatch(nameof(labelCol), "label", labelCol.Name, "Single or Key", labelCol.GetTypeString());
140    Host.CheckValue(context, nameof(context));
145    using (var ch = Host.Start("Training"))
152    return new FastTreeRankingModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
159    Host.AssertValue(FastTreeTrainerOptions.CustomGains);
165    throw Host.Except(ex, "Error in the format of custom gains. Inner exception is {0}", ex.Message);
223    Host.Assert(FastTreeTrainerOptions.BaggingSize > 0);
476    => new RankingPredictionTransformer<FastTreeRankingModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
FastTreeRegression.cs (4)
110    Host.CheckValue(context, nameof(context));
115    using (var ch = Host.Start("Training"))
124    return new FastTreeRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
192    => new RegressionPredictionTransformer<FastTreeRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
FastTreeTweedie.cs (7)
100    Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
101    Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
119    Host.CheckValue(context, nameof(context));
124    using (var ch = Host.Start("Training"))
134    return new FastTreeTweedieModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
207    Host.CheckUserArg(1 <= FastTreeTrainerOptions.Index && FastTreeTrainerOptions.Index <= 2, nameof(FastTreeTrainerOptions.Index), "Must be in the range [1, 2]");
351    => new RegressionPredictionTransformer<FastTreeTweedieModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
GamClassification.cs (4)
140    var predictor = new GamBinaryModelParameters(Host,
142    var calibrator = new PlattCalibrator(Host, -1.0 * _sigmoidParameter, 0);
143    return new ValueMapperCalibratedModelParameters<GamBinaryModelParameters, PlattCalibrator>(Host, predictor, calibrator);
174    => new BinaryPredictionTransformer<CalibratedModelParametersBase<GamBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
GamRegression.cs (2)
109    return new GamRegressionModelParameters(Host, BinUpperBounds, BinEffects, MeanEffect, InputLength, FeatureMap);
126    => new RegressionPredictionTransformer<GamRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
GamTrainer.cs (14)
201    Host.CheckValue(options, nameof(options));
203    Host.CheckParam(options.LearningRate > 0, nameof(options.LearningRate), "Must be positive.");
204    Host.CheckParam(options.NumberOfThreads == null || options.NumberOfThreads > 0, nameof(options.NumberOfThreads), "Must be positive.");
205    Host.CheckParam(0 <= options.EntropyCoefficient && options.EntropyCoefficient <= 1, nameof(options.EntropyCoefficient), "Must be in [0, 1].");
206    Host.CheckParam(0 <= options.GainConfidenceLevel && options.GainConfidenceLevel < 1, nameof(options.GainConfidenceLevel), "Must be in [0, 1).");
207    Host.CheckParam(0 < options.MaximumBinCountPerFeature, nameof(options.MaximumBinCountPerFeature), "Must be positive.");
208    Host.CheckParam(0 < options.NumberOfIterations, nameof(options.NumberOfIterations), "Must be positive.");
209    Host.CheckParam(0 < options.MinimumExampleCountPerLeaf, nameof(options.MinimumExampleCountPerLeaf), "Must be positive.");
222    using (var ch = Host.Start("Training"))
257    var instanceConverter = new ExamplesToFastTreeBins(Host, GamTrainerOptions.MaximumBinCountPerFeature, useTranspose, !GamTrainerOptions.FeatureFlocks, GamTrainerOptions.MinimumExampleCountPerLeaf, float.PositiveInfinity);
264    Host.Assert(FeatureMap == null || FeatureMap.Length == TrainSet.NumFeatures);
269    Host.AssertValue(data);
270    Host.Assert(data.Schema.Feature.HasValue);
301    using (var pch = Host.StartProgressChannel("GAM training"))
RandomForest.cs (2)
44    Host.CheckValue(ch, nameof(ch));
72    FastTreeTrainerOptions.Bias, Host);
RandomForestClassification.cs (6)
197    Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
198    Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
213    Host.CheckValue(context, nameof(context));
218    using (var ch = Host.Start("Training"))
251    return new FastForestBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions);
366    => new BinaryPredictionTransformer<FastForestBinaryModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
RandomForestRegression.cs (6)
336    Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
337    Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
352    Host.CheckValue(context, nameof(context));
357    using (var ch = Host.Start("Training"))
384    return new FastForestRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions, FastTreeTrainerOptions.NumberOfQuantileSamples);
495    => new RegressionPredictionTransformer<FastForestRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Microsoft.ML.KMeansClustering (15)
KMeansPlusPlusTrainer.cs (15)
173    Host.CheckValue(options, nameof(options));
174    Host.CheckUserArg(options.NumberOfClusters > 0, nameof(options.NumberOfClusters), "Must be positive");
180    Host.CheckUserArg(options.MaximumNumberOfIterations > 0, nameof(options.MaximumNumberOfIterations), "Must be positive");
183    Host.CheckUserArg(options.OptimizationTolerance > 0, nameof(options.OptimizationTolerance), "Tolerance must be positive");
186    Host.CheckUserArg(options.AccelerationMemoryBudgetMb > 0, nameof(options.AccelerationMemoryBudgetMb), "Must be positive");
191    Host.CheckUserArg(!options.NumberOfThreads.HasValue || options.NumberOfThreads > 0, nameof(options.NumberOfThreads),
199    Host.CheckValue(context, nameof(context));
205    using (var ch = Host.Start("Training"))
213    Host.AssertValue(ch);
241    KMeansPlusPlusInit.Initialize(Host, _numThreads, ch, cursorFactory, _k, dimensionality,
246    KMeansRandomInit.Initialize(Host, _numThreads, ch, cursorFactory, _k,
252    KMeansBarBarInitialization.Initialize(Host, _numThreads, ch, cursorFactory, _k, dimensionality,
260    Host, _numThreads, ch, cursorFactory, totalTrainingInstances, _k, dimensionality, _maxIterations,
271    return new KMeansModelParameters(Host, _k, centroids, copyIn: true);
322    => new ClusteringPredictionTransformer<KMeansModelParameters>(Host, model, trainSchema, _featureColumn);
Microsoft.ML.LightGbm (52)
LightGbmBinaryTrainer.cs (6)
255    Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete");
257    var pred = new LightGbmBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs);
258    var cali = new PlattCalibrator(Host, -LightGbmTrainerOptions.Sigmoid, 0);
259    return new FeatureWeightsCalibratedModelParameters<LightGbmBinaryModelParameters, PlattCalibrator>(Host, pred, cali);
264    Host.AssertValue(ch);
294    => new BinaryPredictionTransformer<CalibratedModelParametersBase<LightGbmBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
LightGbmMulticlassTrainer.cs (14)
208    return new LightGbmBinaryModelParameters(Host, GetBinaryEnsemble(classID), FeatureCount, innerArgs);
213    Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete.");
215    Host.Assert(_numberOfClassesIncludingNan > 1, "Must know the number of classes before creating a predictor.");
216    Host.Assert(TrainedEnsemble.NumTrees % _numberOfClassesIncludingNan == 0, "Number of trees should be a multiple of number of classes.");
223    var cali = new PlattCalibrator(Host, -LightGbmTrainerOptions.Sigmoid, 0);
224    predictors[i] = new FeatureWeightsCalibratedModelParameters<LightGbmBinaryModelParameters, PlattCalibrator>(Host, pred, cali);
228    return OneVersusAllModelParameters.Create(Host, OneVersusAllModelParameters.OutputFormula.Softmax, predictors);
230    return OneVersusAllModelParameters.Create(Host, predictors);
235    Host.AssertValue(ch);
296    _numberOfClassesIncludingNan = keyType.GetCountAsInt32(Host) + 1;
298    _numberOfClassesIncludingNan = keyType.GetCountAsInt32(Host);
299    _numberOfClasses = keyType.GetCountAsInt32(Host);
336    Host.AssertValue(ch);
376    => new MulticlassPredictionTransformer<OneVersusAllModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
LightGbmRankingTrainer.cs (7)
216    Host.CheckNonEmpty(rowGroupIdColumnName, nameof(rowGroupIdColumnName));
241    Host.AssertValue(ch);
273    () => throw Host.ExceptSchemaMismatch(nameof(labelCol), "label", labelCol.Name, "Single or Key", labelCol.GetTypeString());
283    Host.Check(TrainedEnsemble != null, "The predictor cannot be created before training is complete");
285    return new LightGbmRankingModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs);
290    Host.AssertValue(ch);
307    => new RankingPredictionTransformer<LightGbmRankingModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
LightGbmRegressionTrainer.cs (4)
218    Host.Check(TrainedEnsemble != null,
221    return new LightGbmRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, innerArgs);
226    Host.AssertValue(ch);
255    => new RegressionPredictionTransformer<LightGbmRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
LightGbmTrainerBase.cs (21)
370    Host.CheckValue(options, nameof(options));
379    ParallelTraining = LightGbmTrainerOptions.ParallelTrainer != null ? LightGbmTrainerOptions.ParallelTrainer.CreateComponent(Host) : new SingleTrainer();
380    GbmOptions = LightGbmTrainerOptions.ToDictionary(Host);
388    Host.CheckValue(context, nameof(context));
397    using (var ch = Host.Start("Loading LightGBM model file"))
462    using (var ch = Host.Start("Loading data for LightGBM"))
464    using (var pch = Host.StartProgressChannel("Loading data for LightGBM"))
471    using (var ch = Host.Start("Training with LightGBM"))
473    using (var pch = Host.StartProgressChannel("Training with LightGBM"))
669    Host.AssertValue(ch);
700    Host.AssertValue(ch);
721    Host.AssertValue(ch);
722    Host.AssertValue(pch);
723    Host.AssertValue(dtrain);
724    Host.AssertValueOrNull(dvalid);
725    Host.CheckAlive();
735    using (Booster bst = WrappedLightGbmTraining.Train(Host, ch, pch, GbmOptions, dtrain,
938    Host.AssertValue(ch);
942    var rand = Host.Rand;
1027    Host.AssertValue(ch);
1032    var rand = Host.Rand;
Microsoft.ML.Mkl.Components (23)
OlsLinearRegression.cs (9)
124    Host.CheckValue(options, nameof(options));
125    Host.CheckUserArg(options.L2Regularization >= 0, nameof(options.L2Regularization), "L2 regularization term cannot be negative");
132    => new RegressionPredictionTransformer<OlsModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
153    using (var ch = Host.Start("Training"))
415    Host.AssertValue(ch);
452    return new OlsModelParameters(Host, in weights, bias);
459    IValueMapper lrPredictor = new LinearRegressionModelParameters(Host, in weights, bias);
488    return new OlsModelParameters(Host, in weights, bias, rSquared: rSquared, rSquaredAdjusted: rSquaredAdjusted);
533    return new OlsModelParameters(Host, in weights, bias, standardErrors, tValues, pValues, rSquared, rSquaredAdjusted);
SymSgdClassificationTrainer.cs (14)
200    idvToFeedTrain = new RowShufflingTransformer(Host, shuffleArgs, idvToShuffle);
219    Host.CheckValue(context, nameof(context));
220    using (var ch = Host.Start("Training"))
227    Host.CheckParam(initPred == null || linearInitPred != null, nameof(context),
244    Host.CheckValue(options, nameof(options));
245    options.Check(Host);
253    Host.CheckParam(weights.Length > 0, nameof(weights));
258    var predictor = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias);
259    return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, predictor, new PlattCalibrator(Host, -1, 0));
263    => new BinaryPredictionTransformer<TPredictor>(Host, model, trainSchema, FeatureColumn.Name);
745    using (var pch = Host.StartProgressChannel("Preprocessing"))
751    using (var pch = Host.StartProgressChannel("Training"))
791    inputDataManager.RestartLoading(_options.Shuffle, Host);
Microsoft.ML.PCA (11)
PcaTrainer.cs (11)
169    _seed = options.Seed ?? Host.Rand.Next();
176    _seed = seed ?? Host.Rand.Next();
181    Host.CheckUserArg(_rank > 0, nameof(_rank), "Rank must be positive");
182    Host.CheckUserArg(_oversampling >= 0, nameof(_oversampling), "Oversampling must be non-negative");
188    Host.CheckValue(context, nameof(context));
192    using (var ch = Host.Start("Training"))
213    Host.AssertValue(ch);
236    Project(Host, cursorFactory, ref mean, omega, y, out numBad);
254    Project(Host, cursorFactory, ref mean, q, b, out numBad);
269    return new PcaModelParameters(Host, _rank, b, in mean);
391    => new AnomalyPredictionTransformer<PcaModelParameters>(Host, model, trainSchema, _featureColumn);
Microsoft.ML.StandardTrainers (165)
LdSvm\LdSvmTrainer.cs (27)
170    Host.CheckValue(options, nameof(options));
171    CheckOptions(Host, options);
191    Host.CheckValue(trainContext, nameof(trainContext));
192    using (var ch = Host.Start("Training"))
236    var sample = data.SampleForGammaUpdate(Host.Rand);
240    int thetaIdx = Host.Rand.Next(numLeaf - 1);
313    var sample = data.SampleExamples(Host.Rand);
393    return new LdSvmModelParameters(Host, w, thetaPrime, theta, _options.Sigma, biasW, biasTheta,
408    Host.Assert(Utils.Size(tempW) == count);
409    Host.Assert(Utils.Size(w) == count);
410    Host.Assert(Utils.Size(theta) == half);
411    Host.Assert(Utils.Size(thetaPrime) == count);
412    Host.Assert(Utils.Size(biasW) == count);
413    Host.Assert(Utils.Size(biasTheta) == half);
414    Host.Assert(Utils.Size(biasThetaPrime) == count);
415    Host.Assert(Utils.Size(tempThetaPrime) == count);
416    Host.Assert(Utils.Size(tempTheta) == half);
417    Host.Assert(Utils.Size(tempBiasW) == count);
418    Host.Assert(Utils.Size(tempBiasTheta) == half);
419    Host.Assert(Utils.Size(tempBiasThetaPrime) == count);
430    wInit.Values[j] = 2 * Host.Rand.NextSingle() - 1;
431    thetaPrimeInit.Values[j] = 2 * Host.Rand.NextSingle() - 1;
433    thetaInit.Values[j] = 2 * Host.Rand.NextSingle() - 1;
443    float bW = 2 * Host.Rand.NextSingle() - 1;
446    float bTP = 2 * Host.Rand.NextSingle() - 1;
459    float bT = 2 * Host.Rand.NextSingle() - 1;
666    => new BinaryPredictionTransformer<LdSvmModelParameters>(Host, model, trainSchema, _options.FeatureColumnName);
Standard\LogisticRegression\LbfgsPredictorBase.cs (23)
235    Host.CheckValue(options, nameof(options));
244    Host.CheckUserArg(!LbfgsTrainerOptions.UseThreads || LbfgsTrainerOptions.NumberOfThreads > 0 || LbfgsTrainerOptions.NumberOfThreads == null,
246    Host.CheckUserArg(LbfgsTrainerOptions.L2Regularization >= 0, nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative");
247    Host.CheckUserArg(LbfgsTrainerOptions.L1Regularization >= 0, nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative");
248    Host.CheckUserArg(LbfgsTrainerOptions.OptimizationTolerance > 0, nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive");
249    Host.CheckUserArg(LbfgsTrainerOptions.HistorySize > 0, nameof(LbfgsTrainerOptions.HistorySize), "Must be positive");
250    Host.CheckUserArg(LbfgsTrainerOptions.MaximumNumberOfIterations > 0, nameof(LbfgsTrainerOptions.MaximumNumberOfIterations), "Must be positive");
251    Host.CheckUserArg(LbfgsTrainerOptions.StochasticGradientDescentInitilaizationTolerance >= 0, nameof(LbfgsTrainerOptions.StochasticGradientDescentInitilaizationTolerance), "Must be non-negative");
252    Host.CheckUserArg(LbfgsTrainerOptions.NumberOfThreads == null || LbfgsTrainerOptions.NumberOfThreads.Value >= 0, nameof(LbfgsTrainerOptions.NumberOfThreads), "Must be non-negative");
254    Host.CheckParam(!(LbfgsTrainerOptions.L2Regularization < 0), nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative, if provided.");
255    Host.CheckParam(!(LbfgsTrainerOptions.L1Regularization < 0), nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative, if provided");
256    Host.CheckParam(!(LbfgsTrainerOptions.OptimizationTolerance <= 0), nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive, if provided.");
257    Host.CheckParam(!(LbfgsTrainerOptions.HistorySize <= 0), nameof(LbfgsTrainerOptions.HistorySize), "Must be positive, if provided.");
275    using (var ch = Host.Start("Initialization"))
321    ? new L1Optimizer(Host, BiasCount, L1Weight / NumGoodRows, MemorySize, DenseOptimizer, null, EnforceNonNegativity)
322    : new Optimizer(Host, MemorySize, DenseOptimizer, null, EnforceNonNegativity);
331    initWeights[j] = InitWtsDiameter * (Host.Rand.NextSingle() - 0.5f);
433    Host.CheckParam(context.InitialPredictor == null || context.InitialPredictor is IPredictor, nameof(context.InitialPredictor));
449    using (var ch = Host.Start("Training"))
466    Host.AssertValue(ch);
565    Host.AssertValue(ch);
600    using (var pch = Host.StartProgressChannel("LBFGS data prep"))
611    Host.CheckAlive();
Standard\LogisticRegression\LogisticRegression.cs (9)
144    Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
145    Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
181    => new BinaryPredictionTransformer<CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator>>(Host, model, trainSchema, FeatureColumn.Name);
274    Host.Assert(namesSpans.Length == featureLength);
312    _stats = new ModelStatisticsBase(Host, NumGoodRows, numParams, deviance, nullDeviance);
414    _stats = new LinearModelParameterStatistics(Host, NumGoodRows, numParams, deviance, nullDeviance, std, weightsOnly, bias);
456    return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host,
457    new LinearBinaryModelParameters(Host, in weights, bias, _stats),
458    new PlattCalibrator(Host, -1, 0));
Standard\LogisticRegression\MulticlassLogisticRegression.cs (6)
152    Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
153    Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
299    using (var ch = Host.Start("Creating Predictor"))
305    return new MaximumEntropyModelParameters(Host, in CurrentWeights, _numClasses, NumFeatures, _labelNames, _stats);
359    _stats = new ModelStatisticsBase(Host, NumGoodRows, numParams, deviance, nullDeviance);
384    => new MulticlassPredictionTransformer<MaximumEntropyModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
Standard\MulticlassClassification\MulticlassNaiveBayesTrainer.cs (12)
100    Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
101    Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
111    Host.CheckValue(options, nameof(options));
130    => new MulticlassPredictionTransformer<NaiveBayesMulticlassModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
134    Host.CheckValue(context, nameof(context));
136    Host.Check(data.Schema.Label.HasValue, "Missing Label column");
138    Host.Check(labelCol.Type == NumberDataViewType.Single || labelCol.Type is KeyDataViewType,
141    Host.Check(data.Schema.Feature.HasValue, "Missing Feature column");
146    labelCount = labelKeyType.GetCountAsInt32(Host);
150    using (var pch = Host.StartProgressChannel("Multi Class Naive Bayes training"))
151    using (var ch = Host.Start("Training"))
201    return new NaiveBayesMulticlassModelParameters(Host, labelHistogram, featureHistogram, featureCount);
Standard\Online\AveragedPerceptron.cs (1)
218    => new BinaryPredictionTransformer<LinearBinaryModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\Online\LinearSvm.cs (1)
338    => new BinaryPredictionTransformer<LinearBinaryModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\Online\OnlineGradientDescent.cs (1)
203    => new RegressionPredictionTransformer<LinearRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\Online\OnlineLinear.cs (7)
132    ParentHost = parent.Host;
165    weightValues[i] = parent.OnlineLinearTrainerOptions.InitialWeightsDiameter * (parent.Host.Rand.NextSingle() - (float)0.5);
167    Bias = parent.OnlineLinearTrainerOptions.InitialWeightsDiameter * (parent.Host.Rand.NextSingle() - (float)0.5);
276    Host.CheckValue(context, nameof(context));
284    Host.CheckParam(initPredictor == null || initLinearPred != null, nameof(context),
292    using (var ch = Host.Start("Training"))
322    var rand = shuffle ? Host.Rand : null;
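
The OnlineLinear entries illustrate the remaining pattern: randomness is drawn from Host.Rand rather than a locally constructed Random, so initial weights stay reproducible under the host's seed. A small sketch with hypothetical types follows (NextDouble is used instead of NextSingle so it compiles on older runtimes).

    using System;

    // Sketch of the Host.Rand weight-initialization pattern.
    public sealed class RandomHostSketch
    {
        // A fixed seed stands in for the seed the real host environment manages.
        public Random Rand { get; } = new Random(42);
    }

    public static class WeightInitSketch
    {
        public static float[] Initialize(RandomHostSketch host, int count, float diameter)
        {
            var weights = new float[count];
            for (int i = 0; i < count; i++)
            {
                // Mirrors "InitialWeightsDiameter * (Host.Rand.NextSingle() - 0.5f)" above.
                weights[i] = diameter * ((float)host.Rand.NextDouble() - 0.5f);
            }
            return weights;
        }
    }
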
Standard\PoissonRegression\PoissonRegression.cs (4)
99    Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
100    Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
128    => new RegressionPredictionTransformer<PoissonRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
195    return new PoissonRegressionModelParameters(Host, in weights, bias);
Standard\SdcaBinary.cs (41)
71    Host.CheckValue(context, nameof(context));
72    using (var ch = Host.Start("Training"))
78    Host.CheckParam(context.InitialPredictor == null || linInitPred != null, nameof(context),
111    idvToFeedTrain = new RowShufflingTransformer(Host, shuffleArgs, idvToShuffle);
339    Host.CheckUserArg(numThreads > 0, nameof(OptionsBase.NumberOfThreads), "The number of threads must be either null or a positive integer.");
382    using (var pch = Host.StartProgressChannel("SDCA preprocessing"))
387    Host.CheckAlive();
432    using (var pch = Host.StartProgressChannel("SDCA preprocessing with lookup"))
563    rands[i] = RandomUtils.Create(Host.Rand.Next());
572    using (var pch = Host.StartProgressChannel("SDCA invariants initialization"))
578    Host.CheckAlive();
600    using (var pch = Host.StartProgressChannel("SDCA training"))
814    Host.CheckAlive();
978    Host.CheckAlive();
989    Host.Assert(idToIdx == null || row == duals.Length);
1504    Host.CheckNonEmpty(featureColumnName, nameof(featureColumnName));
1505    Host.CheckNonEmpty(labelColumnName, nameof(labelColumnName));
1527    Host.CheckParam(Utils.Size(weights) == 1, nameof(weights));
1528    Host.CheckParam(Utils.Size(bias) == 1, nameof(bias));
1529    Host.CheckParam(weights[0].Length > 0, nameof(weights));
1536    return new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias[0]);
1551    => new BinaryPredictionTransformer<TModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
1616    var calibrator = new PlattCalibrator(Host, -1, 0);
1617    return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, linearModel, calibrator);
1815    Host.CheckParam(Utils.Size(weights) == 1, nameof(weights));
1816    Host.CheckParam(Utils.Size(bias) == 1, nameof(bias));
1817    Host.CheckParam(weights[0].Length > 0, nameof(weights));
1824    var predictor = new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias[0]);
1827    return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, predictor, new PlattCalibrator(Host, -1, 0));
1976    Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
1977    Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
2008    => new BinaryPredictionTransformer<TModel>(Host, model, trainSchema, FeatureColumn.Name);
2115    rands[e - 1] = RandomUtils.Create(Host.Rand.Next());
2165    using (var pch = Host.StartProgressChannel("SGD Training"))
2219    Host.CheckParam(weights.Length > 0, nameof(weights));
2225    return new LinearBinaryModelParameters(Host, in maybeSparseWeights, bias);
2318    var calibrator = new PlattCalibrator(Host, -1, 0);
2320    return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, subModel, calibrator);
2461    var calibrator = new PlattCalibrator(Host, -1, 0);
2463    return new ParameterMixingCalibratedModelParameters<LinearBinaryModelParameters, PlattCalibrator>(Host, subModel, calibrator);
Standard\SdcaMulticlass.cs (19)
137    Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
138    Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
147    Host.CheckValue(labelColumn, nameof(labelColumn));
148    Host.CheckValue(featureColumn, nameof(featureColumn));
203    Host.CheckAlive();
379    Host.CheckAlive();
408    Host.Assert(idToIdx == null || row * numClasses == duals.Length);
556    Host.CheckValue(weights, nameof(weights));
557    Host.CheckValue(bias, nameof(bias));
558    Host.CheckParam(weights.Length > 0, nameof(weights));
559    Host.CheckParam(weights.Length == bias.Length, nameof(weights));
561    return new MaximumEntropyModelParameters(Host, weights, bias, bias.Length, weights[0].Length, null, stats: null);
566    new MulticlassPredictionTransformer<MaximumEntropyModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
653    Host.CheckValue(weights, nameof(weights));
654    Host.CheckValue(bias, nameof(bias));
655    Host.CheckParam(weights.Length > 0, nameof(weights));
656    Host.CheckParam(weights.Length == bias.Length, nameof(weights));
658    return new LinearMulticlassModelParameters(Host, weights, bias, bias.Length, weights[0].Length, null, stats: null);
663    new MulticlassPredictionTransformer<LinearMulticlassModelParameters>(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name);
Standard\SdcaRegression.cs (10)
122    Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
123    Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
131    Host.CheckValue(labelColumn, nameof(labelColumn));
132    Host.CheckValue(featureColumn, nameof(featureColumn));
133    Host.CheckValueOrNull(weightColumn);
146    Host.CheckParam(Utils.Size(weights) == 1, nameof(weights));
147    Host.CheckParam(Utils.Size(bias) == 1, nameof(bias));
148    Host.CheckParam(weights[0].Length > 0, nameof(weights));
155    return new LinearRegressionModelParameters(Host, in maybeSparseWeights, bias[0]);
202    => new RegressionPredictionTransformer<LinearRegressionModelParameters>(Host, model, trainSchema, FeatureColumn.Name);
Standard\StochasticTrainerBase.cs (4)
32    Host.CheckValue(context, nameof(context));
33    using (var ch = Host.Start("Training"))
41    Host.CheckParam(context.InitialPredictor == null || linInitPred != null, nameof(context),
75    idvToFeedTrain = new RowShufflingTransformer(Host, shuffleArgs, idvToShuffle);
Microsoft.ML.Vision (17)
ImageClassificationTrainer.cs (17)
531    Host.CheckValue(options, nameof(options));
532    Host.CheckNonEmpty(options.FeatureColumnName, nameof(options.FeatureColumnName));
533    Host.CheckNonEmpty(options.LabelColumnName, nameof(options.LabelColumnName));
534    Host.CheckNonEmpty(options.ScoreColumnName, nameof(options.ScoreColumnName));
535    Host.CheckNonEmpty(options.PredictedLabelColumnName, nameof(options.PredictedLabelColumnName));
564    var logger = Host.Start(nameof(ImageClassificationTrainer));
605    throw Host.ExceptSchemaMismatch(nameof(input.Schema), "label", (string)labelColumn.Name, "Key",
614    _session = LoadTensorFlowSessionFromMetaGraph(Host, _options.Arch).Session;
651    => new MulticlassPredictionTransformer<ImageClassificationModelParameters>(Host, model, trainSchema,
725    return new ImageClassificationModelParameters(Host, session, _classCount, _jpegDataTensorName,
731    Host.CheckNonWhiteSpace(options.LabelColumnName, nameof(options.LabelColumnName));
735    throw Host.ExceptParam(nameof(_labelTensor.name), $"'{_labelTensor.name}' does not" +
742    throw Host.ExceptParam(nameof(options.EarlyStoppingCriteria), $"Early stopping enabled but unable to" +
808    throw Host.ExceptSchemaMismatch(nameof(labelColumn), "Label",
847    featurizedImages = featurizedImages.OrderBy(x => Host.Rand.Next(0, metrics.Bottleneck.Index)).ToList();
1123    Host.CheckAlive();
1199    _session = LoadTFSessionByModelFilePath(Host, frozenModelPath, false);