1 write to LbfgsTrainerOptions
Microsoft.ML.StandardTrainers (1)
Standard\LogisticRegression\LbfgsPredictorBase.cs (1)
236    LbfgsTrainerOptions = options;
45 references to LbfgsTrainerOptions
Microsoft.ML.StandardTrainers (45)
Standard\LogisticRegression\LbfgsPredictorBase.cs (39)
244    Host.CheckUserArg(!LbfgsTrainerOptions.UseThreads || LbfgsTrainerOptions.NumberOfThreads > 0 || LbfgsTrainerOptions.NumberOfThreads == null,
245        nameof(LbfgsTrainerOptions.NumberOfThreads), "Must be positive (or empty for default)");
246    Host.CheckUserArg(LbfgsTrainerOptions.L2Regularization >= 0, nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative");
247    Host.CheckUserArg(LbfgsTrainerOptions.L1Regularization >= 0, nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative");
248    Host.CheckUserArg(LbfgsTrainerOptions.OptimizationTolerance > 0, nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive");
249    Host.CheckUserArg(LbfgsTrainerOptions.HistorySize > 0, nameof(LbfgsTrainerOptions.HistorySize), "Must be positive");
250    Host.CheckUserArg(LbfgsTrainerOptions.MaximumNumberOfIterations > 0, nameof(LbfgsTrainerOptions.MaximumNumberOfIterations), "Must be positive");
251    Host.CheckUserArg(LbfgsTrainerOptions.StochasticGradientDescentInitilaizationTolerance >= 0, nameof(LbfgsTrainerOptions.StochasticGradientDescentInitilaizationTolerance), "Must be non-negative");
252    Host.CheckUserArg(LbfgsTrainerOptions.NumberOfThreads == null || LbfgsTrainerOptions.NumberOfThreads.Value >= 0, nameof(LbfgsTrainerOptions.NumberOfThreads), "Must be non-negative");
254    Host.CheckParam(!(LbfgsTrainerOptions.L2Regularization < 0), nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative, if provided.");
255    Host.CheckParam(!(LbfgsTrainerOptions.L1Regularization < 0), nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative, if provided");
256    Host.CheckParam(!(LbfgsTrainerOptions.OptimizationTolerance <= 0), nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive, if provided.");
257    Host.CheckParam(!(LbfgsTrainerOptions.HistorySize <= 0), nameof(LbfgsTrainerOptions.HistorySize), "Must be positive, if provided.");
259    L2Weight = LbfgsTrainerOptions.L2Regularization;
260    L1Weight = LbfgsTrainerOptions.L1Regularization;
261    OptTol = LbfgsTrainerOptions.OptimizationTolerance;
262    MemorySize = LbfgsTrainerOptions.HistorySize;
263    MaxIterations = LbfgsTrainerOptions.MaximumNumberOfIterations;
264    SgdInitializationTolerance = LbfgsTrainerOptions.StochasticGradientDescentInitilaizationTolerance;
265    Quiet = LbfgsTrainerOptions.Quiet;
266    InitWtsDiameter = LbfgsTrainerOptions.InitialWeightsDiameter;
267    UseThreads = LbfgsTrainerOptions.UseThreads;
268    NumThreads = LbfgsTrainerOptions.NumberOfThreads;
269    DenseOptimizer = LbfgsTrainerOptions.DenseOptimizer;
270    EnforceNonNegativity = LbfgsTrainerOptions.EnforceNonNegativity;
Standard\LogisticRegression\LogisticRegression.cs (4)
148    ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;
158    ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;
307    if (hessianDimension > int.MaxValue || LbfgsTrainerOptions.ComputeStandardDeviation == null)
413    var std = LbfgsTrainerOptions.ComputeStandardDeviation.ComputeStandardDeviation(hessian, weightIndices, numParams, CurrentWeights.Length, ch, L2Weight);
Standard\LogisticRegression\MulticlassLogisticRegression.cs (2)
155    ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;
164    ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;