2 writes to FastTreeTrainerOptions
Microsoft.ML.FastTree (2)
FastTree.cs (2)
    113: FastTreeTrainerOptions = new TOptions();
    147: FastTreeTrainerOptions = options;
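
The two writes correspond to the two ways a FastTree-family trainer is constructed: the convenience constructor creates a fresh TOptions and copies scalar arguments into it, while the options-based constructor stores the caller's object directly. Below is a minimal sketch of both entry points through the public ML.NET API, assuming the Microsoft.ML.Trainers.FastTree surface; the column names and hyperparameter values are illustrative, not taken from the source.

    using Microsoft.ML;
    using Microsoft.ML.Trainers.FastTree;

    var mlContext = new MLContext(seed: 0);

    // Options-based path: the trainer keeps this object ("FastTreeTrainerOptions = options;").
    var options = new FastTreeBinaryTrainer.Options
    {
        NumberOfLeaves = 20,
        NumberOfTrees = 100,
        MinimumExampleCountPerLeaf = 10,
        LearningRate = 0.2,
        LabelColumnName = "Label",
        FeatureColumnName = "Features"
    };
    var trainerFromOptions = mlContext.BinaryClassification.Trainers.FastTree(options);

    // Convenience path: the trainer news up a TOptions itself ("FastTreeTrainerOptions = new TOptions();")
    // and copies these scalars into it (see FastTree.cs lines 117-124 below).
    var trainerFromScalars = mlContext.BinaryClassification.Trainers.FastTree(
        numberOfLeaves: 20, numberOfTrees: 100, minimumExampleCountPerLeaf: 10, learningRate: 0.2);
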
249 references to FastTreeTrainerOptions
Microsoft.ML.FastTree (249)
BoostingFastTree.cs (48)
    31: FastTreeTrainerOptions.LearningRate = learningRate;
    36: if (FastTreeTrainerOptions.OptimizationAlgorithm == BoostedTreeOptions.OptimizationAlgorithmType.AcceleratedGradientDescent)
    37: FastTreeTrainerOptions.UseLineSearch = true;
    38: if (FastTreeTrainerOptions.OptimizationAlgorithm == BoostedTreeOptions.OptimizationAlgorithmType.ConjugateGradientDescent)
    39: FastTreeTrainerOptions.UseLineSearch = true;
    41: if (FastTreeTrainerOptions.CompressEnsemble && FastTreeTrainerOptions.WriteLastEnsemble)
    44: if (FastTreeTrainerOptions.NumberOfLeaves > 2 && FastTreeTrainerOptions.HistogramPoolSize > FastTreeTrainerOptions.NumberOfLeaves - 1)
    47: if (FastTreeTrainerOptions.NumberOfLeaves > 2 && FastTreeTrainerOptions.HistogramPoolSize > FastTreeTrainerOptions.NumberOfLeaves - 1)
    50: if (FastTreeTrainerOptions.EnablePruning && !HasValidSet)
    53: bool doEarlyStop = FastTreeTrainerOptions.EarlyStoppingRuleFactory != null;
    57: if (FastTreeTrainerOptions.UseTolerantPruning && (!FastTreeTrainerOptions.EnablePruning || !HasValidSet))
    66: TrainSet, FastTreeTrainerOptions.NumberOfLeaves, FastTreeTrainerOptions.MinimumExampleCountPerLeaf, FastTreeTrainerOptions.EntropyCoefficient,
    67: FastTreeTrainerOptions.FeatureFirstUsePenalty, FastTreeTrainerOptions.FeatureReusePenalty, FastTreeTrainerOptions.SoftmaxTemperature,
    68: FastTreeTrainerOptions.HistogramPoolSize, FastTreeTrainerOptions.Seed, FastTreeTrainerOptions.FeatureFractionPerSplit, FastTreeTrainerOptions.FilterZeroLambdas,
    69: FastTreeTrainerOptions.AllowEmptyTrees, FastTreeTrainerOptions.GainConfidenceLevel, FastTreeTrainerOptions.MaximumCategoricalGroupCountPerNode,
    70: FastTreeTrainerOptions.MaximumCategoricalSplitPointCount, BsrMaxTreeOutput(), ParallelTraining,
    71: FastTreeTrainerOptions.MinimumExampleFractionForCategoricalSplit, FastTreeTrainerOptions.Bundling, FastTreeTrainerOptions.MinimumExamplesForCategoricalSplit,
    72: FastTreeTrainerOptions.Bias, Host);
    81: switch (FastTreeTrainerOptions.OptimizationAlgorithm)
    93: throw ch.Except("Unknown optimization algorithm '{0}'", FastTreeTrainerOptions.OptimizationAlgorithm);
    98: optimizationAlgorithm.Smoothing = FastTreeTrainerOptions.Smoothing;
    99: optimizationAlgorithm.DropoutRate = FastTreeTrainerOptions.DropoutRate;
    100: optimizationAlgorithm.DropoutRng = new Random(FastTreeTrainerOptions.Seed);
    108: if (!FastTreeTrainerOptions.BestStepRankingRegressionTrees)
    121: if (FastTreeTrainerOptions.EarlyStoppingRuleFactory == null)
    137: if (FastTreeTrainerOptions.EarlyStoppingRuleFactory != null)
    138: earlyStoppingRule = FastTreeTrainerOptions.EarlyStoppingRuleFactory.CreateComponent(Host, lowerIsBetter);
    157: if (!FastTreeTrainerOptions.WriteLastEnsemble && PruningTest != null)
    170: if (FastTreeTrainerOptions.BestStepRankingRegressionTrees)
    171: return FastTreeTrainerOptions.MaximumTreeOutput;
    178: return FastTreeTrainerOptions.RandomStart;
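
BoostingFastTree is the shared base for the gradient-boosted trainers, so these reads cover the boosting-specific knobs: the optimization algorithm choice (which forces UseLineSearch on for accelerated and conjugate gradient descent, lines 36-39), dropout and smoothing copied onto the optimization algorithm (lines 98-99), and the pruning and early-stopping wiring. A hedged sketch of how those knobs look on an options object derived from BoostedTreeOptions; the values are illustrative, not defaults taken from the source.

    // (reuses the usings and mlContext from the first sketch)
    var boostedOptions = new FastTreeRegressionTrainer.Options
    {
        // Choosing an accelerated/conjugate gradient algorithm makes BoostingFastTree.cs
        // turn UseLineSearch on automatically (lines 36-39 above).
        OptimizationAlgorithm = BoostedTreeOptions.OptimizationAlgorithmType.AcceleratedGradientDescent,
        DropoutRate = 0.1,   // copied to optimizationAlgorithm.DropoutRate (line 99)
        Smoothing = 0.0,     // copied to optimizationAlgorithm.Smoothing (line 98)
        EnablePruning = true // only effective with a validation set (line 50 checks HasValidSet)
    };
    var boostedTrainer = mlContext.Regression.Trainers.FastTree(boostedOptions);
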
FastTree.cs (58)
    94: private protected string InnerOptions => CmdParser.GetSettings(Host, FastTreeTrainerOptions, new TOptions());
    117: FastTreeTrainerOptions.NumberOfLeaves = numberOfLeaves;
    118: FastTreeTrainerOptions.NumberOfTrees = numberOfTrees;
    119: FastTreeTrainerOptions.MinimumExampleCountPerLeaf = minimumExampleCountPerLeaf;
    121: FastTreeTrainerOptions.LabelColumnName = label.Name;
    122: FastTreeTrainerOptions.FeatureColumnName = featureColumnName;
    123: FastTreeTrainerOptions.ExampleWeightColumnName = exampleWeightColumnName;
    124: FastTreeTrainerOptions.RowGroupColumnName = rowGroupColumnName;
    180: ParallelTraining = FastTreeTrainerOptions.ParallelTrainer != null ? FastTreeTrainerOptions.ParallelTrainer.CreateComponent(env) : new SingleTrainer();
    185: InitializeThreads(FastTreeTrainerOptions.NumberOfThreads ?? Environment.ProcessorCount);
    191: var useTranspose = UseTranspose(FastTreeTrainerOptions.DiskTranspose, trainData) && (ValidData == null || UseTranspose(FastTreeTrainerOptions.DiskTranspose, ValidData));
    192: var instanceConverter = new ExamplesToFastTreeBins(Host, FastTreeTrainerOptions.MaximumBinCountPerFeature, useTranspose, !FastTreeTrainerOptions.FeatureFlocks, FastTreeTrainerOptions.MinimumExampleCountPerLeaf, GetMaxLabel());
    194: TrainSet = instanceConverter.FindBinsAndReturnDataset(trainData, PredictionKind, ParallelTraining, CategoricalFeatures, FastTreeTrainerOptions.CategoricalSplit);
    197: ValidSet = instanceConverter.GetCompatibleDataset(ValidData, PredictionKind, CategoricalFeatures, FastTreeTrainerOptions.CategoricalSplit);
    199: TestSets = new[] { instanceConverter.GetCompatibleDataset(TestData, PredictionKind, CategoricalFeatures, FastTreeTrainerOptions.CategoricalSplit) };
    226: if (FastTreeTrainerOptions.MemoryStatistics)
    231: if (FastTreeTrainerOptions.ExecutionTime)
    259: FastTreeTrainerOptions.Check(ch);
    261: IntArray.CompatibilityLevel = FastTreeTrainerOptions.FeatureCompressionLevel;
    264: if (FastTreeTrainerOptions.HistogramPoolSize < 2)
    265: FastTreeTrainerOptions.HistogramPoolSize = FastTreeTrainerOptions.NumberOfLeaves * 2 / 3;
    266: if (FastTreeTrainerOptions.HistogramPoolSize > FastTreeTrainerOptions.NumberOfLeaves - 1)
    267: FastTreeTrainerOptions.HistogramPoolSize = FastTreeTrainerOptions.NumberOfLeaves - 1;
    269: if (FastTreeTrainerOptions.BaggingSize > 0)
    271: int bagCount = FastTreeTrainerOptions.NumberOfTrees / FastTreeTrainerOptions.BaggingSize;
    272: if (bagCount * FastTreeTrainerOptions.BaggingSize != FastTreeTrainerOptions.NumberOfTrees)
    276: if (!(0 <= FastTreeTrainerOptions.GainConfidenceLevel && FastTreeTrainerOptions.GainConfidenceLevel < 1))
    327: if (!FastTreeTrainerOptions.PrintTestGraph)
    413: if (FastTreeTrainerOptions.FeatureFraction < 1.0)
    416: _featureSelectionRandom = new Random(FastTreeTrainerOptions.FeatureSelectionSeed);
    421: activeFeatures[i] = _featureSelectionRandom.NextDouble() <= FastTreeTrainerOptions.FeatureFraction;
    585: Contracts.Assert(FastTreeTrainerOptions.BaggingSize > 0);
    586: return new BaggingProvider(TrainSet, FastTreeTrainerOptions.NumberOfLeaves, FastTreeTrainerOptions.Seed, FastTreeTrainerOptions.BaggingExampleFraction);
    597: int numTotalTrees = FastTreeTrainerOptions.NumberOfTrees;
    617: OptimizationAlgorithm.TrainingScores.RandomizeScores(FastTreeTrainerOptions.Seed, false);
    623: BaggingProvider baggingProvider = FastTreeTrainerOptions.BaggingSize > 0 ? CreateBaggingProvider() : null;
    661: if (FastTreeTrainerOptions.BaggingSize > 0 && Ensemble.NumTrees % FastTreeTrainerOptions.BaggingSize == 0)
    684: else if (FastTreeTrainerOptions.BaggingSize > 0 && Ensemble.Trees.Count() > 0)
    707: OptimizationAlgorithm.TrainingScores.RandomizeScores(FastTreeTrainerOptions.Seed, true);
    798: if (FastTreeTrainerOptions.TestFrequency != int.MaxValue && (Ensemble.NumTrees % FastTreeTrainerOptions.TestFrequency == 0 || Ensemble.NumTrees == FastTreeTrainerOptions.NumberOfTrees))
    818: ch.Trace("CommandLine = {0}", CmdParser.GetSettings(Host, FastTreeTrainerOptions, new TOptions()));
    820: ch.Trace("{0}", FastTreeTrainerOptions);
    848: if (!FastTreeTrainerOptions.CompressEnsemble)
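
FastTree.cs (the trainer base class) consumes the options common to the whole family: column names, threading, disk transpose, binning, bagging, and feature sampling. One detail worth calling out is the HistogramPoolSize defaulting read at lines 264-267; it is restated below as a stand-alone sketch with illustrative names and the same arithmetic.

    // Mirrors the clamping performed in FastTree.cs lines 264-267 (names are illustrative).
    static int ResolveHistogramPoolSize(int histogramPoolSize, int numberOfLeaves)
    {
        if (histogramPoolSize < 2)
            histogramPoolSize = numberOfLeaves * 2 / 3;  // unset or too small: default to two thirds of the leaves
        if (histogramPoolSize > numberOfLeaves - 1)
            histogramPoolSize = numberOfLeaves - 1;      // never more than NumberOfLeaves - 1
        return histogramPoolSize;
    }

    // With NumberOfLeaves = 20: an unset pool size resolves to 13, and an explicit 25 is clamped to 19.
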
FastTreeClassification.cs (17)
    167: _sigmoidParameter = 2.0 * FastTreeTrainerOptions.LearningRate;
    179: _sigmoidParameter = 2.0 * FastTreeTrainerOptions.LearningRate;
    221: FastTreeTrainerOptions.LearningRate,
    222: FastTreeTrainerOptions.Shrinkage,
    224: FastTreeTrainerOptions.UnbalancedSets,
    225: FastTreeTrainerOptions.MaximumTreeOutput,
    226: FastTreeTrainerOptions.GetDerivativesSampleRate,
    227: FastTreeTrainerOptions.BestStepRankingRegressionTrees,
    228: FastTreeTrainerOptions.Seed,
    235: if (FastTreeTrainerOptions.UseLineSearch)
    239: optimizationAlgorithm.AdjustTreeOutputsOverride = new LineSearch(lossCalculator, FastTreeTrainerOptions.UnbalancedSets ? 3 /*Unbalanced sets loss*/ : 1 /*normal loss*/, FastTreeTrainerOptions.MaximumNumberOfLineSearchSteps, FastTreeTrainerOptions.MinimumStepSize);
    286: if (FastTreeTrainerOptions.EnablePruning && ValidSet != null)
    288: if (!FastTreeTrainerOptions.UseTolerantPruning)
    296: PruningTest = new TestWindowWithTolerance(ValidTest, 0, FastTreeTrainerOptions.PruningWindowSize, FastTreeTrainerOptions.PruningThreshold);
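
The binary classification trainer feeds LearningRate into the output calibration sigmoid (lines 167 and 179), drives the objective with Shrinkage, UnbalancedSets, MaximumTreeOutput and GetDerivativesSampleRate, and switches the line-search loss when UnbalancedSets is set (line 239). A hedged sketch of that surface on FastTreeBinaryTrainer.Options; values are illustrative.

    // (reuses the usings and mlContext from the first sketch)
    var binaryOptions = new FastTreeBinaryTrainer.Options
    {
        UnbalancedSets = true,               // line 239: selects the unbalanced-sets loss for line search
        LearningRate = 0.2,                  // lines 167/179: _sigmoidParameter = 2.0 * LearningRate
        UseLineSearch = true,
        MaximumNumberOfLineSearchSteps = 10, // passed to the LineSearch step adjuster
        EnablePruning = true                 // line 286: only applied when a validation set exists
    };
    var binaryTrainer = mlContext.BinaryClassification.Trainers.FastTree(binaryOptions);
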
FastTreeRanking.cs (40)
    159: Host.AssertValue(FastTreeTrainerOptions.CustomGains);
    160: return FastTreeTrainerOptions.CustomGains;
    172: if (FastTreeTrainerOptions.CustomGains != null)
    174: var gains = FastTreeTrainerOptions.CustomGains;
    177: throw ch.ExceptUserArg(nameof(FastTreeTrainerOptions.CustomGains),
    185: bool doEarlyStop = FastTreeTrainerOptions.EarlyStoppingRuleFactory != null ||
    186: FastTreeTrainerOptions.EnablePruning;
    189: ch.CheckUserArg(FastTreeTrainerOptions.EarlyStoppingMetrics == 1 || FastTreeTrainerOptions.EarlyStoppingMetrics == 3,
    190: nameof(FastTreeTrainerOptions.EarlyStoppingMetrics), "should be 1 or 3.");
    198: if (FastTreeTrainerOptions.CompressEnsemble)
    201: _ensembleCompressor.Initialize(FastTreeTrainerOptions.NumberOfTrees, TrainSet, TrainSet.Ratings, FastTreeTrainerOptions.Seed);
    207: return new LambdaRankObjectiveFunction(TrainSet, TrainSet.Ratings, FastTreeTrainerOptions, ParallelTraining);
    213: if (FastTreeTrainerOptions.UseLineSearch)
    215: _specialTrainSetTest = new FastNdcgTest(optimizationAlgorithm.TrainingScores, TrainSet.Ratings, FastTreeTrainerOptions.SortingAlgorithm, FastTreeTrainerOptions.EarlyStoppingMetrics);
    216: optimizationAlgorithm.AdjustTreeOutputsOverride = new LineSearch(_specialTrainSetTest, 0, FastTreeTrainerOptions.MaximumNumberOfLineSearchSteps, FastTreeTrainerOptions.MinimumStepSize);
    223: Host.Assert(FastTreeTrainerOptions.BaggingSize > 0);
    224: return new RankingBaggingProvider(TrainSet, FastTreeTrainerOptions.NumberOfLeaves, FastTreeTrainerOptions.Seed, FastTreeTrainerOptions.BaggingExampleFraction);
    233: return new NdcgTest(ConstructScoreTracker(TrainSet), TrainSet.Ratings, FastTreeTrainerOptions.SortingAlgorithm);
    238: if (FastTreeTrainerOptions.TestFrequency != int.MaxValue)
    243: if (FastTreeTrainerOptions.PrintTestGraph)
    260: if (FastTreeTrainerOptions.PrintTrainValidGraph && FastTreeTrainerOptions.EnablePruning && _specialTrainSetTest == null)
    265: if (FastTreeTrainerOptions.EnablePruning && ValidTest != null)
    267: if (!FastTreeTrainerOptions.UseTolerantPruning)
    275: PruningTest = new TestWindowWithTolerance(ValidTest, 0, FastTreeTrainerOptions.PruningWindowSize, FastTreeTrainerOptions.PruningThreshold);
    397: if (tree != null && FastTreeTrainerOptions.CompressEnsemble)
    417: FastTreeTrainerOptions.SortingAlgorithm);
    430: FastTreeTrainerOptions.SortingAlgorithm,
    431: FastTreeTrainerOptions.EarlyStoppingMetrics);
    443: FastTreeTrainerOptions.SortingAlgorithm,
    444: FastTreeTrainerOptions.EarlyStoppingMetrics);
    464: if (FastTreeTrainerOptions.PrintTrainValidGraph)
    469: FastTreeTrainerOptions.EarlyStoppingMetrics);
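
The ranking trainer adds the LambdaRank-specific options: CustomGains for the relevance-label gains, SortingAlgorithm for the NDCG tests, and an EarlyStoppingMetrics value that must be 1 or 3 (line 189). A hedged sketch on FastTreeRankingTrainer.Options; the column name and values are illustrative, and CustomGains is left unset because its representation should be checked against the ML.NET version in use.

    // (reuses the usings and mlContext from the first sketch)
    var rankingOptions = new FastTreeRankingTrainer.Options
    {
        RowGroupColumnName = "GroupId",  // rankers also read the query/group id column (FastTree.cs line 124)
        EarlyStoppingMetrics = 1,        // must be 1 or 3 per the check at FastTreeRanking.cs line 189
        EnablePruning = true,
        NumberOfTrees = 100,
        NumberOfLeaves = 20
    };
    var ranker = mlContext.Ranking.Trainers.FastTree(rankingOptions);
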
FastTreeRegression.cs (20)
    133: bool doEarlyStop = FastTreeTrainerOptions.EarlyStoppingRuleFactory != null ||
    134: FastTreeTrainerOptions.EnablePruning;
    137: ch.CheckUserArg(FastTreeTrainerOptions.EarlyStoppingMetrics >= 1 && FastTreeTrainerOptions.EarlyStoppingMetrics <= 2,
    138: nameof(FastTreeTrainerOptions.EarlyStoppingMetrics), "earlyStoppingMetrics should be 1 or 2. (1: L1, 2: L2)");
    148: return new ObjectiveImpl(TrainSet, FastTreeTrainerOptions);
    154: if (FastTreeTrainerOptions.UseLineSearch)
    158: optimizationAlgorithm.AdjustTreeOutputsOverride = new LineSearch(lossCalculator, 1 /*L2 error*/, FastTreeTrainerOptions.MaximumNumberOfLineSearchSteps, FastTreeTrainerOptions.MinimumStepSize);
    259: if (FastTreeTrainerOptions.TestFrequency != int.MaxValue)
    262: if (FastTreeTrainerOptions.PrintTestGraph)
    274: if (FastTreeTrainerOptions.PrintTrainValidGraph && _trainRegressionTest == null)
    280: if (FastTreeTrainerOptions.PrintTrainValidGraph && _testRegressionTest == null && TestSets != null && TestSets.Length > 0)
    284: TrainTest = new RegressionTest(ConstructScoreTracker(TrainSet), FastTreeTrainerOptions.EarlyStoppingMetrics);
    286: ValidTest = new RegressionTest(ConstructScoreTracker(ValidSet), FastTreeTrainerOptions.EarlyStoppingMetrics);
    288: if (FastTreeTrainerOptions.EnablePruning && ValidTest != null)
    290: if (FastTreeTrainerOptions.UseTolerantPruning) // Use simple early stopping condition.
    291: PruningTest = new TestWindowWithTolerance(ValidTest, 0, FastTreeTrainerOptions.PruningWindowSize, FastTreeTrainerOptions.PruningThreshold);
    341: if (FastTreeTrainerOptions.PrintTrainValidGraph)
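
For regression the early-stopping metric is constrained to 1 (L1) or 2 (L2) at line 137, and pruning is again gated on having a validation set. A hedged sketch of the corresponding FastTreeRegressionTrainer.Options; values are illustrative.

    // (reuses the usings and mlContext from the first sketch)
    var regressionOptions = new FastTreeRegressionTrainer.Options
    {
        EarlyStoppingMetrics = 2,    // FastTreeRegression.cs line 137: must be 1 (L1) or 2 (L2)
        EnablePruning = true,        // line 288: only effective when a validation set is provided
        UseTolerantPruning = true,   // lines 290-291: use the windowed, tolerance-based pruning test
        PruningWindowSize = 5,
        PruningThreshold = 0.004
    };
    var regressionTrainer = mlContext.Regression.Trainers.FastTree(regressionOptions);
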
FastTreeTweedie.cs (24)
    146: if (FastTreeTrainerOptions.EarlyStoppingMetrics > 0)
    149: bool doEarlyStop = FastTreeTrainerOptions.EarlyStoppingRuleFactory != null ||
    150: FastTreeTrainerOptions.EnablePruning;
    154: ch.CheckUserArg(FastTreeTrainerOptions.EarlyStoppingMetrics == 1 || FastTreeTrainerOptions.EarlyStoppingMetrics == 2,
    155: nameof(FastTreeTrainerOptions.EarlyStoppingMetrics), "should be 1 (L1-norm) or 2 (L2-norm).");
    160: return new ObjectiveImpl(TrainSet, FastTreeTrainerOptions);
    166: if (FastTreeTrainerOptions.UseLineSearch)
    171: optimizationAlgorithm.AdjustTreeOutputsOverride = new LineSearch(lossCalculator, 1 /*L2 error*/, FastTreeTrainerOptions.MaximumNumberOfLineSearchSteps, FastTreeTrainerOptions.MinimumStepSize);
    207: Host.CheckUserArg(1 <= FastTreeTrainerOptions.Index && FastTreeTrainerOptions.Index <= 2, nameof(FastTreeTrainerOptions.Index), "Must be in the range [1, 2]");
    238: if (FastTreeTrainerOptions.TestFrequency != int.MaxValue)
    241: if (FastTreeTrainerOptions.PrintTestGraph)
    253: if (FastTreeTrainerOptions.PrintTrainValidGraph && _trainRegressionTest == null)
    259: if (FastTreeTrainerOptions.PrintTrainValidGraph && _testRegressionTest == null && TestSets != null && TestSets.Length > 0)
    263: TrainTest = new RegressionTest(ConstructScoreTracker(TrainSet), FastTreeTrainerOptions.EarlyStoppingMetrics);
    265: ValidTest = new RegressionTest(ConstructScoreTracker(ValidSet), FastTreeTrainerOptions.EarlyStoppingMetrics);
    267: if (FastTreeTrainerOptions.EnablePruning && ValidTest != null)
    269: if (FastTreeTrainerOptions.UseTolerantPruning) // Use simple early stopping condition.
    270: PruningTest = new TestWindowWithTolerance(ValidTest, 0, FastTreeTrainerOptions.PruningWindowSize, FastTreeTrainerOptions.PruningThreshold);
    285: if (FastTreeTrainerOptions.PrintTrainValidGraph)
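
The Tweedie trainer adds Index, the Tweedie power parameter, which line 207 constrains to [1, 2] (1 corresponds to a Poisson-like loss, 2 to a Gamma-like loss, values in between to compound Poisson-Gamma). A hedged sketch on FastTreeTweedieTrainer.Options; values are illustrative.

    // (reuses the usings and mlContext from the first sketch)
    var tweedieOptions = new FastTreeTweedieTrainer.Options
    {
        Index = 1.5,               // Tweedie power; FastTreeTweedie.cs line 207 requires 1 <= Index <= 2
        EarlyStoppingMetrics = 2   // line 154: must be 1 (L1-norm) or 2 (L2-norm)
    };
    var tweedieTrainer = mlContext.Regression.Trainers.FastTreeTweedie(tweedieOptions);
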
RandomForest.cs (19)
    50: optimizationAlgorithm.Smoothing = FastTreeTrainerOptions.Smoothing;
    66: TrainSet, FastTreeTrainerOptions.NumberOfLeaves, FastTreeTrainerOptions.MinimumExampleCountPerLeaf, FastTreeTrainerOptions.EntropyCoefficient,
    67: FastTreeTrainerOptions.FeatureFirstUsePenalty, FastTreeTrainerOptions.FeatureReusePenalty, FastTreeTrainerOptions.SoftmaxTemperature,
    68: FastTreeTrainerOptions.HistogramPoolSize, FastTreeTrainerOptions.Seed, FastTreeTrainerOptions.FeatureFractionPerSplit,
    69: FastTreeTrainerOptions.AllowEmptyTrees, FastTreeTrainerOptions.GainConfidenceLevel, FastTreeTrainerOptions.MaximumCategoricalGroupCountPerNode,
    70: FastTreeTrainerOptions.MaximumCategoricalSplitPointCount, _quantileEnabled, FastTreeTrainerOptions.NumberOfQuantileSamples, ParallelTraining,
    71: FastTreeTrainerOptions.MinimumExampleFractionForCategoricalSplit, FastTreeTrainerOptions.Bundling, FastTreeTrainerOptions.MinimumExamplesForCategoricalSplit,
    72: FastTreeTrainerOptions.Bias, Host);
RandomForestClassification.cs (11)
    229: if (FastTreeTrainerOptions.FeatureFraction != 1.0)
    231: ch.Warning($"oneDAL decision forest doesn't support 'FeatureFraction'[per tree] != 1.0, changing it from {FastTreeTrainerOptions.FeatureFraction} to 1.0");
    232: FastTreeTrainerOptions.FeatureFraction = 1.0;
    274: int numberOfLeaves = FastTreeTrainerOptions.NumberOfLeaves;
    275: int numberOfTrees = FastTreeTrainerOptions.NumberOfTrees;
    278: if (FastTreeTrainerOptions.NumberOfThreads.HasValue)
    279: numberOfThreads = FastTreeTrainerOptions.NumberOfThreads.Value;
    305: numberOfThreads, (float)FastTreeTrainerOptions.FeatureFractionPerSplit, numberOfTrees,
    306: numberOfLeaves, FastTreeTrainerOptions.MinimumExampleCountPerLeaf, FastTreeTrainerOptions.MaximumBinCountPerFeature,
    350: return new ObjectiveFunctionImpl(TrainSet, _trainSetLabels, FastTreeTrainerOptions);
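
The random-forest (FastForest) classifier reads the same options object on the oneDAL path, where a per-tree FeatureFraction other than 1.0 is reset to 1.0 with a warning (lines 229-232), and when building the native training parameters (lines 274-306). A hedged sketch on FastForestBinaryTrainer.Options; values are illustrative.

    // (reuses the usings and mlContext from the first sketch)
    var forestOptions = new FastForestBinaryTrainer.Options
    {
        NumberOfTrees = 100,
        NumberOfLeaves = 20,
        FeatureFractionPerSplit = 0.7, // per-split sampling is passed through (line 305)
        NumberOfThreads = 4            // nullable; read via NumberOfThreads.Value when set (lines 278-279)
    };
    var forestTrainer = mlContext.BinaryClassification.Trainers.FastForest(forestOptions);
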
RandomForestRegression.cs (12)
    368: if (FastTreeTrainerOptions.FeatureFraction != 1.0)
    370: ch.Warning($"oneDAL decision forest doesn't support 'FeatureFraction'[per tree] != 1.0, changing it from {FastTreeTrainerOptions.FeatureFraction} to 1.0");
    371: FastTreeTrainerOptions.FeatureFraction = 1.0;
    384: return new FastForestRegressionModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions, FastTreeTrainerOptions.NumberOfQuantileSamples);
    406: int numberOfLeaves = FastTreeTrainerOptions.NumberOfLeaves;
    407: int numberOfTrees = FastTreeTrainerOptions.NumberOfTrees;
    410: if (FastTreeTrainerOptions.NumberOfThreads.HasValue)
    411: numberOfThreads = FastTreeTrainerOptions.NumberOfThreads.Value;
    437: numberOfThreads, (float)FastTreeTrainerOptions.FeatureFractionPerSplit, numberOfTrees,
    438: numberOfLeaves, FastTreeTrainerOptions.MinimumExampleCountPerLeaf, FastTreeTrainerOptions.MaximumBinCountPerFeature,
    486: return ObjectiveFunctionImplBase.Create(TrainSet, FastTreeTrainerOptions);
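
The forest regressor mirrors the classifier and additionally passes NumberOfQuantileSamples into the FastForestRegressionModelParameters it produces (line 384), which supports quantile estimates at scoring time. A hedged sketch on FastForestRegressionTrainer.Options; values are illustrative.

    // (reuses the usings and mlContext from the first sketch)
    var forestRegressionOptions = new FastForestRegressionTrainer.Options
    {
        NumberOfTrees = 100,
        NumberOfLeaves = 20,
        NumberOfQuantileSamples = 100  // stored in the model parameters built at RandomForestRegression.cs line 384
    };
    var forestRegressionTrainer = mlContext.Regression.Trainers.FastForest(forestRegressionOptions);
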