29 writes to Description
Microsoft.ML.Ensemble (11)
OutputCombiners\MultiWeightedAverage.cs (1)
48: [TGUI(Label = "Metric Name", Description = "The weights are calculated according to the selected metric")]
OutputCombiners\WeightedAverage.cs (1)
43: [TGUI(Label = "Weightage Name", Description = "The weights are calculated according to the selected metric")]
Trainer\Binary\EnsembleTrainer.cs (2)
43: Description = "Algorithm to prune the base learners for selective Ensemble")]
47: [TGUI(Label = "Output combiner", Description = "Output combiner type")]
Trainer\EnsembleTrainerBase.cs (3)
35: Description =
40: [TGUI(Label = "Sampling Type", Description = "Subset Selection Algorithm to induce the base learner.Sub-settings can be used to select the features")]
44: [TGUI(Label = "Train parallel", Description = "All the base learners will run asynchronously if the value is true")]
Trainer\Multiclass\MulticlassDataPartitionEnsembleTrainer.cs (2)
44: [TGUI(Label = "Sub-Model Selector(pruning) Type", Description = "Algorithm to prune the base learners for selective Ensemble")]
48: [TGUI(Label = "Output combiner", Description = "Output combiner type")]
Trainer\Regression\RegressionEnsembleTrainer.cs (2)
38: [TGUI(Label = "Sub-Model Selector(pruning) Type", Description = "Algorithm to prune the base learners for selective Ensemble")]
42: [TGUI(Label = "Output combiner", Description = "Output combiner type")]
Microsoft.ML.FastTree (8)
FastTreeArguments.cs (7)
514: [TGUI(Description = "The maximum number of leaves per tree", SuggestedSweeps = "2-128;log;inc:4")]
524: [TGUI(Description = "Minimum number of training instances required to form a leaf", SuggestedSweeps = "1,10,50")]
533: [TGUI(Description = "Total number of trees constructed", SuggestedSweeps = "20,100,500")]
695: [TGUI(Label = "Early Stopping Rule", Description = "Early stopping rule. (Validation set (/valid) is required.)")]
722: [TGUI(Description = "Early stopping metrics. (For regression, 1: L1, 2:L2; for ranking, 1:NDCG@1, 3:NDCG@3)")]
741: [TGUI(Description = "Pruning threshold")]
748: [TGUI(Description = "Pruning window size")]
GamRegression.cs (1)
67: [TGUI(Description = "Metric for pruning. (For regression, 1: L1, 2:L2; default L2")]
Microsoft.ML.KMeansClustering (1)
KMeansPlusPlusTrainer.cs (1)
126: [TGUI(Label = "Optimization Tolerance", Description = "Threshold for trainer convergence")]
Microsoft.ML.LightGbm (1)
LightGbmTrainerBase.cs (1)
105: [TGUI(Description = "The maximum number of leaves per tree", SuggestedSweeps = "2-128;log;inc:4")]
Microsoft.ML.StandardTrainers (8)
Standard\LogisticRegression\LbfgsPredictorBase.cs (4)
46: [TGUI(Label = "L2 Weight", Description = "Weight of L2 regularizer term", SuggestedSweeps = "0,0.1,1")]
55: [TGUI(Label = "L1 Weight", Description = "Weight of L1 regularizer term", SuggestedSweeps = "0,0.1,1")]
65: [TGUI(Label = "Optimization Tolerance", Description = "Threshold for optimizer convergence", SuggestedSweeps = "1e-4,1e-7")]
74: [TGUI(Description = "Memory size for L-BFGS", SuggestedSweeps = "5,20,50")]
Standard\MulticlassClassification\MetaMulticlassTrainer.cs (1)
24: [TGUI(Label = "Predictor Type", Description = "Type of underlying binary predictor")]
Standard\MulticlassClassification\OneVersusAllTrainer.cs (1)
106: [TGUI(Label = "Use Probability", Description = "Use probabilities (vs. raw outputs) to identify top-score category")]
Standard\Online\AveragedLinear.cs (1)
41: [TGUI(Label = "Decrease Learning Rate", Description = "Decrease learning rate as iterations progress")]
Standard\Online\OnlineLinear.cs (1)
28: [TGUI(Label = "Number of Iterations", Description = "Number of training iterations through data", SuggestedSweeps = "1,10,100")]
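All 29 hits above follow the same shape: a trainer options field annotated with the internal TGUI attribute, whose Description (and, where present, Label and SuggestedSweeps) property is set in the attribute initializer. Below is a minimal sketch of that pattern, assuming the internal TGUIAttribute is in scope; the attribute arguments are copied from the OnlineLinear.cs entry above, while the surrounding class and field names are illustrative, not taken from any of the files listed.

// Illustrative options class; each entry in the listing above is an
// assignment to Description inside a [TGUI(...)] initializer like this one.
public sealed class ExampleTrainerOptions
{
    // Label names the option in the GUI, Description documents it, and
    // SuggestedSweeps (where present) lists candidate values or a range
    // for a hyperparameter sweep.
    [TGUI(Label = "Number of Iterations",
        Description = "Number of training iterations through data",
        SuggestedSweeps = "1,10,100")]
    public int NumberOfIterations = 1;
}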