11 writes to Gain
Microsoft.ML.FastTree (11)
  Dataset\FeatureFlock.cs (4)
    324: leafSplitCandidates.FeatureSplitInfo[featureIndex].Gain = (bestShiftedGain - gainShift) * trust - usePenalty;
    481: leafSplitCandidates.FeatureSplitInfo[firstFlockFeature].Gain = (bestShiftedGain - gainShift) * trust -
    693: leafSplitCandidates.FeatureSplitInfo[firstFlockFeature].Gain = (bestShiftedGain - gainShift) * trust -
    915: leafSplitCandidates.FeatureSplitInfo[firstFlockFeature].Gain = (bestShiftedGain - gainShift) * trust -
  Training\TreeLearners\LeastSquaresRegressionTreeLearner.cs (7)
    209: newRootSplitInfo.Gain = 0;
    398: BestSplitInfoPerLeaf[lteChild].Gain = double.NegativeInfinity;
    399: BestSplitInfoPerLeaf[gtChild].Gain = double.NegativeInfinity;
    803: leafSplitCandidates.FeatureSplitInfo[feature].Gain = (bestShiftedGain - gainShift) * trust - usePenalty;
    1078: FeatureSplitInfo[f].Gain = double.NegativeInfinity;
    1117: Gain = double.NegativeInfinity;
    1168: Gain = buffer.ToDouble(ref offset);
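All of the writes above follow one pattern: a candidate split's gain is stored as (bestShiftedGain - gainShift) * trust - usePenalty, or reset to 0 / double.NegativeInfinity when a leaf has no usable split yet. The sketch below illustrates that pattern only; the LeafGain helper, its squared-sum/count form, and the sum/count parameters are assumptions for illustration, not FastTree's actual implementation.

    // Hedged sketch of the gain-assignment pattern in the writes listed above.
    // Only the final return expression mirrors the listed code; everything else is assumed.
    internal static class GainSketch
    {
        // Assumed leaf score: squared sum of targets over document count.
        private static double LeafGain(double sumTargets, int count)
            => count > 0 ? sumTargets * sumTargets / count : 0.0;

        // Corresponds to: FeatureSplitInfo[...].Gain = (bestShiftedGain - gainShift) * trust - usePenalty;
        public static double SplitGain(double parentSum, int parentCount,
                                       double lteSum, int lteCount,
                                       double trust, double usePenalty)
        {
            double gtSum = parentSum - lteSum;
            int gtCount = parentCount - lteCount;

            double gainShift = LeafGain(parentSum, parentCount);     // score of the unsplit leaf
            double bestShiftedGain = LeafGain(lteSum, lteCount)      // in the real code, the best value
                                   + LeafGain(gtSum, gtCount);       // found while scanning thresholds

            return (bestShiftedGain - gainShift) * trust - usePenalty;
        }
    }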
23 references to Gain
Microsoft.ML.FastTree (23)
  Dataset\FeatureFlock.cs (2)
    226: leafSplitCandidates.FeatureSplitInfo[leafSplitCandidates.FlockToBestFeature[flock]].Gain <
    227: leafSplitCandidates.FeatureSplitInfo[feature].Gain)
  GamTrainer.cs (1)
    368: if (_leafSplitCandidates.FeatureSplitInfo[globalFeatureIndex].Gain > 0)
  Training\TreeLearners\FastForestLeastSquaresTreeLearner.cs (3)
    57: double max = infos[0].Gain;
    60: if (infos[i].Gain > max && Rand.NextDouble() < SplitFraction || Double.IsNegativeInfinity(max))
    61: max = infos[bestFeature = i].Gain;
  Training\TreeLearners\LeastSquaresRegressionTreeLearner.cs (17)
    240: if (Double.IsNaN(rootSplitInfo.Gain) || Double.IsNegativeInfinity(rootSplitInfo.Gain))
    265: bestLeaf = BestSplitInfoPerLeaf.Select(info => info.Gain).ArgMax(tree.NumLeaves);
    269: if (bestLeafSplitInfo.Gain <= 0)
    290: bestSplitInfo.CategoricalSplit, bestSplitInfo.Threshold, bestSplitInfo.LteOutput, bestSplitInfo.GTOutput, bestSplitInfo.Gain, bestSplitInfo.GainPValue);
    456: bestFeature = leafSplitCandidates.FeatureSplitInfo.Select(info => info.Gain)
    462: double max = infos[0].Gain;
    471: if (bestFeatInFlock != -1 && infos[bestFeatInFlock].Gain > max)
    472: max = infos[bestFeature = bestFeatInFlock].Gain;
    479: if (infos[f].Gain > max)
    480: max = infos[bestFeature = f].Gain;
    486: bestFeature = leafSplitCandidates.FeatureSplitInfo.Select(info => info.Gain / SoftmaxTemperature).SoftArgMax(Rand);
    810: leafSplitCandidates.FeatureSplitInfo[leafSplitCandidates.FlockToBestFeature[flock]].Gain <
    811: leafSplitCandidates.FeatureSplitInfo[feature].Gain)
    1141: Gain.ToByteArray(buffer, ref offset);
    1188: double myGain = Gain;
    1189: double otherGain = other.Gain;
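The reads above are split selection: LeastSquaresRegressionTreeLearner.cs tracks the largest Gain per feature (lines 462-480) or samples a feature via SoftArgMax over Gain / SoftmaxTemperature (line 486), and the caller rejects leaves whose best Gain is NaN, negative infinity, or <= 0 (lines 240, 269). The sketch below illustrates both selection patterns under assumptions: the SplitInfo shape, ArgMaxGain, and SoftArgMaxGain names are illustrative, not the library's API.

    // Hedged sketch of the two selection patterns referenced above.
    using System;
    using System.Linq;

    internal sealed class SplitInfo
    {
        public double Gain = double.NegativeInfinity;   // "no split found yet", as in the writes above
    }

    internal static class BestSplitSketch
    {
        // Deterministic choice, like the max-tracking loop around lines 462-480.
        public static int ArgMaxGain(SplitInfo[] infos)
        {
            int bestFeature = 0;
            double max = infos[0].Gain;
            for (int f = 1; f < infos.Length; f++)
            {
                if (infos[f].Gain > max)
                    max = infos[bestFeature = f].Gain;
            }
            return bestFeature;
        }

        // Randomized choice in the spirit of the SoftArgMax call at line 486 (assumed semantics):
        // sample a feature with probability proportional to exp(Gain / temperature).
        public static int SoftArgMaxGain(SplitInfo[] infos, double temperature, Random rand)
        {
            double maxGain = infos.Max(i => i.Gain);
            double shift = double.IsNegativeInfinity(maxGain) ? 0.0 : maxGain;   // keep exp() finite
            double[] weights = infos.Select(i => Math.Exp((i.Gain - shift) / temperature)).ToArray();

            double draw = rand.NextDouble() * weights.Sum();
            double cumulative = 0.0;
            for (int f = 0; f < weights.Length; f++)
            {
                cumulative += weights[f];
                if (draw <= cumulative)
                    return f;
            }
            return weights.Length - 1;
        }
    }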