1 write to FinalValue
Microsoft.ML.FastTree (1)
Training\Test.cs (1)
72: FinalValue = CalculateFinalValue();
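The single write above suggests FinalValue is a compute-once metric: CalculateFinalValue() runs exactly once and every later use is a read. Below is a minimal sketch of that pattern, assuming a mean-loss metric; TestResult, its fields, and the formula are illustrative names only, not the ML.NET types.

using System;

public sealed class TestResult
{
    private readonly double _sumLoss;
    private readonly int _count;
    private double? _finalValue;

    public TestResult(double sumLoss, int count)
    {
        _sumLoss = sumLoss;
        _count = count;
    }

    public double FinalValue
    {
        get
        {
            if (_finalValue == null)
                _finalValue = CalculateFinalValue();   // the single write site, as in Test.cs line 72
            return _finalValue.Value;
        }
    }

    // Hypothetical metric: mean loss over all examples.
    private double CalculateFinalValue() => _count == 0 ? 0.0 : _sumLoss / _count;
}

public static class WriteOnceDemo
{
    public static void Main()
    {
        var r = new TestResult(sumLoss: 12.5, count: 50);
        Console.WriteLine($"{r.FinalValue:0.0000}");   // prints 0.2500
    }
}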
22 references to FinalValue
Microsoft.ML.FastTree (22)
BoostingFastTree.cs (4)
128: ch.Assert(validationResult.FinalValue >= 0);
132: ch.Assert(trainingResult.FinalValue >= 0);
145: bool shouldStop = earlyStoppingRule.CheckScore((float)validationResult.FinalValue,
146:     (float)trainingResult.FinalValue, out isBestCandidate);
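BoostingFastTree feeds the validation and training FinalValue into the early-stopping rule. A hedged sketch of that check follows; only the CheckScore call shape (validation score, training score, out isBestCandidate) comes from the listing, while PatienceRule and its stopping policy are assumptions made here.

using System;

public sealed class PatienceRule
{
    private readonly int _patience;
    private readonly bool _lowerIsBetter;
    private float _best = float.NaN;
    private int _badRounds;

    public PatienceRule(int patience, bool lowerIsBetter)
    {
        _patience = patience;
        _lowerIsBetter = lowerIsBetter;
    }

    // Mirrors the call earlyStoppingRule.CheckScore(validationScore, trainingScore, out isBestCandidate);
    // this simple policy watches only the validation score and ignores the training score.
    public bool CheckScore(float validationScore, float trainingScore, out bool isBestCandidate)
    {
        bool improved = float.IsNaN(_best) ||
                        (_lowerIsBetter ? validationScore < _best : validationScore > _best);
        isBestCandidate = improved;
        _badRounds = improved ? 0 : _badRounds + 1;
        if (improved)
            _best = validationScore;
        return _badRounds >= _patience;   // stop after `patience` rounds without improvement
    }
}

public static class EarlyStoppingDemo
{
    public static void Main()
    {
        var rule = new PatienceRule(patience: 2, lowerIsBetter: true);
        float[] validation = { 0.50f, 0.45f, 0.46f, 0.47f, 0.48f };
        float[] training   = { 0.40f, 0.35f, 0.30f, 0.25f, 0.20f };

        for (int i = 0; i < validation.Length; i++)
        {
            bool shouldStop = rule.CheckScore(validation[i], training[i], out bool isBest);
            Console.WriteLine($"iter {i}: best={isBest} stop={shouldStop}");
            if (shouldStop)
                break;
        }
    }
}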
FastTreeRanking.cs (3)
358: lineBuilder.AppendFormat("\t{0:0.0000}", r.FinalValue);
370: trainTestResult = _specialTrainSetTest.ComputeTests().First().FinalValue;
375: validTestResult = PruningTest.ComputeTests().First().FinalValue;
FastTreeRegression.cs (3)
381: lineBuilder.AppendFormat("\t{0:0.0000}", r.FinalValue);
392: trainRegression = _trainRegressionTest.ComputeTests().Last().FinalValue;
394: validRegression = _testRegressionTest.ComputeTests().Last().FinalValue;
FastTreeTweedie.cs (3)
317: lineBuilder.AppendFormat("\t{0:0.0000}", r.FinalValue);
328: trainRegression = _trainRegressionTest.ComputeTests().Last().FinalValue;
330: validRegression = _testRegressionTest.ComputeTests().Last().FinalValue;
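FastTreeRanking, FastTreeRegression, and FastTreeTweedie all follow the same pattern: call ComputeTests(), pick the first or last result, and read its FinalValue for the per-iteration progress line. The sketch below illustrates that flow under assumed types (MetricResult, SimpleTest); the values and metric names are made up.

using System;
using System.Collections.Generic;
using System.Linq;

public sealed class MetricResult
{
    public string LossFunctionName { get; }
    public double FinalValue { get; }
    public MetricResult(string name, double value) { LossFunctionName = name; FinalValue = value; }
}

public sealed class SimpleTest
{
    private readonly List<MetricResult> _results;
    public SimpleTest(IEnumerable<MetricResult> results) => _results = results.ToList();

    // One result per tracked loss function, in a fixed order.
    public IEnumerable<MetricResult> ComputeTests() => _results;
}

public static class ProgressDemo
{
    public static void Main()
    {
        var trainTest = new SimpleTest(new[] { new MetricResult("L1", 0.42), new MetricResult("L2", 0.31) });
        var validTest = new SimpleTest(new[] { new MetricResult("L1", 0.48), new MetricResult("L2", 0.36) });

        // Regression/Tweedie-style: take the last metric; ranking takes the first instead.
        double trainRegression = trainTest.ComputeTests().Last().FinalValue;
        double validRegression = validTest.ComputeTests().Last().FinalValue;

        // Progress line in the same spirit as lineBuilder.AppendFormat("\t{0:0.0000}", r.FinalValue).
        Console.WriteLine($"\t{trainRegression:0.0000}\t{validRegression:0.0000}");
    }
}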
GamTrainer.cs (3)
432: double bestLoss = finalResult.FinalValue;
436: bestLoss = PruningTest.BestResult.FinalValue;
439: ch.Info($"Best Iteration ({lossFunctionName}): {bestIteration} @ {bestLoss:G6} (vs {GamTrainerOptions.NumberOfIterations} @ {finalResult.FinalValue:G6}).");
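GamTrainer compares the loss at the final iteration against the best value recorded by the pruning test, then logs both. A hedged sketch of that bookkeeping follows, with a hypothetical BestResultTracker standing in for PruningTest.BestResult.

using System;

public sealed class BestResultTracker
{
    private readonly bool _lowerIsBetter;
    public int BestIteration { get; private set; } = -1;
    public double BestValue { get; private set; } = double.NaN;

    public BestResultTracker(bool lowerIsBetter) => _lowerIsBetter = lowerIsBetter;

    // Remember the iteration whose validation FinalValue was best so far.
    public void Update(int iteration, double finalValue)
    {
        bool improved = double.IsNaN(BestValue) ||
                        (_lowerIsBetter ? finalValue < BestValue : finalValue > BestValue);
        if (improved)
        {
            BestValue = finalValue;
            BestIteration = iteration;
        }
    }
}

public static class GamDemo
{
    public static void Main()
    {
        var tracker = new BestResultTracker(lowerIsBetter: true);
        double[] validationLoss = { 0.50, 0.42, 0.40, 0.43, 0.45 };
        for (int i = 0; i < validationLoss.Length; i++)
            tracker.Update(i, validationLoss[i]);

        double finalLoss = validationLoss[validationLoss.Length - 1];
        // Same shape as the ch.Info line: best iteration/loss vs. the loss at the last iteration.
        Console.WriteLine($"Best Iteration: {tracker.BestIteration} @ {tracker.BestValue:G6} (vs {validationLoss.Length} @ {finalLoss:G6}).");
    }
}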
Training\Test.cs (6)
80: return FinalValue.CompareTo(o.FinalValue) * (LowerIsBetter ? -1 : 1);
188: sb.AppendFormat("{0}.{1}={2}\n", ScoreTracker.DatasetName, r.LossFunctionName, r.FinalValue);
305: double currentValue = ComputeTests().First().FinalValue * (r.LowerIsBetter ? -1.0 : 1.0);
374: sb.AppendFormat("@{0}:{1:00.00}", i++, 100.0 * t.FinalValue);
509: sb.AppendFormat("{0}:{1:00.00}", t.LossFunctionName, t.FinalValue);
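The comparison on Test.cs line 80 is the ordering trick that makes these reads direction-agnostic: multiplying CompareTo by -1 when LowerIsBetter means "greater" always reads as "better", whether the metric is a loss or an accuracy. A small sketch of that idea, using an illustrative TestHistory type rather than the ML.NET class:

using System;
using System.Linq;

public sealed class TestHistory : IComparable<TestHistory>
{
    public double FinalValue { get; }
    public bool LowerIsBetter { get; }

    public TestHistory(double finalValue, bool lowerIsBetter)
    {
        FinalValue = finalValue;
        LowerIsBetter = lowerIsBetter;
    }

    // Same sign-flip as Test.cs line 80: for losses, a smaller FinalValue compares as greater.
    public int CompareTo(TestHistory o) =>
        FinalValue.CompareTo(o.FinalValue) * (LowerIsBetter ? -1 : 1);
}

public static class OrderingDemo
{
    public static void Main()
    {
        var results = new[]
        {
            new TestHistory(0.31, lowerIsBetter: true),
            new TestHistory(0.27, lowerIsBetter: true),
            new TestHistory(0.45, lowerIsBetter: true),
        };
        // Max() now picks the lowest loss (0.27), because the sign flip made it the "greatest".
        Console.WriteLine(results.Max().FinalValue);
    }
}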