1 write to FinalValue
Microsoft.ML.FastTree (1)
Training\Test.cs (1)
72: FinalValue = CalculateFinalValue();
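The single write above suggests FinalValue is assigned once from CalculateFinalValue() and treated as read-only everywhere else. A minimal sketch of that pattern follows; the class shape, field names, and the body of CalculateFinalValue are invented stand-ins, not the actual Microsoft.ML.FastTree TestResult.

```csharp
// Hypothetical sketch of the write at Training\Test.cs line 72. Only the
// "assign FinalValue once from CalculateFinalValue()" pattern comes from
// the listing; everything else here is illustrative.
public sealed class TestResultSketch
{
    private readonly double _rawValue;
    private readonly double _factor;

    public bool LowerIsBetter { get; }

    // Written exactly once, at construction time; every other occurrence
    // in the references below is a read.
    public double FinalValue { get; }

    public TestResultSketch(double rawValue, double factor, bool lowerIsBetter)
    {
        _rawValue = rawValue;
        _factor = factor;
        LowerIsBetter = lowerIsBetter;
        FinalValue = CalculateFinalValue();
    }

    // Stand-in for whatever normalization the real CalculateFinalValue performs.
    private double CalculateFinalValue() => _factor == 0 ? _rawValue : _rawValue / _factor;
}
```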
22 references to FinalValue
Microsoft.ML.FastTree (22)
BoostingFastTree.cs (4)
128: ch.Assert(validationResult.FinalValue >= 0);
132: ch.Assert(trainingResult.FinalValue >= 0);
145: bool shouldStop = earlyStoppingRule.CheckScore((float)validationResult.FinalValue,
146: (float)trainingResult.FinalValue, out isBestCandidate);
FastTreeRanking.cs (3)
358: lineBuilder.AppendFormat("\t{0:0.0000}", r.FinalValue);
370: trainTestResult = _specialTrainSetTest.ComputeTests().First().FinalValue;
375: validTestResult = PruningTest.ComputeTests().First().FinalValue;
FastTreeRegression.cs (3)
381: lineBuilder.AppendFormat("\t{0:0.0000}", r.FinalValue);
392: trainRegression = _trainRegressionTest.ComputeTests().Last().FinalValue;
394: validRegression = _testRegressionTest.ComputeTests().Last().FinalValue;
FastTreeTweedie.cs (3)
317: lineBuilder.AppendFormat("\t{0:0.0000}", r.FinalValue);
328: trainRegression = _trainRegressionTest.ComputeTests().Last().FinalValue;
330: validRegression = _testRegressionTest.ComputeTests().Last().FinalValue;
GamTrainer.cs (3)
432: double bestLoss = finalResult.FinalValue;
436: bestLoss = PruningTest.BestResult.FinalValue;
439: ch.Info($"Best Iteration ({lossFunctionName}): {bestIteration} @ {bestLoss:G6} (vs {GamTrainerOptions.NumberOfIterations} @ {finalResult.FinalValue:G6}).");
Training\Test.cs (6)
80: return FinalValue.CompareTo(o.FinalValue) * (LowerIsBetter ? -1 : 1);
188: sb.AppendFormat("{0}.{1}={2}\n", ScoreTracker.DatasetName, r.LossFunctionName, r.FinalValue);
305: double currentValue = ComputeTests().First().FinalValue * (r.LowerIsBetter ? -1.0 : 1.0);
374: sb.AppendFormat("@{0}:{1:00.00}", i++, 100.0 * t.FinalValue);
509: sb.AppendFormat("{0}:{1:00.00}", t.LossFunctionName, t.FinalValue);
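Taken together, the reads fall into three patterns: formatting FinalValue for logs (the AppendFormat calls), selecting a best result (CompareTo, BestResult, bestLoss), and feeding early stopping (CheckScore on the validation and training FinalValue in BoostingFastTree.cs). The sketch below illustrates that consumption side; IEarlyStoppingRule, IterationScores, and Run are invented names, and only the role of FinalValue as the per-iteration scalar being compared comes from the listing.

```csharp
using System;

// Hypothetical sketch of the early-stopping/best-iteration pattern implied by
// the references above; not the Microsoft.ML.FastTree implementation.
public interface IEarlyStoppingRule
{
    // Returns true when training should stop; isBestCandidate flags whether
    // the current validation score is the best seen so far.
    bool CheckScore(float validationScore, float trainingScore, out bool isBestCandidate);
}

public sealed record IterationScores(double TrainFinalValue, double ValidFinalValue);

public static class TrainingLoopSketch
{
    public static int Run(IEarlyStoppingRule rule,
                          Func<int, IterationScores> runIteration,
                          int maxIterations)
    {
        int bestIteration = 0;
        double bestLoss = double.MaxValue;

        for (int iteration = 1; iteration <= maxIterations; iteration++)
        {
            IterationScores scores = runIteration(iteration);

            // Mirrors BoostingFastTree.cs lines 145-146: the validation and
            // training FinalValue drive the early-stopping decision.
            bool shouldStop = rule.CheckScore((float)scores.ValidFinalValue,
                                              (float)scores.TrainFinalValue,
                                              out bool isBestCandidate);

            if (isBestCandidate)
            {
                bestIteration = iteration;
                bestLoss = scores.ValidFinalValue; // cf. GamTrainer.cs lines 432/436
            }

            if (shouldStop)
                break;
        }

        Console.WriteLine($"Best iteration: {bestIteration} @ {bestLoss:G6}");
        return bestIteration;
    }
}
```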