8 writes to Scores
Microsoft.ML.FastTree (8)
Training\OptimizationAlgorithms\GradientDescent.cs (2)
  112: TrainingScores.Scores = _scores;
  124: TrainingScores.Scores = backupScores;
Training\ScoreTracker.cs (6)
  25: Scores = (double[])s.Scores.Clone();
  46: Scores = (double[])scores1.Scores.Clone();
  62: Scores = new double[Dataset.NumDocs];
  70: Scores = (double[])initScores.Clone();
  77: Scores = scores;
  125: public double[] XK { get { return Scores; } set { Scores = value; } } //An Xk is an alias to scores
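Taken together, the ScoreTracker.cs writes above follow a small pattern: the score vector is either cloned from another tracker, allocated to Dataset.NumDocs, copied from caller-supplied initial scores, or assigned directly, and XK is exposed as an alias so optimization code can treat the score vector as the current iterate. A minimal C# sketch of that pattern, assuming only the members visible in these snippets (the name ScoreTrackerSketch and the numDocs/initScores/copyInit parameters are hypothetical):

    // Hypothetical sketch; only the write sites listed above are modeled.
    public class ScoreTrackerSketch
    {
        public double[] Scores { get; set; }

        // Lines 62 / 70 / 77: fresh allocation, defensive copy, or direct assignment.
        public ScoreTrackerSketch(int numDocs, double[] initScores = null, bool copyInit = true)
        {
            if (initScores == null)
                Scores = new double[numDocs];            // line 62
            else if (copyInit)
                Scores = (double[])initScores.Clone();   // line 70
            else
                Scores = initScores;                     // line 77
        }

        // Line 25: copy construction clones the other tracker's scores.
        public ScoreTrackerSketch(ScoreTrackerSketch s)
        {
            Scores = (double[])s.Scores.Clone();
        }

        // Line 125: XK is an alias for Scores, so optimizers can read and write
        // the current iterate x_k through either name.
        public double[] XK
        {
            get { return Scores; }
            set { Scores = value; }
        }
    }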
34 references to Scores
Microsoft.ML.FastTree (34)
FastTree.cs (2)
  689: OptimizationAlgorithm.TrainingScores.Scores,
  854: return st.Scores;
FastTreeTweedie.cs (1)
  404: var scores = trainingScores.Scores;
GamTrainer.cs (4)
  312: var gradient = _objectiveFunction.GetGradient(ch, TrainSetScore.Scores);
  316: TrainOnEachFeature(gradient, TrainSetScore.Scores, sumTargets, sumWeights, iteration);
  378: UpdateScoresForSet(TrainSet, TrainSetScore.Scores, iteration);
  383: UpdateScoresForSet(ValidSet, ValidSetScore.Scores, iteration);
Training\OptimizationAlgorithms\ConjugateGradientDescent.cs (1)
  26: _currentGradient = ObjectiveFunction.GetGradient(ch, TrainingScores.Scores);
Training\OptimizationAlgorithms\GradientDescent.cs (6)
  43: _droppedScores = new double[TrainingScores.Scores.Length];
  47: _scores = new double[TrainingScores.Scores.Length];
  72: _scores[j] = TrainingScores.Scores[j] - _droppedScores[j];
  73: TrainingScores.Scores[j] -= _droppedScores[j] / (1.0 + _numberOfDroppedTrees);
  78: return ObjectiveFunction.GetGradient(ch, TrainingScores.Scores);
  111: backupScores = TrainingScores.Scores;
Training\OptimizationAlgorithms\OptimizationAlgorithm.cs (1)
  76: t.SetScores(updatedScores.Scores);
Training\ScoreTracker.cs (16)
  25: Scores = (double[])s.Scores.Clone();
  44: if (Scores == null || Scores.Length != scores1.Scores.Length)
  46: Scores = (double[])scores1.Scores.Clone();
  50: Array.Copy(scores1.Scores, Scores, Scores.Length);
  61: if (Scores == null)
  64: Array.Clear(Scores, 0, Scores.Length);
  90: for (int i = 0; i < Scores.Length; ++i)
  91: Scores[i] += 10.0 * rndStart.NextDouble() * (reverseRandomization ? -1.0 : 1.0);
  97: tree.AddOutputsToScores(Dataset, Scores, multiplier);
  113: Scores[documents[i]] += output;
  125: public double[] XK { get { return Scores; } set { Scores = value; } } //An Xk is an alias to scores
Training\StepSearch.cs (1)
  87: Loss = _lossCalculator.ComputeTests(Scores.Scores).ToList()[_lossIndex];
Training\Test.cs (2)
  172: CachedResults = ComputeTests(ScoreTracker.Scores);
  433: CachedResults = ComputeTests(_trainingScores.Scores);
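The GradientDescent.cs call sites above (lines 43-124) read, adjust, and temporarily swap the tracker's Scores array when trees are dropped. A hypothetical C# sketch of that flow, reusing the ScoreTrackerSketch class from the earlier sketch; getGradient and trainTree stand in for ObjectiveFunction.GetGradient and the tree-fitting step, neither of which appears in this listing, and the ordering is a simplification of the real methods:

    using System;

    // Hypothetical sketch of the dropout (dropped-tree) read/swap pattern.
    public static class DroppedTreeGradientSketch
    {
        public static void GradientStepWithDropout(
            ScoreTrackerSketch trainingScores,
            double[] droppedScores,              // summed outputs of the dropped trees
            int numberOfDroppedTrees,
            Func<double[], double[]> getGradient,
            Action<double[]> trainTree)
        {
            // Lines 43/47: working buffer sized to the current score vector.
            var scores = new double[trainingScores.Scores.Length];

            for (int j = 0; j < scores.Length; j++)
            {
                // Line 72: scores with the dropped trees' contribution removed.
                scores[j] = trainingScores.Scores[j] - droppedScores[j];
                // Line 73: the tracker keeps a rescaled version of the same adjustment.
                trainingScores.Scores[j] -= droppedScores[j] / (1.0 + numberOfDroppedTrees);
            }

            // Line 78: the gradient is taken against the tracker's current scores.
            double[] gradient = getGradient(trainingScores.Scores);

            // Line 111: keep the tracker's real array, then (line 112) install the
            // dropout-adjusted buffer while the next tree is fit to the gradient.
            double[] backupScores = trainingScores.Scores;
            trainingScores.Scores = scores;
            trainTree(gradient);

            // Line 124: restore the original scores once the tree has been built.
            trainingScores.Scores = backupScores;
        }
    }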