8 writes to Scores

Microsoft.ML.FastTree (8)

  Training\OptimizationAlgorithms\GradientDescent.cs (2)
    112: TrainingScores.Scores = _scores;
    124: TrainingScores.Scores = backupScores;

  Training\ScoreTracker.cs (6)
    25:  Scores = (double[])s.Scores.Clone();
    46:  Scores = (double[])scores1.Scores.Clone();
    62:  Scores = new double[Dataset.NumDocs];
    70:  Scores = (double[])initScores.Clone();
    77:  Scores = scores;
    125: public double[] XK { get { return Scores; } set { Scores = value; } } //An Xk is an alias to scores
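Taken together, the write sites fall into three patterns: allocating a fresh array sized to the dataset, cloning scores from another tracker or from initial scores, and aliasing the array through the XK property. The following is a minimal, self-contained C# sketch of that pattern; the class name and constructors are illustrative stand-ins, not the actual ML.NET ScoreTracker.

    // Sketch only: mirrors the write patterns listed above, not the real class.
    public class ScoreTrackerSketch
    {
        // One score per document in the dataset.
        public double[] Scores { get; set; }

        // Fresh allocation (cf. "Scores = new double[Dataset.NumDocs];" at ScoreTracker.cs:62).
        public ScoreTrackerSketch(int numDocs)
        {
            Scores = new double[numDocs];
        }

        // Copy construction (cf. "Scores = (double[])s.Scores.Clone();" at ScoreTracker.cs:25).
        public ScoreTrackerSketch(ScoreTrackerSketch other)
        {
            Scores = (double[])other.Scores.Clone();
        }

        // XK is simply another name for Scores, as the comment at ScoreTracker.cs:125 notes.
        public double[] XK
        {
            get { return Scores; }
            set { Scores = value; }
        }
    }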
34 references to Scores

Microsoft.ML.FastTree (34)

  FastTree.cs (2)
    689: OptimizationAlgorithm.TrainingScores.Scores,
    854: return st.Scores;

  FastTreeTweedie.cs (1)
    404: var scores = trainingScores.Scores;

  GamTrainer.cs (4)
    312: var gradient = _objectiveFunction.GetGradient(ch, TrainSetScore.Scores);
    316: TrainOnEachFeature(gradient, TrainSetScore.Scores, sumTargets, sumWeights, iteration);
    378: UpdateScoresForSet(TrainSet, TrainSetScore.Scores, iteration);
    383: UpdateScoresForSet(ValidSet, ValidSetScore.Scores, iteration);

  Training\OptimizationAlgorithms\ConjugateGradientDescent.cs (1)
    26:  _currentGradient = ObjectiveFunction.GetGradient(ch, TrainingScores.Scores);

  Training\OptimizationAlgorithms\GradientDescent.cs (6)
    43:  _droppedScores = new double[TrainingScores.Scores.Length];
    47:  _scores = new double[TrainingScores.Scores.Length];
    72:  _scores[j] = TrainingScores.Scores[j] - _droppedScores[j];
    73:  TrainingScores.Scores[j] -= _droppedScores[j] / (1.0 + _numberOfDroppedTrees);
    78:  return ObjectiveFunction.GetGradient(ch, TrainingScores.Scores);
    111: backupScores = TrainingScores.Scores;

  Training\OptimizationAlgorithms\OptimizationAlgorithm.cs (1)
    76:  t.SetScores(updatedScores.Scores);

  Training\ScoreTracker.cs (16)
    25:  Scores = (double[])s.Scores.Clone();
    44:  if (Scores == null || Scores.Length != scores1.Scores.Length)
    46:  Scores = (double[])scores1.Scores.Clone();
    50:  Array.Copy(scores1.Scores, Scores, Scores.Length);
    61:  if (Scores == null)
    64:  Array.Clear(Scores, 0, Scores.Length);
    90:  for (int i = 0; i < Scores.Length; ++i)
    91:  Scores[i] += 10.0 * rndStart.NextDouble() * (reverseRandomization ? -1.0 : 1.0);
    97:  tree.AddOutputsToScores(Dataset, Scores, multiplier);
    113: Scores[documents[i]] += output;
    125: public double[] XK { get { return Scores; } set { Scores = value; } } //An Xk is an alias to scores

  Training\StepSearch.cs (1)
    87:  Loss = _lossCalculator.ComputeTests(Scores.Scores).ToList()[_lossIndex];

  Training\Test.cs (2)
    172: CachedResults = ComputeTests(ScoreTracker.Scores);
    433: CachedResults = ComputeTests(_trainingScores.Scores);
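On the read side, GradientDescent.cs is representative: TrainingScores.Scores is used to size working arrays, adjusted for the contribution of dropped trees, and then handed to the gradient computation (lines 43, 47, 72-73, and 78 above). Below is a minimal, self-contained sketch of that flow; the GetGradient stand-in and all data values are invented for illustration and are not the ML.NET implementation.

    using System;

    // Sketch only: reproduces the read/adjust pattern from GradientDescent.cs.
    static class GradientDescentSketch
    {
        // Stand-in for ObjectiveFunction.GetGradient: here just the residual label - score.
        static double[] GetGradient(double[] scores, double[] labels)
        {
            var gradient = new double[scores.Length];
            for (int i = 0; i < scores.Length; i++)
                gradient[i] = labels[i] - scores[i];
            return gradient;
        }

        static void Main()
        {
            double[] trainingScores = { 0.2, 0.5, 0.9 };  // plays the role of TrainingScores.Scores
            double[] droppedScores = { 0.1, 0.0, 0.3 };   // contribution of the trees dropped this iteration
            double[] labels = { 0.0, 1.0, 1.0 };
            int numberOfDroppedTrees = 1;

            var scores = new double[trainingScores.Length];  // cf. line 47
            for (int j = 0; j < trainingScores.Length; j++)
            {
                // Back up the fully adjusted score, then rescale the tracked score (cf. lines 72-73).
                scores[j] = trainingScores[j] - droppedScores[j];
                trainingScores[j] -= droppedScores[j] / (1.0 + numberOfDroppedTrees);
            }

            // Gradient is computed over the adjusted tracked scores (cf. line 78).
            double[] gradient = GetGradient(trainingScores, labels);
            Console.WriteLine(string.Join(", ", gradient));
        }
    }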