39 references to Dataset
Microsoft.ML.FastTree (39)
Training\Test.cs (39)
350: DcgCalculator = new DcgCalculator(Dataset.MaxDocsPerQuery, _sortingAlgorithm);
356: double[] ndcg = DcgCalculator.NdcgRangeFromScores(Dataset, Labels, scores);
359: result.Add(new TestResult("NDCG@" + (i + 1).ToString(), ndcg[i] * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average));
399: fastNdcg = DcgCalculator.Ndcg1(Dataset, Labels, scores);
402: fastNdcg = DcgCalculator.Ndcg3(Dataset, Labels, scores);
411: new TestResult("NDCG@" + NdcgTruncation.ToString(), fastNdcg * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
444: fastNdcg = DcgCalculator.Ndcg1(Dataset, trainQueriesTopLabels);
447: fastNdcg = DcgCalculator.Ndcg3(Dataset, trainQueriesTopLabels);
454: new TestResult("NDCG@" + NdcgTruncation.ToString(), fastNdcg * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
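
Note: in the TestResult calls above, the per-query mean NDCG is multiplied by Dataset.NumQueries before being stored, with the query count passed alongside as the divisor, so ValueOperator.Average recovers the mean later. A minimal sketch of that store-as-raw-sum round trip, with placeholder values rather than the real API:

// Hedged sketch of the raw-sum/Average pattern used above. Storing the
// metric as (mean * count, count) lets raw sums from several partitions
// be added together before a single divide recovers the overall mean.
double meanNdcg = 0.4321;                 // e.g. ndcg[i] from NdcgRangeFromScores
double numQueries = 1000;                 // Dataset.NumQueries

double rawValue = meanNdcg * numQueries;  // second TestResult argument
double factor = numQueries;              // third TestResult argument

double recovered = rawValue / factor;    // what Average reports: meanNdcg
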
476: () => new WinLossCalculator(Dataset.MaxDocsPerQuery, _sortingAlgorithm));
481: double[] surplus = _winLossCalculator.Value.WinLossRangeFromScores(Dataset, _labels, scores);
486: new TestResult("Surplus@100", surplus[0] * _scaleFactor * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
487: new TestResult("Surplus@200", surplus[1] * _scaleFactor * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
488: new TestResult("Surplus@300", surplus[2] * _scaleFactor * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
489: new TestResult("Surplus@400", surplus[3] * _scaleFactor * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
490: new TestResult("Surplus@500", surplus[4] * _scaleFactor * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
491: new TestResult("Surplus@1000", surplus[5] * _scaleFactor * Dataset.NumQueries, Dataset.NumQueries, false, TestResult.ValueOperator.Average),
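
The Surplus@k rows follow the same raw-sum pattern, additionally scaled by _scaleFactor. Assuming surplus@k means wins minus losses among a query's top-k ranked documents (the actual definition lives in WinLossCalculator, which may differ in detail), a hypothetical per-query calculation could look like:

using System;

// Hypothetical sketch only: treats each ranked document as a win or a
// loss and returns wins minus losses within the top k.
static int SurplusAtK(bool[] rankedIsWin, int k)
{
    int surplus = 0;
    for (int i = 0; i < Math.Min(k, rankedIsWin.Length); i++)
        surplus += rankedIsWin[i] ? 1 : -1;
    return surplus;
}
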
535: double[] weights = Dataset.SampleWeights;
538: int chunkSize = 1 + Dataset.NumDocs / BlockingThreadPool.NumThreads; // Minimizes the number of repeat computations in sparse array to have each thread take as big a chunk as possible
541: var actions = new Action[(int)Math.Ceiling(1.0 * Dataset.NumDocs / chunkSize)];
543: for (int documentStart = 0; documentStart < Dataset.NumDocs; documentStart += chunkSize)
546: var endDoc = Math.Min(documentStart + chunkSize - 1, Dataset.NumDocs - 1);
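
The chunking arithmetic above guarantees that the number of chunks, ceil(NumDocs / chunkSize), never exceeds BlockingThreadPool.NumThreads, because chunkSize = 1 + NumDocs / NumThreads is strictly greater than NumDocs / NumThreads. A standalone sketch of the same partitioning (PartitionDocs is an illustrative name, not part of the source):

using System;
using System.Collections.Generic;

// Mirrors the loop above: chunkSize is rounded up so the chunk count
// stays at or below the thread count, and each thread takes as large a
// contiguous [start, end] document range as possible.
static List<(int Start, int End)> PartitionDocs(int numDocs, int numThreads)
{
    int chunkSize = 1 + numDocs / numThreads;
    var ranges = new List<(int, int)>();
    for (int start = 0; start < numDocs; start += chunkSize)
        ranges.Add((start, Math.Min(start + chunkSize - 1, numDocs - 1)));
    return ranges;
}

// For example, 10 docs on 4 threads gives chunkSize 3 and the ranges
// (0,2), (3,5), (6,8), (9,9): four chunks for four threads.
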
574: result.Add(new TestResult("L1", totalL1Error, Dataset.NumDocs, true, TestResult.ValueOperator.Average));
577: result.Add(new TestResult("L2", totalL2Error, Dataset.NumDocs, true, TestResult.ValueOperator.SqrtAverage));
580: result.Add(new TestResult("L1", totalL1Error, Dataset.NumDocs, true, TestResult.ValueOperator.Average));
581: result.Add(new TestResult("L2", totalL2Error, Dataset.NumDocs, true, TestResult.ValueOperator.SqrtAverage));
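
The L1 and L2 rows store raw error totals with Dataset.NumDocs as the divisor. Assuming the operators behave as their names suggest, Average turns the L1 total into mean absolute error and SqrtAverage turns the L2 total into root mean squared error:

using System;

// Assumed reductions, inferred from the operator names: Average divides
// the total by the document count; SqrtAverage divides, then takes the root.
static double MeanAbsoluteError(double totalL1Error, int numDocs)
    => totalL1Error / numDocs;                 // ValueOperator.Average

static double RootMeanSquaredError(double totalL2Error, int numDocs)
    => Math.Sqrt(totalL2Error / numDocs);      // ValueOperator.SqrtAverage
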
656: int chunkSize = 1 + Dataset.NumDocs / BlockingThreadPool.NumThreads; // Minimizes the number of repeat computations in sparse array to have each thread take as big a chunk as possible
659: var actions = new Action[(int)Math.Ceiling(1.0 * Dataset.NumDocs / chunkSize)];
661: for (int documentStart = 0; documentStart < Dataset.NumDocs; documentStart += chunkSize)
664: var endDoc = Math.Min(documentStart + chunkSize - 1, Dataset.NumDocs - 1);