76 references to NumDocs
Microsoft.ML.FastTree (76)
Dataset\Dataset.cs (1)
944: Contracts.Assert(0 <= rowIndex && rowIndex < indexer._dataset.NumDocs);
FastTree.cs (2)
433: set.NumDocs, set.NumQueries, set.NumFeatures, datasetSize / 1024 / 1024, (datasetSize - skeletonSize) / 1024 / 1024);
863: double[] scores = new double[set.NumDocs];
FastTreeClassification.cs (1)
253: _trainSetLabels = GetClassificationLabelsFromRatings(TrainSet).ToArray(TrainSet.NumDocs);
FastTreeRanking.cs (5)
633: _scoresCopy = new double[Dataset.NumDocs];
634: _labelsCopy = new short[Dataset.NumDocs];
635: _groupIdToTopLabel = new short[Dataset.NumDocs];
961: _gainLabels = new double[Dataset.NumDocs];
962: for (int i = 0; i < Dataset.NumDocs; i++)
FastTreeRegression.cs (1)
177: Contracts.Assert(dlabels.Length == set.NumDocs);
FastTreeTweedie.cs (1)
190: Contracts.Assert(dlabels.Length == set.NumDocs);
GamTrainer.cs (4)
333: TrainSet.NumDocs,
398: DefineDocumentThreadBlocks(dataset.NumDocs, BlockingThreadPool.NumThreads, out int[] threadBlocks);
494: DefineDocumentThreadBlocks(TrainSet.NumDocs, BlockingThreadPool.NumThreads, out int[] trainThreadBlocks);
529: meanEffects[featureIndex] /= TrainSet.NumDocs;
RandomForestClassification.cs (1)
357: _trainSetLabels = TrainSet.Ratings.Select(x => x >= 1).ToArray(TrainSet.NumDocs);
RandomForestRegression.cs (1)
527: Contracts.Assert(_labels.Length == trainData.NumDocs);
Training\Applications\ObjectiveFunction.cs (2)
51: Gradient = new double[Dataset.NumDocs];
52: Weights = new double[Dataset.NumDocs];
Training\BaggingProvider.cs (4)
30: int[] trainDocs = new int[CompleteTrainingSet.NumDocs];
31: int[] outOfBagDocs = new int[CompleteTrainingSet.NumDocs];
99: int[] trainDocs = new int[CompleteTrainingSet.NumDocs];
100: int[] outOfBagDocs = new int[CompleteTrainingSet.NumDocs];
Training\DcgCalculator.cs (1)
505: int[] result = new int[dataset.NumDocs];
Training\DocumentPartitioning.cs (7)
55: : this(dataset.NumDocs, tree.NumLeaves)
62: int innerLoopSize = 1 + dataset.NumDocs / BlockingThreadPool.NumThreads; // +1 is to make sure we don't have a few left over at the end
65: int numChunks = dataset.NumDocs / innerLoopSize;
66: if (dataset.NumDocs % innerLoopSize != 0)
71: var actions = new Action[(int)Math.Ceiling(1.0 * dataset.NumDocs / innerLoopSize)];
73: for (int docStart = 0; docStart < dataset.NumDocs; docStart += innerLoopSize)
76: var toDoc = Math.Min(docStart + innerLoopSize, dataset.NumDocs);
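The DocumentPartitioning references above (lines 62-76) all use the same work-splitting idiom: divide the NumDocs documents into contiguous chunks of size 1 + NumDocs / NumThreads, rounding up so the last few documents do not need an extra undersized chunk. A minimal stand-alone sketch of that arithmetic is below; ChunkDocuments and the numThreads parameter are illustrative names, not part of the FastTree API.

```csharp
using System;
using System.Collections.Generic;

internal static class ChunkingSketch
{
    // Illustrative helper: split numDocs documents into contiguous [start, end) ranges,
    // mirroring the "1 + NumDocs / NumThreads" sizing seen in the references above.
    public static List<(int Start, int End)> ChunkDocuments(int numDocs, int numThreads)
    {
        // +1 rounds the chunk size up, so a handful of leftover documents at the
        // end fold into the final chunk instead of being dropped.
        int innerLoopSize = 1 + numDocs / numThreads;
        var chunks = new List<(int Start, int End)>();
        for (int docStart = 0; docStart < numDocs; docStart += innerLoopSize)
        {
            int toDoc = Math.Min(docStart + innerLoopSize, numDocs);
            chunks.Add((docStart, toDoc));
        }
        return chunks;
    }
}
```

For example, 10 documents on 4 threads gives innerLoopSize = 3 and four ranges: [0,3), [3,6), [6,9), [9,10).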
Training\EnsembleCompression\LassoBasedEnsembleCompressor.cs (4)
66: _numObservations = Math.Min(_trainSet.NumDocs, maxObservations);
82: if (_numObservations == _trainSet.NumDocs)
91: for (int d = 0; d < _trainSet.NumDocs; d++)
115: for (int d = 0; d < _trainSet.NumDocs; d++)
Training\OptimizationAlgorithms\ConjugateGradientDescent.cs (1)
18: _currentDk = new double[trainData.NumDocs];
Training\ScoreTracker.cs (6)
62: Scores = new double[Dataset.NumDocs];
68: if (initScores.Length != Dataset.NumDocs)
172: int innerLoopSize = 1 + Dataset.NumDocs / BlockingThreadPool.NumThreads; // +1 is to make sure we don't have a few left over at the end
175: var actions = new Action[(int)Math.Ceiling(1.0 * Dataset.NumDocs / innerLoopSize)];
177: for (int d = 0; d < Dataset.NumDocs; d += innerLoopSize)
180: var toDoc = Math.Min(d + innerLoopSize, Dataset.NumDocs);
Training\Test.cs (15)
348: Contracts.Check(scoreTracker.Dataset.NumDocs == labels.Length, "Mismatch between dataset and labels");
528: Contracts.Check(scoreTracker.Dataset.NumDocs == _labels.Length, "Mismatch between dataset and labels");
538: int chunkSize = 1 + Dataset.NumDocs / BlockingThreadPool.NumThreads; // Minimizes the number of repeat computations in sparse array to have each thread take as big a chunk as possible
541: var actions = new Action[(int)Math.Ceiling(1.0 * Dataset.NumDocs / chunkSize)];
543: for (int documentStart = 0; documentStart < Dataset.NumDocs; documentStart += chunkSize)
546: var endDoc = Math.Min(documentStart + chunkSize - 1, Dataset.NumDocs - 1);
574: result.Add(new TestResult("L1", totalL1Error, Dataset.NumDocs, true, TestResult.ValueOperator.Average));
577: result.Add(new TestResult("L2", totalL2Error, Dataset.NumDocs, true, TestResult.ValueOperator.SqrtAverage));
580: result.Add(new TestResult("L1", totalL1Error, Dataset.NumDocs, true, TestResult.ValueOperator.Average));
581: result.Add(new TestResult("L2", totalL2Error, Dataset.NumDocs, true, TestResult.ValueOperator.SqrtAverage));
602: Contracts.Check(scoreTracker.Dataset.NumDocs == binaryLabels.Length, "Mismatch between dataset and labels");
656: int chunkSize = 1 + Dataset.NumDocs / BlockingThreadPool.NumThreads; // Minimizes the number of repeat computations in sparse array to have each thread take as big a chunk as possible
659: var actions = new Action[(int)Math.Ceiling(1.0 * Dataset.NumDocs / chunkSize)];
661: for (int documentStart = 0; documentStart < Dataset.NumDocs; documentStart += chunkSize)
664: var endDoc = Math.Min(documentStart + chunkSize - 1, Dataset.NumDocs - 1);
Training\TreeLearners\LeastSquaresRegressionTreeLearner.cs (4)
355: if (Partitioning.NumDocs == TrainData.NumDocs)
847: _docIndicesCopy = _docIndices = new int[data.NumDocs];
848: Targets = new FloatType[data.NumDocs];
850: Weights = new double[data.NumDocs];
Training\TreeLearners\TreeLearner.cs (1)
21: Partitioning = new DocumentPartitioning(TrainData.NumDocs, numLeaves);
TreeEnsemble\InternalRegressionTree.cs (10)
996: double[] outputs = new double[dataset.NumDocs];
997: for (int d = 0; d < dataset.NumDocs; ++d)
1379: int innerLoopSize = 1 + dataset.NumDocs / BlockingThreadPool.NumThreads; // +1 is to make sure we don't have a few left over at the end
1382: var actions = new Action[(int)Math.Ceiling(1.0 * dataset.NumDocs / innerLoopSize)];
1384: for (int d = 0; d < dataset.NumDocs; d += innerLoopSize)
1387: var toDoc = Math.Min(d + innerLoopSize, dataset.NumDocs);
1404: int innerLoopSize = 1 + dataset.NumDocs / BlockingThreadPool.NumThreads; // +1 is to make sure we don't have a few left over at the end
1407: var actions = new Action[(int)Math.Ceiling(1.0 * dataset.NumDocs / innerLoopSize)];
1409: for (int d = 0; d < dataset.NumDocs; d += innerLoopSize)
1412: var toDoc = Math.Min(d + innerLoopSize, dataset.NumDocs);
TreeEnsemble\InternalTreeEnsemble.cs (4)
293: int innerLoopSize = 1 + dataset.NumDocs / BlockingThreadPool.NumThreads; // minimize number of times we have to skip forward in the sparse arrays
296: var actions = new Action[(int)Math.Ceiling(1.0 * dataset.NumDocs / innerLoopSize)];
298: for (int d = 0; d < dataset.NumDocs; d += innerLoopSize)
303: var toDoc = Math.Min(d + innerLoopSize, dataset.NumDocs);
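The references in ScoreTracker.cs, Test.cs, InternalRegressionTree.cs, and InternalTreeEnsemble.cs wrap each of those document ranges in an Action and run the whole array in parallel, each worker writing a disjoint slice of a shared per-document array. The sketch below is a hedged illustration of that dispatch pattern only: Parallel.Invoke stands in for FastTree's internal BlockingThreadPool, and AddOutputsToScores and scoreDocument are hypothetical names, not FastTree members.

```csharp
using System;
using System.Threading.Tasks;

internal static class ParallelScoringSketch
{
    // Illustrative version of the chunked parallel loop seen above: one Action per
    // contiguous document range, each writing a disjoint slice of scores, so the
    // workers need no locking.
    public static void AddOutputsToScores(int numDocs, double[] scores, Func<int, double> scoreDocument)
    {
        int numThreads = Environment.ProcessorCount;          // stand-in for BlockingThreadPool.NumThreads
        int innerLoopSize = 1 + numDocs / numThreads;          // round up so no documents are left over
        var actions = new Action[(int)Math.Ceiling(1.0 * numDocs / innerLoopSize)];
        int actionIndex = 0;
        for (int d = 0; d < numDocs; d += innerLoopSize)
        {
            int fromDoc = d;
            int toDoc = Math.Min(d + innerLoopSize, numDocs);
            actions[actionIndex++] = () =>
            {
                for (int doc = fromDoc; doc < toDoc; doc++)
                    scores[doc] += scoreDocument(doc);         // hypothetical per-document scorer
            };
        }
        Parallel.Invoke(actions);                              // stand-in for the internal thread pool
    }
}
```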