18 references to _messages
System.Threading.Tasks.Dataflow (18)
Blocks\BatchBlock.cs (18)
389:  _messages.Enqueue(messageValue!);
395:  (_batchesCompleted + (_messages.Count / _batchSize)) >= _dataflowBlockOptions.ActualMaxNumberOfGroups)
444:  if (dropPendingMessages) _messages.Clear();
509:  bool noMoreMessages = _decliningPermanently && _messages.Count < _batchSize;
520:  if (_messages.Count > 0) MakeBatchIfPossible(evenIfFewerThanBatchSize: true);
574:  int neededMessageCountToCompleteBatch = _batchSize - _messages.Count;
583:  if (_nonGreedyState.AcceptFewerThanBatchSize && _messages.Count > 0)
649:  _messages.Count + _nonGreedyState.PostponedMessages.Count);
735:  bool fullBatch = _messages.Count >= _batchSize;
738:  if (fullBatch || (evenIfFewerThanBatchSize && _messages.Count > 0))
740:  var newBatch = new T[fullBatch ? _batchSize : _messages.Count];
741:  for (int i = 0; i < newBatch.Length; i++) newBatch[i] = _messages.Dequeue();
773:  Debug.Assert(_messages.Count == 0, "The queue must be empty between batches in non-greedy mode");
893:  itemCountNeededToCompleteBatch = _batchSize - _messages.Count;
1021: _messages.Enqueue(sourceAndMessage.Value.Value);
1071: if (sourceAndMessage.Key != null) _messages.Enqueue(sourceAndMessage.Value.Value);
1153: private int InputCountForDebugger { get { return _messages.Count; } }
1180: public IEnumerable<T> InputQueue { get { return _target._messages.ToList(); } }
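The references above show the core pattern: items are enqueued into the _messages queue, and a batch array is drained out once _batchSize items are available (or fewer, when completion forces a final partial batch). The following is a minimal sketch of that pattern only, not the actual BatchBlock implementation; the SimpleBatcher type and its members are hypothetical, though the batch-draining logic mirrors the snippets at lines 735-741.

    // Hypothetical, simplified illustration of the batching pattern seen above.
    using System;
    using System.Collections.Generic;

    internal sealed class SimpleBatcher<T>
    {
        private readonly Queue<T> _messages = new Queue<T>(); // pending input items
        private readonly int _batchSize;

        public SimpleBatcher(int batchSize) => _batchSize = batchSize;

        // cf. line 389: accepted messages are enqueued for later batching.
        public void Post(T item) => _messages.Enqueue(item);

        // cf. lines 735-741: emit a full batch, or a partial one when allowed
        // (e.g. on completion); otherwise emit nothing.
        public T[]? MakeBatchIfPossible(bool evenIfFewerThanBatchSize)
        {
            bool fullBatch = _messages.Count >= _batchSize;
            if (!fullBatch && !(evenIfFewerThanBatchSize && _messages.Count > 0))
                return null;

            var newBatch = new T[fullBatch ? _batchSize : _messages.Count];
            for (int i = 0; i < newBatch.Length; i++) newBatch[i] = _messages.Dequeue();
            return newBatch;
        }

        // cf. lines 1153 and 1180: debugger views expose the queue's count and contents.
        public int InputCount => _messages.Count;
    }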