/**
 * Publishes the current state of the transit routing queue to the progress
 * tracker attached to the context, if one is present.
 *
 * @param ctx   routing context that may carry a {@code calculationProgress} tracker
 * @param queue pending segment queue whose size and head distance are reported
 */
private void updateCalculationProgress(TransportRoutingContext ctx, PriorityQueue<TransportRouteSegment> queue) {
    if (ctx.calculationProgress == null) {
        return; // nobody is listening for progress
    }
    ctx.calculationProgress.directSegmentQueueSize = queue.size();
    TransportRouteSegment head = queue.peek();
    if (head != null) {
        // distanceFromBegin only ever grows: keep the larger of the recorded
        // value and the head segment's distance from the start.
        ctx.calculationProgress.distanceFromBegin =
                (float) Math.max(head.distFromStart, ctx.calculationProgress.distanceFromBegin);
    }
}
@Override public boolean next() throws IOException { while (pq.size() > 0) { ReaderBuffer buffer = pq.poll(); String[] minEntry = buffer.pop(); this.row = minEntry; if (buffer.empty()) { pq.remove(buffer); } else { pq.add(buffer); // add it back } if (this.row == null) { //avoid the case of empty file return false; } return true; } return false; }
/** * 添加一个元素 * @param e 元素 * @return 是否添加成功 */ public boolean add(E e) { if (queue.size() < maxSize) { // 未达到最大容量,直接添加 queue.add(e); return true; } else { // 队列已满 E peek = queue.peek(); if (queue.comparator().compare(e, peek) > 0) { // 将新元素与当前堆顶元素比较,保留较小的元素 queue.poll(); queue.add(e); return true; } } return false; }
/**
 * Folds {@code val} into a size-bounded min-heap that retains the {@code n}
 * largest values seen so far.
 *
 * @param accumulator min-heap accumulator (natural ordering)
 * @param n           maximum number of values to retain; non-positive leaves it untouched
 * @param val         candidate value
 * @return the same accumulator, for chaining
 */
public static PriorityQueue<Integer> add(PriorityQueue<Integer> accumulator, Integer n, Integer val) {
    if (n <= 0) {
        return accumulator;
    }
    if (accumulator.size() < n) {
        // Below capacity: always keep the value.
        accumulator.add(val);
    } else if (val > accumulator.peek()) {
        // At capacity: evict the current minimum when the candidate beats it.
        accumulator.remove();
        accumulator.add(val);
    }
    return accumulator;
}
public static List<Integer> result(PriorityQueue<Integer> accumulator) {
// NOTE(review): this fragment appears truncated by extraction — both while-loops and the
// FlowFileQueueSize constructor call below are never closed, and the enclosing method
// signature is not visible. Comments only; verify against the full source file.

// Rebuild a single ordering over both the active and swap queues, reversed relative
// to the prioritizer so the head is the record chosen first for swap-out.
final PriorityQueue<FlowFileRecord> tempQueue = new PriorityQueue<>(activeQueue.size() + swapQueue.size(),
        Collections.reverseOrder(new QueuePrioritizer(getPriorities())));
tempQueue.addAll(activeQueue);
tempQueue.addAll(swapQueue);

// Take the head for swap-out and account for its bytes.
final FlowFileRecord flowFile = tempQueue.poll();
toSwap.add(flowFile);
bytesSwappedOut += flowFile.getSize();

// Drain records beyond the swap threshold into the swap queue, tracking byte totals.
while (tempQueue.size() > swapThreshold) {
    final FlowFileRecord record = tempQueue.poll();
    swapQueue.add(record);
    updatedSwapQueueBytes += record.getSize();
    FlowFileRecord toRequeue;
    long activeQueueBytes = 0L;
    // Everything remaining is returned to the active queue.
    while ((toRequeue = tempQueue.poll()) != null) {
        activeQueue.offer(toRequeue);
        activeQueueBytes += toRequeue.getSize();
        final long addedSwapBytes = updatedSwapQueueBytes - originalSwapQueueBytes;
        // Recompute the queue-size snapshot from the updated active/swap tallies.
        final FlowFileQueueSize newSize = new FlowFileQueueSize(activeQueue.size(), activeQueueBytes,
                originalSize.getSwappedCount() + addedSwapRecords + flowFilesSwappedOut,
                originalSize.getSwappedBytes() + addedSwapBytes + bytesSwappedOut,
@Override public boolean moveNext() throws IOException { while (pq.size() > 0) { ReaderBuffer buffer = pq.poll(); String minEntry = buffer.pop(); this.colValue = minEntry; if (buffer.empty()) { pq.remove(buffer); } else { pq.add(buffer); // add it back } if (this.colValue == null) { //avoid the case of empty file return false; } return true; } return false; }
/**
 * Records a (group key, result) pair in the bounded min-heap, keeping only the
 * {@code _trimSize} highest-ranked pairs according to {@code _comparator}.
 *
 * @param groupKey group key for the pair
 * @param result   aggregation result, expected to be {@link Comparable}
 */
@SuppressWarnings("unchecked")
@Override
public void add(String groupKey, Object result) {
    GroupKeyResultPair candidate = new GroupKeyResultPair(groupKey, (Comparable) result);
    if (_heap.size() != _trimSize) {
        // Heap still has room: accept unconditionally.
        _heap.add(candidate);
        return;
    }
    // Heap is at capacity: evict the current minimum only if the candidate ranks higher.
    if (_comparator.compare(candidate, _heap.peek()) > 0) {
        _heap.poll();
        _heap.add(candidate);
    }
}
/**
 * Collects up to {@code k} (log-probability, index) pairs for the highest-scoring
 * outputs into {@code heap}, skipping probabilities below {@code threshold}.
 *
 * @param k         maximum number of results to retain
 * @param threshold minimum raw probability for a candidate to be considered
 * @param heap      min-heap receiving {@code FloatIntPair(logProb, index)} entries
 * @param hidden    hidden-layer vector fed to the softmax
 * @param output    output vector populated by the softmax
 */
private void findKBest(
        int k, float threshold, PriorityQueue<FloatIntPair> heap, Vector hidden, Vector output) {
    computeOutputSoftmax(hidden, output);
    for (int idx = 0; idx < osz_; idx++) {
        // Below the probability floor: never a candidate.
        if (output.data_[idx] < threshold) {
            continue;
        }
        // Heap already full and this candidate cannot beat the weakest entry.
        if (heap.size() == k && stdLog(output.data_[idx]) < heap.peek().first) {
            continue;
        }
        heap.add(new FloatIntPair(stdLog(output.data_[idx]), idx));
        if (heap.size() > k) {
            heap.remove(); // shed the weakest pair to stay at k entries
        }
    }
}
/**
 * Estimates how complete the training data is for the linear regression model,
 * based on the fullest {@code MIN_CPU_UTIL_OBSERVATION_BUCKETS} observation buckets.
 *
 * @return completeness in [0, 1]; 1.0 when enough buckets each hold at least
 *         {@code NUM_OBSERVATIONS_PER_UTIL_BUCKET} observations
 */
public double modelCoefficientTrainingCompleteness() {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Linear regression model training data indices: {}", INDICES);
    }
    // Min-heap retains only the counts of the fullest buckets.
    PriorityQueue<Integer> fullestBuckets = new PriorityQueue<>(MIN_CPU_UTIL_OBSERVATION_BUCKETS);
    for (AtomicInteger count : INDICES.values()) {
        fullestBuckets.offer(count.get());
        if (fullestBuckets.size() > MIN_CPU_UTIL_OBSERVATION_BUCKETS) {
            fullestBuckets.poll(); // drop the emptiest of the retained buckets
        }
    }
    double completeness = 0.0;
    for (int observed : fullestBuckets) {
        // Each bucket contributes proportionally, capped at a "full" bucket.
        completeness += ((double) Math.min(observed, NUM_OBSERVATIONS_PER_UTIL_BUCKET))
                / NUM_OBSERVATIONS_PER_UTIL_BUCKET / MIN_CPU_UTIL_OBSERVATION_BUCKETS;
    }
    return completeness;
}
private boolean shouldAdd(String dimName) { final boolean belowThreshold = pQueue.size() < threshold; final boolean belowMax = belowThreshold || comparator.compare(pQueue.peek().getTopNMetricVal(), dimName) < 0; // Only add if dimName is after previousStop return belowMax && (previousStop == null || comparator.compare(dimName, previousStop) > 0); }
/** * Uses a priority queue to compute the xor aggregate. * * This function runs in linearithmic (O(n log n)) time with respect to the number of bitmaps. * * @param bitmaps input bitmaps * @return aggregated bitmap * @see #horizontal_xor(ImmutableRoaringBitmap...) */ public static MutableRoaringBitmap priorityqueue_xor(ImmutableRoaringBitmap... bitmaps) { // code could be faster, see priorityqueue_or if (bitmaps.length < 2) { throw new IllegalArgumentException("Expecting at least 2 bitmaps"); } final PriorityQueue<ImmutableRoaringBitmap> pq = new PriorityQueue<>(bitmaps.length, new Comparator<ImmutableRoaringBitmap>() { @Override public int compare(ImmutableRoaringBitmap a, ImmutableRoaringBitmap b) { return (int)(a.getLongSizeInBytes() - b.getLongSizeInBytes()); } }); Collections.addAll(pq, bitmaps); while (pq.size() > 1) { final ImmutableRoaringBitmap x1 = pq.poll(); final ImmutableRoaringBitmap x2 = pq.poll(); pq.add(ImmutableRoaringBitmap.xor(x1, x2)); } return (MutableRoaringBitmap) pq.poll(); }
/**
 * Helper method to add a value to a {@link PriorityQueue} bounded to
 * {@code maxNumValues} elements.
 *
 * <p>Below capacity the value is always inserted. At capacity, the value replaces the
 * queue head only when the comparator ranks the head before the value; with a
 * min-ordering comparator this keeps the {@code maxNumValues} largest values.
 *
 * <p>NOTE(review): assumes the queue was constructed with an explicit comparator —
 * {@code queue.comparator()} returns {@code null} for natural ordering; confirm callers.
 *
 * @param value value to be added.
 * @param queue priority queue.
 * @param maxNumValues maximum number of values in the priority queue.
 * @param <T> type for the value.
 */
public static <T> void addToPriorityQueue(@Nonnull T value, @Nonnull PriorityQueue<T> queue, int maxNumValues) {
    if (queue.size() < maxNumValues) {
        queue.offer(value);
    } else if (maxNumValues > 0 && queue.comparator().compare(queue.peek(), value) < 0) {
        // The maxNumValues > 0 guard prevents comparing against a null head when the
        // capacity is zero (the original would pass null to the comparator).
        queue.poll();
        queue.offer(value);
    }
}
}
// NOTE(review): this fragment appears truncated by extraction — `t` is declared final yet
// reassigned below, and several blocks are never closed, so intermediate lines were likely
// dropped. Comments only; verify against the full source file.

// Fast path: when the queue is already at capacity, a candidate whose boost is below
// the current head's boost can never enter the retained top-maxSize set.
if (stQueue.size() == maxSize) {
    final ScoreTerm t = stQueue.peek();
    if (boost < t.boost) return true;
    stQueue.offer(st);
    // If the offer pushed past capacity, evict the weakest term and recycle its slot.
    if (stQueue.size() > maxSize) {
        st = stQueue.poll();
        visitedTerms.remove(st.bytes.get());
        st = new ScoreTerm(new TermContext(topReaderContext));
        assert stQueue.size() <= maxSize : "the PQ size must be limited to maxSize";
        // Publish the new competitive bar so non-competitive terms can be skipped early.
        if (stQueue.size() == maxSize) {
            t = stQueue.peek();
            maxBoostAtt.setMaxNonCompetitiveBoost(t.boost);
            maxBoostAtt.setCompetitiveTerm(t.bytes.get());
/**
 * Render the selection rows to a {@link SelectionResults} object for selection queries with
 * <code>ORDER BY</code>. (Broker side)
 * <p>{@link SelectionResults} object will be used to build the broker response.
 * <p>Should be called after method "reduceWithOrdering()".
 *
 * @return {@link SelectionResults} object results.
 */
@Nonnull
public SelectionResults renderSelectionResultsWithOrdering() {
    int[] columnIndices = SelectionOperatorUtils.getColumnIndicesWithOrdering(_selectionColumns, _dataSchema);
    LinkedList<Serializable[]> orderedRows = new LinkedList<>();
    // Drain _rows down to the offset, prepending each extracted row so the final
    // list ends up in reverse drain order.
    while (_rows.size() > _selectionOffset) {
        orderedRows.addFirst(SelectionOperatorUtils.extractColumns(_rows.poll(), columnIndices));
    }
    return new SelectionResults(_selectionColumns, orderedRows);
}
}