java.util.PriorityQueue#peek() Source Code Examples

Listed below are example usages of java.util.PriorityQueue#peek(). Follow the project links to view the full source on GitHub.
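Before the project snippets, here is a minimal self-contained sketch (not taken from any of the projects below) of the peek() contract they all rely on: peek() returns the head of the queue without removing it, and returns null when the queue is empty.

import java.util.PriorityQueue;

public class PeekDemo {
    public static void main(String[] args) {
        PriorityQueue<Integer> pq = new PriorityQueue<>();
        pq.offer(5);
        pq.offer(1);
        pq.offer(3);

        // peek() returns the head (the smallest element under the natural ordering)
        // without removing it, so the size stays the same.
        System.out.println(pq.peek()); // 1
        System.out.println(pq.size()); // 3

        // poll() removes the head; the next peek() sees the new smallest element.
        pq.poll();
        System.out.println(pq.peek()); // 3

        // On an empty queue, peek() returns null rather than throwing,
        // which is why the examples below guard with size()/isEmpty() checks.
        pq.clear();
        System.out.println(pq.peek()); // null
    }
}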

Example 1 - Project: AILibs, File: TAGPolicy.java
@Override
public void updatePath(final ILabeledPath<T, A> path, final Double playout, final int playoutLength) {
	for (T node : path.getNodes()) {
		this.visitsPerNode.put(node, this.visitsPerNode.computeIfAbsent(node, n -> 0) + 1);

		/* update list of best observed scores */
		PriorityQueue<Double> bestScores = this.statsPerNode.computeIfAbsent(node, n -> this.isMaximize ? new PriorityQueue<>((c1,c2) -> Double.compare(c2, c1)) : new PriorityQueue<>());
		if (bestScores.size() < this.s) {
			bestScores.add(playout);
		}
		else if (bestScores.peek() < playout) {
			bestScores.poll();
			bestScores.add(playout);
		}
	}
}
 
Example 2 - Project: datafu, File: SetDifference.java
/**
 * Counts how many elements in the priority queue match the
 * element at the front of the queue, which should be from the first bag. 
 * 
 * @param pq priority queue
 * @return number of matches
 */
public int countMatches(PriorityQueue<Pair> pq)
{
  Pair nextPair = pq.peek();
  Tuple data = nextPair.data;
  
  // sanity check
  if (!nextPair.index.equals(0))
  {
    throw new RuntimeException("Expected next bag to have index 0");
  }
  
  int matches = 0;
  for (Pair p : pq) {
    if (data.equals(p.data))
      matches++;
  }
  // subtract 1 since element matches itself
  return matches - 1;
}
 
Example 3 - Project: java-technology-stack, File: Solution.java
public List<Integer> topKFrequent(int[] nums, int k) {

    TreeMap<Integer, Integer> map = new TreeMap<>();
    for (int num : nums) {
        if (map.containsKey(num))
            map.put(num, map.get(num) + 1);
        else
            map.put(num, 1);
    }

    // Min-heap ordered by frequency: the head is the least frequent of the k entries kept so far.
    PriorityQueue<Freq> pq = new PriorityQueue<>();
    for (int key : map.keySet()) {
        if (pq.size() < k)
            pq.add(new Freq(key, map.get(key)));
        else if (map.get(key) > pq.peek().freq) {
            pq.remove();
            pq.add(new Freq(key, map.get(key)));
        }
    }

    LinkedList<Integer> res = new LinkedList<>();
    while (!pq.isEmpty())
        res.add(pq.remove().e);
    return res;
}
 
Example 4 - Project: algorithm-primer, File: _040_Min_K_In_Array.java
public ArrayList<Integer> minK(int [] input, int k) {
    if (k <= 0 || k > input.length) return new ArrayList<Integer>();

    PriorityQueue<Integer> maxHeap = new PriorityQueue<Integer>(k, Collections.reverseOrder());
    // PriorityQueue<Integer> minHeap = new PriorityQueue<Integer>(k);

    for (int i = 0; i < k; i ++) {
        maxHeap.offer(input[i]);
    }

    for (int j = k; j < input.length; j ++){
        if (input[j] < maxHeap.peek()){
            maxHeap.poll();
            maxHeap.offer(input[j]);
        }
    }

    return new ArrayList<>(maxHeap);
}
 
public int findKthLargest(int[] nums, int k) {
    PriorityQueue<Integer> minRootHeap = new PriorityQueue<>();
    // PriorityQueue<Integer> minRootHeap = new PriorityQueue<>(Comparator.reverseOrder());

    // for (int num : nums) {
    //     System.out.println("num = " + num);
    //     minRootHeap.offer(num);
    //     if (minRootHeap.size() > k) {
    //         minRootHeap.poll();
    //     }
    // }

    for (int i = 0; i < k; i ++) {
        minRootHeap.offer(nums[i]);
    }

    for (int j = k; j < nums.length; j ++) {
        if (nums[j] > minRootHeap.peek()) {
            minRootHeap.poll();
            minRootHeap.offer(nums[j]);
        }
    }

    return minRootHeap.peek();
}
 
Example 6 - Project: AlgoCS, File: KClosestPointsOrigin_973.java
public int[][] kClosest(int[][] points, int k) {

    PriorityQueue<Pair> pQ = new PriorityQueue<>();
    int[][] res = new int[k][2];

    for (int[] point : points) {
        Pair pair = new Pair(point[0], point[1]);

        if (pQ.size() == k) {
            if (pQ.peek() != null && pQ.peek().compareTo(pair) < 0) {
                pQ.poll();
                pQ.add(pair);
            }
        } else pQ.add(pair);
    }
    int i = 0;
    while (!pQ.isEmpty()) {
        Pair p = pQ.poll();
        res[i++] = new int[]{p.x, p.y};
    }
    return res;
}
 
Example 7 - Project: gcs, File: FontMapperImpl.java
/**
 * For debugging. Prints all matches and returns the best match.
 */
private FontMatch printMatches(PriorityQueue<FontMatch> queue)
{
    FontMatch bestMatch = queue.peek();
    System.out.println("-------");
    while (!queue.isEmpty())
    {
        FontMatch match = queue.poll();
        FontInfo info = match.info;
        System.out.println(match.score + " | " + info.getMacStyle() + " " +
                           info.getFamilyClass() + " " + info.getPanose() + " " +
                           info.getCIDSystemInfo() + " " + info.getPostScriptName() + " " +
                           info.getFormat());
    }
    System.out.println("-------");
    return bestMatch;
}
 
Example 8 - Project: spring-boot-demo, File: Sub215.java
public int findKthLargest(int[] nums, int k) {
    // Min-heap capped at k elements; after the loop its head is the kth largest value.
    PriorityQueue<Integer> priorityQueue = new PriorityQueue<>();
    for (int i = 0; i < nums.length; i++) {
        priorityQueue.add(nums[i]);
        if (priorityQueue.size() > k) {
            priorityQueue.poll();
        }
    }
    return priorityQueue.peek();
}
 
@Override
public Iterator<IntermediateSampleData<T>> sampleInPartition(Iterator<T> input) {
	if (numSamples == 0) {
		return emptyIntermediateIterable;
	}

	// This queue holds a fixed number of elements with the top K weights for the current partition.
	PriorityQueue<IntermediateSampleData<T>> queue = new PriorityQueue<IntermediateSampleData<T>>(numSamples);
	int index = 0;
	IntermediateSampleData<T> smallest = null;
	while (input.hasNext()) {
		T element = input.next();
		if (index < numSamples) {
			// Fill the queue with first K elements from input.
			queue.add(new IntermediateSampleData<T>(random.nextDouble(), element));
			smallest = queue.peek();
		} else {
			double rand = random.nextDouble();
			// Remove the element with the smallest weight, and append current element into the queue.
			if (rand > smallest.getWeight()) {
				queue.remove();
				queue.add(new IntermediateSampleData<T>(rand, element));
				smallest = queue.peek();
			}
		}
		index++;
	}
	return queue.iterator();
}
 
Example 10 - Project: datafu, File: SetIntersect.java
public boolean all_equal(PriorityQueue<pair> pq)
{
  Object o = pq.peek().data;
  for (pair p : pq) {
    if (!o.equals(p.data))
      return false;
  }
  return true;
}
 
/**
 * Heap / priority queue. O(n log k) time, O(k) space.
 * Create a min heap as a window.
 * For each number in the array, add it to the heap.
 * If heap size > k, poll from the heap.
 * Return the top of the heap at the end.
 */
public int findKthLargest(int[] nums, int k) {
  PriorityQueue<Integer> pq = new PriorityQueue<>();
  for (int n : nums) {
    pq.offer(n);
    if (pq.size() > k) {
      pq.poll();
    }
  }
  return pq.peek();
}
 
public List<Integer> topKFrequent(int[] nums, int k) {
    Map<Integer, Integer> countMap = new HashMap<>();
    for (int i = 0; i < nums.length; i++) {
        countMap.put(nums[i], countMap.containsKey(nums[i]) ? countMap.get(nums[i]) + 1 : 1);
    }

    PriorityQueue<Pair> queue = new PriorityQueue<>(new Comparator<Pair>() {
        @Override
        public int compare(Pair p1, Pair p2) {
            return p1.count - p2.count;
        }
    });


    for (Map.Entry<Integer, Integer> entry : countMap.entrySet()) {
        if (queue.size() < k) {
            queue.add(new Pair(entry.getKey(), entry.getValue()));
        } else {
            // Take a peek at the element at the top of the heap
            Pair heapTop = queue.peek();
            if (heapTop.count < entry.getValue()) {
                queue.poll();
                queue.add(new Pair(entry.getKey(), entry.getValue()));
            }
        }
    }

    List<Integer> res = new ArrayList<>();
    while (!queue.isEmpty()) {
        res.add(0, queue.poll().key);
    }

    return res;
}
 
Example 13 - Project: JImageHash, File: BinaryTreeTest.java
@Test
public void searchDistantItem() {
	Hash hash = TestResources.createHash("101010100011", 0);
	Hash needle = TestResources.createHash("101010101111", 0);

	binTree.addHash(hash, 1);
	PriorityQueue<Result> results = binTree.getElementsWithinHammingDistance(needle, 100);

	Result r = results.peek();
	assertEquals(1, r.value);
	assertEquals(2, r.distance);
}
 
Example 14 - Project: pinpoint, File: AgentEventServiceImpl.java
private List<AgentEvent> createAgentEvents(List<AgentEventBo> agentEventBos) {
    if (CollectionUtils.isEmpty(agentEventBos)) {
        return Collections.emptyList();
    }
    List<AgentEvent> agentEvents = new ArrayList<>(agentEventBos.size());
    PriorityQueue<DurationalAgentEvent> durationalAgentEvents = new PriorityQueue<>(agentEventBos.size(), AgentEvent.EVENT_TIMESTAMP_ASC_COMPARATOR);
    for (AgentEventBo agentEventBo : agentEventBos) {
        if (agentEventBo.getEventType().isCategorizedAs(AgentEventTypeCategory.DURATIONAL)) {
            durationalAgentEvents.add(createDurationalAgentEvent(agentEventBo, false));
        } else {
            boolean hasMessage = !ArrayUtils.isEmpty(agentEventBo.getEventBody());
            agentEvents.add(createAgentEvent(agentEventBo, hasMessage));
        }
    }
    long durationStartTimestamp = DurationalAgentEvent.UNKNOWN_TIMESTAMP;
    while (!durationalAgentEvents.isEmpty()) {
        DurationalAgentEvent currentEvent = durationalAgentEvents.remove();
        if (durationStartTimestamp == DurationalAgentEvent.UNKNOWN_TIMESTAMP) {
            durationStartTimestamp = currentEvent.getEventTimestamp();
        }
        currentEvent.setDurationStartTimestamp(durationStartTimestamp);
        DurationalAgentEvent nextEvent = durationalAgentEvents.peek();
        if (nextEvent != null) {
            long nextEventTimestamp = nextEvent.getEventTimestamp();
            currentEvent.setDurationEndTimestamp(nextEventTimestamp);
            durationStartTimestamp = nextEventTimestamp;
        }
        agentEvents.add(currentEvent);
    }
    return agentEvents;
}
 
Example 15 - Project: j2objc, File: PriorityQueueTest.java
/**
 * serialization/deserialization.
 */
public void test_Serialization_casting() throws Exception {
    Integer[] array = { 2, 45, 7, -12, 9, 23, 17, 1118, 10, 16, 39 };
    List<Integer> list = Arrays.asList(array);
    PriorityQueue<Integer> srcIntegerQueue = new PriorityQueue<Integer>(
            list);
    PriorityQueue<String> destStringQueue = (PriorityQueue<String>) SerializationTester
            .getDeserilizedObject(srcIntegerQueue);
    // will not incur class cast exception.
    Object o = destStringQueue.peek();
    Arrays.sort(array);
    Integer I = (Integer) o;
    assertEquals(array[0], I);
}
 
Example 16 - Project: cs-summary-reflection, File: Leetocde_215_Sort.java
/**
 * @author 梦境迷离
 * @description Heap approach: time complexity O(N log K), space complexity O(K).
 * @time 2018-04-08
 */
public int findKthLargest1(int[] nums, int k) {
    PriorityQueue<Integer> pq = new PriorityQueue<>();
    for (int val : nums) {
        pq.add(val);
        // Keep the heap at K elements
        if (pq.size() > k) {
            // Whenever the size exceeds K, remove the smallest element
            pq.poll();
        }
    }
    // The heap top is now the Kth largest element
    return pq.peek();
}
 
Example 17 - Project: cs-summary-reflection, File: GetLeastNumbers.java
/**
 * @description Priority-queue (max-heap) solution.
 * @param input the input array
 * @param k the number of smallest elements to return
 * @return the k smallest elements of the input array
 */
@SuppressWarnings("unused")
public ArrayList<Integer> GetLeastNumbers2_Solution(int[] input, int k) {
    ArrayList<Integer> result = new ArrayList<Integer>();
    int length = input.length;
    if (k > length || k == 0) {
        return result;
    }
    PriorityQueue<Integer> maxHeap =
            new PriorityQueue<Integer>(
                    k,
                    new Comparator<Integer>() {
                        @Override
                        public int compare(Integer o1, Integer o2) {
                            return o2.compareTo(o1);
                        }
                    });
    for (int i = 0; i < length; i++) {
        if (maxHeap.size() != k) {
            maxHeap.offer(input[i]);
        } else if (maxHeap.peek() > input[i]) {
            Integer temp = maxHeap.poll();
            temp = null;
            maxHeap.offer(input[i]);
        }
    }
    for (Integer integer : maxHeap) {
        result.add(integer);
    }
    return result;
}
 
Example 18 - Project: Flink-CEPplus, File: NFA.java
private void processMatchesAccordingToSkipStrategy(
		SharedBufferAccessor<T> sharedBufferAccessor,
		NFAState nfaState,
		AfterMatchSkipStrategy afterMatchSkipStrategy,
		PriorityQueue<ComputationState> potentialMatches,
		PriorityQueue<ComputationState> partialMatches,
		List<Map<String, List<T>>> result) throws Exception {

	nfaState.getCompletedMatches().addAll(potentialMatches);

	ComputationState earliestMatch = nfaState.getCompletedMatches().peek();

	if (earliestMatch != null) {

		ComputationState earliestPartialMatch;
		while (earliestMatch != null && ((earliestPartialMatch = partialMatches.peek()) == null ||
			isEarlier(earliestMatch, earliestPartialMatch))) {

			nfaState.setStateChanged();
			nfaState.getCompletedMatches().poll();
			List<Map<String, List<EventId>>> matchedResult =
				sharedBufferAccessor.extractPatterns(earliestMatch.getPreviousBufferEntry(), earliestMatch.getVersion());

			afterMatchSkipStrategy.prune(
				partialMatches,
				matchedResult,
				sharedBufferAccessor);

			afterMatchSkipStrategy.prune(
				nfaState.getCompletedMatches(),
				matchedResult,
				sharedBufferAccessor);

			result.add(sharedBufferAccessor.materializeMatch(matchedResult.get(0)));
			sharedBufferAccessor.releaseNode(earliestMatch.getPreviousBufferEntry());
			earliestMatch = nfaState.getCompletedMatches().peek();
		}

		nfaState.getPartialMatches().removeIf(pm -> pm.getStartEventID() != null && !partialMatches.contains(pm));
	}
}
 
Example 19 - Project: lucene-solr, File: PossibilityIterator.java
/**
 * <p>
 * We assume here that the passed-in inner LinkedHashMaps are already sorted
 * in order of "Best Possible Correction".
 * </p>
 */
public PossibilityIterator(
    Map<Token,LinkedHashMap<String,Integer>> suggestions,
    int maximumRequiredSuggestions, int maxEvaluations, boolean overlap) {
  this.suggestionsMayOverlap = overlap;
  for (Map.Entry<Token,LinkedHashMap<String,Integer>> entry : suggestions
      .entrySet()) {
    Token token = entry.getKey();
    if (entry.getValue().size() == 0) {
      continue;
    }
    List<SpellCheckCorrection> possibleCorrections = new ArrayList<>();
    for (Map.Entry<String,Integer> entry1 : entry.getValue().entrySet()) {
      SpellCheckCorrection correction = new SpellCheckCorrection();
      correction.setOriginal(token);
      correction.setCorrection(entry1.getKey());
      correction.setNumberOfOccurences(entry1.getValue());
      possibleCorrections.add(correction);
    }
    possibilityList.add(possibleCorrections);
  }
  
  int wrapSize = possibilityList.size();
  if (wrapSize == 0) {
    done = true;
  } else {
    correctionIndex = new int[wrapSize];
    for (int i = 0; i < wrapSize; i++) {
      int suggestSize = possibilityList.get(i).size();
      if (suggestSize == 0) {
        done = true;
        break;
      }
      correctionIndex[i] = 0;
    }
  }
  PriorityQueue<RankedSpellPossibility> rankedPossibilities = new PriorityQueue<>(
      11, new RankComparator());
  Set<RankedSpellPossibility> removeDuplicates = null;
  if (suggestionsMayOverlap) {
    removeDuplicates = new HashSet<>();
  }
  long numEvaluations = 0;
  while (numEvaluations < maxEvaluations && internalHasNext()) {
    RankedSpellPossibility rsp = internalNext();
    numEvaluations++;
    if (rankedPossibilities.size() >= maximumRequiredSuggestions
        && rsp.rank >= rankedPossibilities.peek().rank) {
      continue;
    }
    if (!isSuggestionForReal(rsp)) {
      continue;
    }
    if (removeDuplicates == null) {
      rankedPossibilities.offer(rsp);
    } else {
      // Needs to be in token-offset order so that the match-and-replace
      // option for collations can work.
      Collections.sort(rsp.corrections, new StartOffsetComparator());
      if (removeDuplicates.add(rsp)) {
        rankedPossibilities.offer(rsp);
      }
    }
    if (rankedPossibilities.size() > maximumRequiredSuggestions) {
      RankedSpellPossibility removed = rankedPossibilities.poll();
      if (removeDuplicates != null) {
        removeDuplicates.remove(removed);
      }
    }
  }
  
  RankedSpellPossibility[] rpArr = new RankedSpellPossibility[rankedPossibilities
      .size()];
  for (int i = rankedPossibilities.size() - 1; i >= 0; i--) {
    rpArr[i] = rankedPossibilities.remove();
  }
  rankedPossibilityIterator = Arrays.asList(rpArr).iterator();
}
 
Example 20 - Project: flink, File: DistributedRandomSampler.java
/**
 * Sample algorithm for the second phase. This operation should be executed as the UDF of
 * an all reduce operation.
 *
 * @param input The intermediate sample output generated in the first phase.
 * @return The sampled output.
 */
public Iterator<T> sampleInCoordinator(Iterator<IntermediateSampleData<T>> input) {
	if (numSamples == 0) {
		return emptyIterable;
	}

	// This queue holds a fixed number of elements with the top K weights for the coordinator.
	PriorityQueue<IntermediateSampleData<T>> reservoir = new PriorityQueue<IntermediateSampleData<T>>(numSamples);
	int index = 0;
	IntermediateSampleData<T> smallest = null;
	while (input.hasNext()) {
		IntermediateSampleData<T> element = input.next();
		if (index < numSamples) {
			// Fill the queue with first K elements from input.
			reservoir.add(element);
			smallest = reservoir.peek();
		} else {
			// If current element weight is larger than the smallest one in queue, remove the element
			// with the smallest weight, and append current element into the queue.
			if (element.getWeight() > smallest.getWeight()) {
				reservoir.remove();
				reservoir.add(element);
				smallest = reservoir.peek();
			}
		}
		index++;
	}
	final Iterator<IntermediateSampleData<T>> itr = reservoir.iterator();

	return new Iterator<T>() {
		@Override
		public boolean hasNext() {
			return itr.hasNext();
		}

		@Override
		public T next() {
			return itr.next().getElement();
		}

		@Override
		public void remove() {
			itr.remove();
		}
	};
}