private int revokeGuaranteed(int count, TaskInfo failedUpdate, List<TaskInfo> toUpdate) { WM_LOG.info("Revoking " + count + " from " + guaranteedTasks.size() + " levels" + (failedUpdate == null ? "" : "; on failure")); int remainingCount = count; Iterator<Entry<Integer, TreeSet<TaskInfo>>> iterator = guaranteedTasks.descendingMap().entrySet().iterator(); // When done, handleUpdate.. may break the iterator, so the order of these checks is important. while (remainingCount > 0 && iterator.hasNext()) { remainingCount = handleUpdateForSinglePriorityLevel( remainingCount, iterator, failedUpdate, toUpdate, false); } return count - remainingCount; }
/**
 * Builds the list of backup images needed to restore this backup, ordered by
 * start timestamp.
 *
 * @param reverse if true, newest-first order; otherwise oldest-first
 * @return the backup images (this image plus all ancestors) in restore order
 */
public ArrayList<BackupImage> getRestoreDependentList(boolean reverse) {
    // TreeMap keyed by start timestamp gives us the time ordering for free.
    TreeMap<Long, BackupImage> byStartTs = new TreeMap<>();
    byStartTs.put(backupImage.startTs, backupImage);
    for (BackupImage ancestor : backupImage.getAncestors()) {
        byStartTs.put(Long.valueOf(ancestor.startTs), ancestor);
    }
    Collection<BackupImage> ordered =
        reverse ? byStartTs.descendingMap().values() : byStartTs.values();
    return new ArrayList<>(ordered);
}
/**
 * The method normalizes the weights and bone indexes data.
 * First it truncates the amount to MAXIMUM_WEIGHTS_PER_VERTEX because this is how many weights JME can handle.
 * Next it normalizes the weights so that the sum of all verts is 1.
 *
 * NOTE(review): each group is a TreeMap keyed by weight, so two bones with an
 * identical weight value collide and one overwrites the other — presumably
 * acceptable upstream, but worth confirming.
 *
 * @param maximumSize
 *            the maximum size that the data will be truncated to (usually: MAXIMUM_WEIGHTS_PER_VERTEX)
 */
private void normalizeBoneBuffers(int maximumSize) {
    for (TreeMap<Float, Integer> group : boneWeightAndIndexes) {
        if (group.size() > maximumSize) {
            NavigableMap<Float, Integer> descendingWeights = group.descendingMap();
            while (descendingWeights.size() > maximumSize) {
                // pollLastEntry() on the descending view removes the entry with the
                // SMALLEST weight from the backing map, keeping the largest ones.
                descendingWeights.pollLastEntry();
            }
        }

        // normalizing the weights so that the sum of the values is equal to '1'
        TreeMap<Float, Integer> normalizedGroup = new TreeMap<Float, Integer>();
        float sum = 0;
        for (Entry<Float, Integer> entry : group.entrySet()) {
            sum += entry.getKey();
        }

        // Skip when the sum is already 1 or when there is nothing to scale
        // (sum == 0). Note this is an exact float comparison, so sums that are
        // merely close to 1 still get rescaled.
        if (sum != 0 && sum != 1) {
            for (Entry<Float, Integer> entry : group.entrySet()) {
                normalizedGroup.put(entry.getKey() / sum, entry.getValue());
            }
            group.clear();
            group.putAll(normalizedGroup);
        }
    }
}
private boolean findGuaranteedToReallocate(TaskInfo candidate, Ref<TaskInfo> toUpdate) { Iterator<Entry<Integer, TreeSet<TaskInfo>>> iterator = guaranteedTasks.descendingMap().entrySet().iterator(); while (iterator.hasNext()) { Entry<Integer, TreeSet<TaskInfo>> entry = iterator.next();
/**
 * Applies the given text replacements to the document inside a single write
 * command, then commits the document.
 *
 * @param document the document to modify
 * @param replacements map from range to its replacement text
 */
private void performReplacements(
    final Document document, final Map<TextRange, String> replacements) {
  if (replacements.isEmpty()) {
    return;
  }
  // Sort by start offset, then apply back-to-front so earlier offsets
  // remain valid while we edit.
  final TreeMap<TextRange, String> byStart =
      new TreeMap<>(comparing(TextRange::getStartOffset));
  byStart.putAll(replacements);
  WriteCommandAction.runWriteCommandAction(
      getProject(),
      () -> {
        for (Entry<TextRange, String> replacement : byStart.descendingMap().entrySet()) {
          final TextRange range = replacement.getKey();
          document.replaceString(
              range.getStartOffset(), range.getEndOffset(), replacement.getValue());
        }
        PsiDocumentManager.getInstance(getProject()).commitDocument(document);
      });
}
}
private List<TaskInfo> preemptTasksFromMap(TreeMap<Integer, TreeSet<TaskInfo>> runningTasks, int forPriority, int forVertex, int numTasksToPreempt, String[] potentialHosts, Set<String> preemptHosts, List<TaskInfo> preemptedTaskList) { NavigableMap<Integer, TreeSet<TaskInfo>> orderedMap = runningTasks.descendingMap(); Iterator<Entry<Integer, TreeSet<TaskInfo>>> iterator = orderedMap.entrySet().iterator(); int preemptedCount = 0;
TreeMap<Integer,Long> countsByLevel = new TreeMap<>(); indexReader.getIndexInfo(sizesByLevel, countsByLevel); for (Entry<Integer,Long> entry : sizesByLevel.descendingMap().entrySet()) { out.printf("\t%-22s : %,d bytes %,d blocks\n", "Index level " + entry.getKey(), entry.getValue(), countsByLevel.get(entry.getKey()));
entrySet = topValues.entrySet(); } else { entrySet = topValues.descendingMap().entrySet();
// Build a descending view of the map and iterate its entries.
TreeMap<Long, String> treeMap = new TreeMap<Long, String>();
NavigableMap<Long, String> nmap = treeMap.descendingMap();
// Fixed: entrySet() yields Set<Map.Entry<K, V>> — Set and Iterator take a
// single type argument, the entry type, not the key/value pair.
Set<Map.Entry<Long, String>> set = nmap.entrySet();
Iterator<Map.Entry<Long, String>> iterator = set.iterator();
for (Entry<Integer, Axis<ST, S>> e : yAxisMap.descendingMap().entrySet()) { Axis<ST, S> ya = e.getValue(); if (styler.getYAxisGroupPosistion(e.getKey()) != YAxisPosition.Right) {
/**
 * Returns up to {@code n} objects drawn from the highest-keyed entries of
 * {@code map}, walking keys in descending order.
 *
 * @param n maximum number of objects to return; non-positive yields an empty list
 * @return at most n objects, highest-keyed entries first
 */
public List<Object> highest( int n ) {
    List<Object> results = new ArrayList<Object>();
    // Fixed: with the old "++count == n" check, n <= 0 was never reached and
    // the method returned EVERY object instead of none.
    if( n <= 0 ) {
        return results;
    }
    for( Entry<Object, List<Object>> entry : map.descendingMap().entrySet() ) {
        for( Object obj : entry.getValue() ) {
            results.add(obj);
            if( results.size() >= n ) {
                return results;
            }
        }
    }
    return results;
}
// Populate a TreeMap and print its entries in descending key order.
TreeMap<Integer, String> map = new TreeMap<Integer, String>();
map.put(1, "abc1");
map.put(2, "abc2");
map.put(3, "abc3");
NavigableMap<Integer, String> nmap = map.descendingMap();
// Fixed idiom: the entry type is Map.Entry; referring to it through
// NavigableMap.Entry only works via nested-type inheritance and is unidiomatic.
for (Map.Entry<Integer, String> entry : nmap.entrySet()) {
    System.out.println("Key : " + entry.getKey() + " Value : " + entry.getValue());
}
/**
 * Flattens the dated action lists into one merged list, processing the most
 * recent dates first. Each per-date list is merged via the list overload.
 *
 * @param actions actions grouped by date
 * @return merged actions, newest date group first
 */
public static List<List<SocialAction>> mergeSocialActions(
        TreeMap<Date, List<SocialAction>> actions) {
    List<List<SocialAction>> merged = new ArrayList<>();
    for (List<SocialAction> batch : actions.descendingMap().values()) {
        merged.addAll(mergeSocialActions(batch));
    }
    return merged;
}
/**
 * Returns the newest artifact version that is an authentic Jenkins artifact,
 * or null if none qualifies.
 */
public Map.Entry<VersionNumber,HPI> findYoungestJenkinsArtifact() {
    // Walk versions highest-first; the first authentic one wins.
    for (Map.Entry<VersionNumber,HPI> candidate : artifacts.descendingMap().entrySet()) {
        if (candidate.getValue().isAuthenticJenkinsArtifact()) {
            return candidate;
        }
    }
    return null;
}
/**
 * Returns a reverse-order view of this map's keys, implemented by delegating
 * to {@code descendingMap().navigableKeySet()}.
 *
 * @since 1.6
 */
public NavigableSet<K> descendingKeySet() {
    return descendingMap().navigableKeySet();
}
@Override
public Function<Map<K, Long>, NavigableMap<Long, M>> finisher() {
    // Invert the accumulated Map<K, Long>: group keys by their Long value
    // (into a TreeMap so counts are sorted), collapse each key group with the
    // downstream collector, then flip to descending so the largest count
    // comes first.
    return m1 -> m1.entrySet().stream()
        .collect(Collectors.groupingBy(Map.Entry::getValue, TreeMap::new,
            mapping(Map.Entry::getKey, downstream)))
        .descendingMap();
}
/**
 * Returns a reverse-order view of this map's keys, implemented by delegating
 * to {@code descendingMap().navigableKeySet()}.
 *
 * @since 1.6
 */
public NavigableSet<K> descendingKeySet() {
    return descendingMap().navigableKeySet();
}
@Override
Iterator<Map.Entry<Long, VALUE>> descendingIterator(long from, long to) {
    // Equivalent to the original descendingMap().subMap(to, false, from, true):
    // select the natural-order slice [from, to) and view it in reverse.
    return map.subMap(from, true, to, false).descendingMap().entrySet().iterator();
}
private List<String> addLibraries(List<String> categories, boolean isEmbed, Map<String, ? extends ClientLibrary> librariesMap, String[] requestedCategories, int currentPosition) { // the order is given by the paths // we just sort alphabetically in here TreeMap<String, ClientLibrary> sortedLibrariesMap = new TreeMap<String, ClientLibrary>(librariesMap); // add in reverse order (because each might add a number of dependent libraries) for (Map.Entry<String, ClientLibrary> entry: sortedLibrariesMap.descendingMap().entrySet()) { ClientLibraryDependency dependency = new ClientLibraryDependency(this, entry.getValue(), new HashSet<String>(Arrays.asList(requestedCategories)), isEmbed, type); categories = dependency.buildDependencyTree(categories, currentPosition); } return categories; }
/**
 * Aggregates the recorded throwable types by frequency, most frequent first.
 * Classes that share the same count are joined into one comma-separated value
 * (previously, equal-count classes silently overwrote each other in the map).
 *
 * @return map from occurrence count (descending) to class name(s) with that count
 */
public SortedMap<Integer, String> aggregateTypesOrderedByFrequencyAsSortedMap() {
    cleanUpOldExceptions();
    final Multiset<Class<?>> classes = HashMultiset.create();
    for( final SimpleImmutableEntry<ZonedDateTime, Throwable> entry : timestampedThrowables ) {
        classes.add(entry.getValue().getClass());
    }
    final TreeMap<Integer, String> sortedMap = Maps.newTreeMap();
    for( final Class<?> cls : classes.elementSet() ) {
        // merge instead of put: keep every class name when counts collide
        sortedMap.merge(classes.count(cls), cls.getCanonicalName(), (a, b) -> a + ", " + b);
    }
    return sortedMap.descendingMap();
}