Refine search
private boolean addRecordAndEvictIfNecessary(GlobalMetadata recordToAdd) { // First remove the element from the HashSet if it's already in there to reset // the 'LRU' piece; then add it back in boolean isNew = !metadataRecords.remove(recordToAdd); metadataRecords.add(recordToAdd); // Now remove the first element (which should be the oldest) from the list // if we've exceeded the cache size if (cacheSize != -1 && metadataRecords.size() > cacheSize) { Iterator<GlobalMetadata> recordIt = metadataRecords.iterator(); recordIt.next(); // Remove the oldest element - don't care what it is recordIt.remove(); } return isNew; }
/**
 * Removes and returns the next available ID, blocking (for a blocking queue)
 * until one arrives. It is different from its super implementation only in
 * not invoking incrementTakeSidePutPermits(). Fix for #41521.
 *
 * @return the removed ID, or {@code null} if no data became available
 * @throws InterruptedException if interrupted while waiting for data
 */
@Override
protected Long getAndRemoveNextAvailableID() throws InterruptedException {
    Long removed = null;
    acquireWriteLock();
    try {
        // Short-circuit: waitForData() is only consulted when the set is
        // empty, exactly as the two-branch original did.
        if (!this.idsAvailable.isEmpty() || waitForData()) {
            Iterator it = this.idsAvailable.iterator();
            removed = (Long) it.next();
            it.remove();
        }
    } finally {
        releaseWriteLock();
    }
    return removed;
}
/**
 * Collects the resources named {@code name} from every class loader produced
 * by {@code newClassLoaderIterator()}, de-duplicating URLs while preserving
 * the order in which the class loaders are consulted.
 *
 * @param name the resource name to look up
 * @return an enumeration over all matching URLs, first occurrence winning
 * @throws IOException if any class loader fails during the lookup
 */
@Override
public Enumeration<URL> getResources(String name) throws IOException {
    // LinkedHashSet de-duplicates while keeping class-loader order.
    final LinkedHashSet<URL> resourceUrls = new LinkedHashSet<URL>();
    final Iterator<ClassLoader> clIterator = newClassLoaderIterator();
    while (clIterator.hasNext()) {
        final Enumeration<URL> urls = clIterator.next().getResources(name);
        while (urls.hasMoreElements()) {
            resourceUrls.add(urls.nextElement());
        }
    }
    // Collections.enumeration replaces the original hand-rolled anonymous
    // Enumeration wrapper around the set's iterator — identical behavior.
    return java.util.Collections.enumeration(resourceUrls);
}
/** * Returns the next position counter present in idsAvailable set. This method is invoked by the * peek function. In case of BlockingQueue, this method waits till a valid ID is available. * * @return valid Long poistion or null depending upon the nature of the queue * @throws TimeoutException if operation is interrupted (unfortunately) */ private Long getNextAvailableID() throws InterruptedException { Long next = null; acquireReadLock(); try { if (this.idsAvailable.isEmpty()) { // Asif:Wait in case it is a blocking thread if (waitForData()) { next = (Long) this.idsAvailable.iterator().next(); } } else { next = (Long) this.idsAvailable.iterator().next(); } } finally { releaseReadLock(); } return next; }
/**
 * {@inheritDoc}
 */
public MethodGraph.Node asNode(Merger merger) {
    // Fold every remaining description into the first one, left to right.
    Iterator<MethodDescription> remaining = methodDescriptions.iterator();
    MethodDescription merged = remaining.next();
    while (remaining.hasNext()) {
        merged = merger.merge(merged, remaining.next());
    }
    return new Node(key.detach(merged.asTypeToken()), merged, visibility);
}
// Evicts entries until a fraction (evictionFactor) of maxCacheSize has been
// removed, always draining the lowest-frequency bucket first (LFU policy).
private void doEviction() {
    // Count of evicted entries. NOTE: the post-increment in the inner loop
    // condition also fires on the iteration that stops the loop, so this can
    // end one past the true count — harmless, it only bounds the loops.
    int currentlyDeleted = 0;
    // Evict a proportion of the cache in one pass rather than one entry.
    float target = maxCacheSize * evictionFactor;
    while (currentlyDeleted < target) {
        LinkedHashSet<CacheNode<Key, Value>> nodes = frequencyList[lowestFrequency];
        if (nodes.isEmpty()) {
            // Invariant: lowestFrequency always indexes a non-empty bucket.
            throw new IllegalStateException("Lowest frequency constraint violated!");
        } else {
            Iterator<CacheNode<Key, Value>> it = nodes.iterator();
            while (it.hasNext() && currentlyDeleted++ < target) {
                CacheNode<Key, Value> node = it.next();
                // Remove from both the frequency bucket and the main cache map.
                it.remove();
                cache.remove(node.k);
            }
            if (!it.hasNext()) {
                // This bucket is drained; advance lowestFrequency to the next
                // non-empty bucket before the outer loop re-reads it.
                findNextLowestFrequency();
            }
        }
    }
}
private static FieldType resolveTypeConflict(String fullName, FieldType existing, FieldType incoming) { // Prefer to upcast the incoming field to the existing first LinkedHashSet<FieldType> incomingSuperTypes = incoming.getCastingTypes(); if (incomingSuperTypes.contains(existing)) { // Incoming can be cast to existing. return existing; } // See if existing can be upcast to the incoming field's type next LinkedHashSet<FieldType> existingSuperTypes = existing.getCastingTypes(); if (existingSuperTypes.contains(incoming)) { // Existing can be cast to incoming return incoming; } // Finally, Try to pick the lowest common super type for both fields if it exists if (incomingSuperTypes.size() > 0 && existingSuperTypes.size() > 0) { LinkedHashSet<FieldType> combined = new LinkedHashSet<FieldType>(incomingSuperTypes); combined.retainAll(existingSuperTypes); if (combined.size() > 0) { return combined.iterator().next(); } } // If none of the above options succeed, the fields are conflicting throw new EsHadoopIllegalArgumentException("Incompatible types found in multi-mapping: " + "Field ["+fullName+"] has conflicting types of ["+existing+"] and ["+ incoming+"]."); }
/**
 * Remove all values of the given attribute name.
 *
 * @param name the attribute name (must not be {@code null})
 * @return the removed attribute values, or an empty list if the attribute was not present in the builder
 */
public List<AttributeValue> removeAttribute(final String name) {
    Assert.checkNotNullParam("name", name);
    final LinkedHashSet<AttributeValue> removed = attributes.remove(name);
    // Attribute was never present at all.
    if (removed == null) {
        return Collections.emptyList();
    }
    final Iterator<AttributeValue> iterator = removed.iterator();
    // Attribute present but mapped to an empty value set.
    if (! iterator.hasNext()) {
        return Collections.emptyList();
    }
    final AttributeValue first = iterator.next();
    // Single value: return the cheap singleton list, no ArrayList needed.
    if (! iterator.hasNext()) {
        return Collections.singletonList(first);
    }
    // Two or more values: copy them into a pre-sized list in set order.
    final ArrayList<AttributeValue> list = new ArrayList<>(removed.size());
    list.add(first);
    do {
        list.add(iterator.next());
    } while (iterator.hasNext());
    return list;
}
/**
 * This method is invoked by the take function. For a non-blocking queue it
 * returns null or a valid long position, while for a blocking queue it waits
 * for data in the queue or throws an exception if the thread encounters an
 * exception while waiting.
 *
 * @return the removed ID, or {@code null} if no data became available
 * @throws InterruptedException if interrupted while waiting for data
 */
protected Long getAndRemoveNextAvailableID() throws InterruptedException {
    Long taken = null;
    acquireWriteLock();
    try {
        // Short-circuit: only block in waitForData() when the set is empty,
        // matching the original two-branch structure.
        if (!this.idsAvailable.isEmpty() || waitForData()) {
            Iterator it = this.idsAvailable.iterator();
            taken = (Long) it.next();
            it.remove();
            // A successful take releases a permit back to the put side.
            this.incrementTakeSidePutPermits();
        }
    } finally {
        releaseWriteLock();
    }
    return taken;
}
// Builds a supplier that first defers to the previous mechanism and then
// hands out names from {@code set} that the predicate accepts, consuming
// each accepted name so it is never offered twice.
Supplier<String> doCreateSupplier(final LinkedHashSet<String> set, final SSLSession sslSession) {
    // Chain to the previous supplier; it may already produce a name.
    final Supplier<String> prevSupplier = prev.doCreateSupplier(set, sslSession);
    // NOTE: the iterator is created once and captured by the lambda, so
    // successive invocations of the returned supplier resume the scan where
    // the previous invocation left off.
    final Iterator<String> iterator = set.iterator();
    return () -> {
        String name = prevSupplier.get();
        if (name != null) {
            return name;
        }
        while (iterator.hasNext()) {
            name = iterator.next();
            if (predicate.test(name, sslSession))
                // try/finally removes the accepted name from the set AFTER the
                // return value is established, without a temporary variable.
                try {
                    return name;
                } finally {
                    iterator.remove();
                }
        }
        // Neither the previous supplier nor the remaining set produced a name.
        return null;
    };
}
/**
 * {@inheritDoc}
 */
public Entry<U> extendBy(MethodDescription methodDescription, Harmonizer<U> harmonizer) {
    // Widen the key to also cover the newly discovered method.
    Harmonized<U> key = this.key.extend(methodDescription.asDefined(), harmonizer);
    LinkedHashSet<MethodDescription> methodDescriptions = new LinkedHashSet<MethodDescription>();
    TypeDescription declaringType = methodDescription.getDeclaringType().asErasure();
    boolean bridge = methodDescription.isBridge();
    Visibility visibility = this.visibility;
    for (MethodDescription extendedMethod : this.methodDescriptions) {
        // Only methods declared by the same erasure can collapse with the new one.
        if (extendedMethod.getDeclaringType().asErasure().equals(declaringType)) {
            if (extendedMethod.isBridge() ^ bridge) {
                // Exactly one of the pair is a bridge: keep the non-bridge method.
                methodDescriptions.add(bridge ? extendedMethod : methodDescription);
            } else {
                // Both or neither are bridges: retain both candidates.
                methodDescriptions.add(methodDescription);
                methodDescriptions.add(extendedMethod);
            }
        }
        // Visibility always widens to cover every previously seen method.
        visibility = visibility.expandTo(extendedMethod.getVisibility());
    }
    if (methodDescriptions.isEmpty()) {
        // No same-type overlap: the new method represents the entry alone.
        return new Resolved<U>(key, methodDescription, visibility, bridge);
    } else if (methodDescriptions.size() == 1) {
        // Conflict collapsed to a single winner.
        return new Resolved<U>(key, methodDescriptions.iterator().next(), visibility, Resolved.NOT_MADE_VISIBLE);
    } else {
        // Multiple distinct candidates remain: the resolution is ambiguous.
        return new Ambiguous<U>(key, methodDescriptions, visibility);
    }
}
@Override protected String getCommonSuperClass(String type1, String type2) { // Using type closures resolved via the associate classloader LinkedHashSet<String> type1Closure = new LinkedHashSet<>(); LinkedHashSet<String> type2Closure = new LinkedHashSet<>(); InstrumentUtils.collectHierarchyClosure(targetCL, type1, type1Closure, true); InstrumentUtils.collectHierarchyClosure(targetCL, type2, type2Closure, true); // basically, do intersection type1Closure.retainAll(type2Closure); // if the intersection is not empty the first element is the closest common ancestor Iterator<String> iter = type1Closure.iterator(); if (iter.hasNext()) { String common = iter.next(); return common; } return Constants.OBJECT_INTERNAL; }
for (Iterator itr = kvp.entrySet().iterator(); itr.hasNext(); ) { Map.Entry entry = (Map.Entry) itr.next(); String key = (String) entry.getKey(); Object value = null; value = null; } else if (normalized.size() == 1) { value = normalized.iterator().next(); } else { value = (String[]) normalized.toArray(new String[normalized.size()]);
Iterator<MaterializedField> thisIter = children.iterator(); Iterator<MaterializedField> otherIter = other.children.iterator(); while (thisIter.hasNext()) { MaterializedField thisChild = thisIter.next(); MaterializedField otherChild = otherIter.next(); if (! thisChild.isEquivalent(otherChild)) { return false;
/**
 * When inserting new network addresses to the proxy the order should remain
 * in the order they were inserted.
 */
@Test
public void shouldPreserveAddressOrderOnInsertions() {
    Socks5Proxy proxy = Socks5Proxy.getSocks5Proxy();
    // Seed with any pre-existing addresses, then append three new ones.
    LinkedHashSet<String> expected = new LinkedHashSet<>(proxy.getLocalAddresses());
    for (int i = 1; i <= 3; i++) {
        expected.add(String.valueOf(i));
    }
    for (String address : expected) {
        proxy.addLocalAddress(address);
    }
    // The proxy must report the addresses in exactly the insertion order.
    List<String> actual = proxy.getLocalAddresses();
    int index = 0;
    for (String address : expected) {
        assertEquals(address, actual.get(index++));
    }
}