/**
 * Copy constructor that builds a new set as the union of an existing
 * {@code FieldSet} and the given field IDs.
 *
 * If no IDs are supplied (null or empty varargs), the backing collection of
 * the source set is shared directly, since it is already immutable.
 *
 * @param fieldSet the set to copy, NOT NULL
 * @param fieldIDs additional field IDs to include, may be null or empty
 */
private FieldSet(FieldSet fieldSet, int... fieldIDs) {
    if (fieldIDs == null || fieldIDs.length == 0) {
        this.collection = fieldSet.collection;
    } else {
        // Pre-size generously (2x) to keep the load factor low.
        HashSet<Integer> merged =
                new HashSet<Integer>(2 * (fieldSet.collection.size() + fieldIDs.length));
        merged.addAll(fieldSet.collection);
        for (int id : fieldIDs) {
            merged.add(id);
        }
        this.collection = Collections.unmodifiableSet(merged);
    }
}
/**
 * Computes the distinct union of the limitations advertised by the given
 * index capabilities.
 *
 * @param capabilities the capabilities whose limitations are merged
 * @return the union of all limitations, as an array in unspecified order
 */
private IndexLimitation[] limitationsUnion( Iterable<IndexCapability> capabilities )
{
    HashSet<IndexLimitation> merged = new HashSet<>();
    for ( IndexCapability capability : capabilities )
    {
        for ( IndexLimitation limitation : capability.limitations() )
        {
            merged.add( limitation );
        }
    }
    return merged.toArray( new IndexLimitation[merged.size()] );
}
}
/**
 * Determines which bucket IDs must be re-queried.
 *
 * A member's buckets are retried when that member never delivered any
 * results, or its result stream was cut off before the final chunk arrived.
 * Stale partial results for such members are discarded so the retry starts
 * clean.
 *
 * @return the set of bucket IDs that need to be retried
 */
private Set<Integer> calculateRetryBuckets() {
    Iterator<Map.Entry<InternalDistributedMember, List<Integer>>> memberToBucketList =
        node2bucketIds.entrySet().iterator();
    final HashSet<Integer> retryBuckets = new HashSet<Integer>();
    while (memberToBucketList.hasNext()) {
        Map.Entry<InternalDistributedMember, List<Integer>> e = memberToBucketList.next();
        InternalDistributedMember m = e.getKey();
        // Retry every bucket owned by a member we never heard from, or whose
        // stream ended before the last chunk was received.
        if (!this.resultsPerMember.containsKey(m)
            || (!((MemberResultsList) this.resultsPerMember.get(m)).isLastChunkReceived())) {
            retryBuckets.addAll(e.getValue());
            this.resultsPerMember.remove(m);
        }
    }
    if (logger.isDebugEnabled()) {
        // StringBuilder: this is thread-confined, so the synchronized
        // StringBuffer was pure overhead. Delimiter handling fixed so the
        // list renders as "[1,2]" instead of the old "[,1,2]".
        StringBuilder logStr = new StringBuilder();
        logStr.append("Query ").append(this.query.getQueryString())
            .append(" needs to retry bucketsIds: [");
        String separator = "";
        for (Integer i : retryBuckets) {
            logStr.append(separator).append(i);
            separator = ",";
        }
        logStr.append("]");
        logger.debug(logStr.toString());
    }
    return retryBuckets;
}
/**
 * Grabs up to {@code nodesNeeded} free nodes, draining the given pools in
 * order until the request is satisfied or the pools are exhausted.
 *
 * @param nodesNeeded how many nodes are wanted
 * @param pools the pools to draw from, consulted in array order
 * @return the nodes that could be obtained (may be fewer than requested)
 */
public static Collection<Node> takeNodes(int nodesNeeded, NodePool[] pools) {
    LOG.debug("Trying to grab {} free nodes from {}", nodesNeeded, pools);
    HashSet<Node> taken = new HashSet<>();
    int remaining = nodesNeeded;
    for (NodePool pool : pools) {
        Collection<Node> fromPool = pool.takeNodes(remaining);
        taken.addAll(fromPool);
        remaining -= fromPool.size();
        LOG.debug("Got {} nodes so far need {} more nodes", taken.size(), remaining);
        if (remaining <= 0) {
            break;
        }
    }
    return taken;
}
// NOTE(review): mid-method fragment — the enclosing loop headers are not
// visible here, and `entry` is declared twice (once as
// Entry<String, SupervisorInfo>, once as Entry<String, StormBase>), which
// cannot compile in a single scope. This looks like two separate iteration
// loops concatenated by extraction — TODO confirm against the full file.
// The visible logic: for each storm, fetch its assignment; if present,
// collect the distinct NodePorts its tasks run on (a HashSet deduplicates
// the task->nodeport values) and build a TopologySummary from the storm id,
// name, task count, distinct worker count, uptime and status string.
Entry<String, SupervisorInfo> entry = it.next(); String supervisorId = entry.getKey(); SupervisorInfo info = entry.getValue(); Entry<String, StormBase> entry = it.next(); String stormId = entry.getKey(); StormBase base = entry.getValue(); Assignment assignment = stormClusterState.assignment_info(stormId,null); if (assignment != null) { HashSet<NodePort> workers = new HashSet<NodePort>(); Collection<NodePort> entryColl = assignment.getTaskToNodeport().values(); workers.addAll(entryColl); topologySummaries.add(new TopologySummary(stormId, base.getStormName(), assignment.getTaskToNodeport().size(), workers.size(), TimeUtils.time_delta(base .getLanchTimeSecs()), extractStatusStr(base)));
// NOTE(review): garbled mid-method fragment — `keyIt` and `key` are each
// declared twice in nested scope and several braces never close, so this
// cannot be the literal file content; it appears to be two variants of the
// same dataflow transfer function spliced together — TODO recover the
// original from version control. The visible intent: start from
// outset = inset, kill entries from keylist whose key is an ArrayRef, and
// when condset is empty (unconditional) or some condition value is present
// in inset, generate by adding genset (and absgenset) into outset.
outset.addAll(inset); Iterator keyIt = keylist.iterator(); while (keyIt.hasNext()) { Object key = keyIt.next(); if (key instanceof ArrayRef) { outset.remove(key); Iterator keyIt = keylist.iterator(); while (keyIt.hasNext()) { Object key = keyIt.next(); if (key instanceof ArrayRef) { Value base = ((ArrayRef) key).getBase(); if (condset == null || (condset.size() == 0)) { outset.addAll(genset); } else { Iterator condIt = condset.iterator(); while (condIt.hasNext()) { if (inset.contains(condIt.next())) { outset.addAll(genset); break; outset.addAll(absgenset);
/** * Add all objects that this table depends on to the hash set. * * @param dependencies the current set of dependencies */ public void addDependencies(HashSet<DbObject> dependencies) { if (dependencies.contains(this)) { // avoid endless recursion return; } if (sequences != null) { dependencies.addAll(sequences); } ExpressionVisitor visitor = ExpressionVisitor.getDependenciesVisitor( dependencies); for (Column col : columns) { col.isEverything(visitor); } if (constraints != null) { for (Constraint c : constraints) { c.isEverything(visitor); } } dependencies.add(this); }
/**
 * Decides whether storage-side aggregation is still required for a scan of
 * the given cuboid.
 *
 * When the group-by dimensions plus the single-value (constant-filtered)
 * dimensions together cover every column of the cuboid, each stored row is
 * already a final aggregate and no further aggregation is needed.
 *
 * @param cuboid the cuboid being scanned
 * @param groupD dimensions appearing in the group-by
 * @param singleValueD dimensions fixed to a single value by filters
 * @return true if storage aggregation is required, false otherwise
 */
public boolean isNeedStorageAggregation(Cuboid cuboid, Collection<TblColRef> groupD,
        Collection<TblColRef> singleValueD) {
    HashSet<TblColRef> covered = Sets.newHashSet();
    covered.addAll(groupD);
    covered.addAll(singleValueD);
    boolean needed = cuboid.getColumns().size() != covered.size();
    if (needed) {
        logger.debug("Need storage aggregation");
    } else {
        logger.debug("Does not need storage aggregation");
    }
    return needed;
}
// NOTE(review): mid-method fragment — `iter`, `usedAliases`, `buf` and the
// enclosing loop are declared outside this excerpt, and the `if` block's
// closing brace is not visible; do not edit without the full method.
// Visible intent: walk columns and their aliases in lockstep, seed the
// uniqueness set with the aliases already used, and append ", <column>"
// only for aliases not seen before (HashSet.add returns false on
// duplicates, which both tests and records membership in one call).
Iterator<String> columnAliasIter = columnAliases.iterator(); HashSet<String> columnsUnique = new HashSet<String>(); if (usedAliases!=null) { columnsUnique.addAll( Arrays.asList(usedAliases) ); String column = iter.next(); String columnAlias = columnAliasIter.next(); if ( columnsUnique.add(columnAlias) ) { buf.append(", ") .append(column)
/**
 * Recursively searches this SET node for statement sequences.
 *
 * For each sub-body, first recurses into all child nodes, accumulating the
 * union of their bodies; the finder is then applied to this node with that
 * union, so parent-level sequences are found over everything the children
 * cover.
 *
 * @param sf the sequence finder to apply at each node
 * @param davaBody the Dava body being constructed
 */
public void find_StatementSequences(SequenceFinder sf, DavaBody davaBody) {
    Iterator<IterableSet> bodyIt = subBodies.iterator();
    while (bodyIt.hasNext()) {
        IterableSet body = bodyIt.next();
        IterableSet children = body2childChain.get(body);
        HashSet<AugmentedStmt> mergedChildBodies = new HashSet<AugmentedStmt>();
        for (Iterator childIt = children.iterator(); childIt.hasNext();) {
            SETNode child = (SETNode) childIt.next();
            child.find_StatementSequences(sf, davaBody);
            mergedChildBodies.addAll(child.get_Body());
        }
        sf.find_StatementSequences(this, body, mergedChildBodies, davaBody);
    }
}
private static Set<String> buildContraintCategories() { HashSet<String> categories = new HashSet<String>(); categories.addAll( Arrays.asList( "23", // "integrity constraint violation" "27", // "triggered data change violation" "44" // "with check option violation" ) ); return Collections.unmodifiableSet( categories ); }
/**
 * Copy constructor that builds a new set as the union of an existing
 * {@code FieldSet} and one additional ID.
 *
 * @param fieldSet The first part of the new set, NOT NULL!
 * @param fieldID The ID to be added, NOT NULL!
 */
private FieldSet(FieldSet fieldSet, Integer fieldID) {
    if (fieldSet.size() == 0) {
        // Empty source: a singleton is the cheapest immutable representation.
        this.collection = Collections.singleton(fieldID);
        return;
    }
    // Pre-size generously (2x) to keep the load factor low.
    HashSet<Integer> merged = new HashSet<Integer>(2 * (fieldSet.collection.size() + 1));
    merged.addAll(fieldSet.collection);
    merged.add(fieldID);
    this.collection = Collections.unmodifiableSet(merged);
}
/**
 * Attempts to acquire {@code nodesNeeded} free nodes by draining each pool
 * in turn, stopping as soon as the request is met.
 *
 * @param nodesNeeded number of nodes requested
 * @param pools pools to draw from, in array order
 * @return the acquired nodes; may contain fewer than requested if the
 *         pools run dry
 */
public static Collection<Node> takeNodes(int nodesNeeded, NodePool[] pools) {
    LOG.debug("Trying to grab {} free nodes from {}", nodesNeeded, pools);
    HashSet<Node> acquired = new HashSet<>();
    int stillNeeded = nodesNeeded;
    for (NodePool pool : pools) {
        Collection<Node> batch = pool.takeNodes(stillNeeded);
        acquired.addAll(batch);
        stillNeeded -= batch.size();
        LOG.debug("Got {} nodes so far need {} more nodes", acquired.size(), stillNeeded);
        if (stillNeeded <= 0) {
            break;
        }
    }
    return acquired;
}
// NOTE(review): mid-method fragment — the braces for the labeled `if`s and
// the `while`/`for` bodies never close in this excerpt, so the method
// continues beyond what is visible; do not edit without the full file.
// Visible intent: for each statement in the body, accumulate the read and
// write sets of every transitively reachable call target. A statement is
// abandoned (labeled `continue statement;`) when any target is native
// (effects unknown) or when the combined read/write set size exceeds
// `threshold` (analysis would be too imprecise/expensive).
TransitiveTargets tt = new TransitiveTargets(cg); statement: for (Iterator sIt = body.getUnits().iterator(); sIt.hasNext();) { final Stmt s = (Stmt) sIt.next(); HashSet read = new HashSet(); HashSet write = new HashSet(); Iterator<MethodOrMethodContext> it = tt.iterator(s); while (it.hasNext()) { SootMethod target = (SootMethod) it.next(); ensureProcessed(target); if (target.isNative()) { continue statement; read.addAll(methodToRead.get(target)); write.addAll(methodToWrite.get(target)); if (read.size() + write.size() > threshold) { continue statement;
/**
 * Recomputes the set of basic-block heads that {@code continue} statements
 * inside this statement can jump to.
 *
 * Child continue targets are merged in; a child's own head is dropped again
 * unless the child is the entry statement {@code first}. Explicit CONTINUE
 * edges leaving this statement contribute their destinations' heads. For a
 * do-loop, continues targeting the loop entry stay inside the loop, so the
 * entry head is removed from the result.
 *
 * @return the (reused, mutable) set of continue-target basic heads
 */
public HashSet<Statement> buildContinueSet() {
    continueSet.clear();
    for (Statement stat : stats) {
        continueSet.addAll(stat.buildContinueSet());
        if (stat != first) {
            continueSet.remove(stat.getBasichead());
        }
    }
    for (StatEdge continueEdge : getEdges(StatEdge.TYPE_CONTINUE, DIRECTION_FORWARD)) {
        continueSet.add(continueEdge.getDestination().getBasichead());
    }
    if (type == TYPE_DO) {
        continueSet.remove(first.getBasichead());
    }
    return continueSet;
}