@Override
public List<T> get() {
    List<T> retVal = Lists.newArrayListWithExpectedSize(itemsToLoad.size());
    for (Key<? extends T> key : itemsToLoad) {
        retVal.add(injector.getInstance(key));
    }
    return retVal;
}
@Override
public Void apply(List<GetResult<Versioned<byte[]>>> nodeResults) {
    List<NodeValue<ByteArray, byte[]>> nodeValues =
            Lists.newArrayListWithExpectedSize(nodeResults.size());
    for (GetResult<Versioned<byte[]>> getResult : nodeResults) {
        fillRepairReadsValues(nodeValues, getResult.key, getResult.node, getResult.retrieved);
    }
    repairReads(nodeValues, repairReads && transforms == null);
    return null;
}
public byte[] getAnalysisTypesCacheKey() {
    int size = 1;
    List<byte[]> typeBytesList = Lists.newArrayListWithExpectedSize(analysisTypes.size());
    for (AnalysisType analysisType : analysisTypes) {
        final byte[] bytes = analysisType.getCacheKey();
        typeBytesList.add(bytes);
        size += bytes.length;
    }
    final ByteBuffer bytes = ByteBuffer.allocate(size);
    bytes.put(ANALYSIS_TYPES_CACHE_PREFIX);
    for (byte[] typeBytes : typeBytesList) {
        bytes.put(typeBytes);
    }
    return bytes.array();
}
static List<Long> getEntriesToSearch(long startEntryId, long endEntryId, int nWays) {
    if (startEntryId > endEntryId) {
        return Lists.newArrayList();
    }
    long numEntries = endEntryId - startEntryId + 1;
    long step = Math.max(1L, numEntries / nWays);
    List<Long> entryList = Lists.newArrayListWithExpectedSize(nWays);
    for (long i = startEntryId, j = nWays - 1; i <= endEntryId && j > 0; i += step, j--) {
        entryList.add(i);
    }
    if (entryList.get(entryList.size() - 1) < endEntryId) {
        entryList.add(endEntryId);
    }
    return entryList;
}
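A quick worked example of how this sampler behaves, with hypothetical input values and assuming the method above is callable directly:

    // Hypothetical illustration: sampling entries 0..9 four ways.
    List<Long> samples = getEntriesToSearch(0L, 9L, 4);
    // numEntries = 10, step = max(1, 10 / 4) = 2, so the loop adds 0, 2 and 4,
    // then the end entry id is appended because the last sample (4) falls short of 9.
    assert samples.equals(Lists.newArrayList(0L, 2L, 4L, 9L));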
/**
 * @param writable Druid Writable to be deserialized.
 * @return List of Hive Writables.
 * @throws SerDeException if there are SerDe issues.
 */
@Override
public Object deserialize(Writable writable) throws SerDeException {
    final DruidWritable input = (DruidWritable) writable;
    final List<Object> output = Lists.newArrayListWithExpectedSize(columns.length);
    for (int i = 0; i < columns.length; i++) {
        final Object value = input.isCompacted()
                ? input.getCompactedValue().get(i)
                : input.getValue().get(columns[i]);
        if (value == null) {
            output.add(null);
        } else {
            output.add(convertAsPrimitive(value, types[i]));
        }
    }
    return output;
}
public final List<Entity> put(FullEntity<?>... entities) {
    validateActive();
    // Collect the keys that still need ids allocated.
    List<IncompleteKey> incompleteKeys = Lists.newArrayListWithExpectedSize(entities.length);
    for (FullEntity<?> entity : entities) {
        IncompleteKey key = entity.getKey();
        Preconditions.checkArgument(key != null, "Entity must have a key");
        if (!(key instanceof Key)) {
            incompleteKeys.add(key);
        }
    }
    Iterator<Key> allocated;
    if (!incompleteKeys.isEmpty()) {
        IncompleteKey[] toAllocate = Iterables.toArray(incompleteKeys, IncompleteKey.class);
        allocated = getDatastore().allocateId(toAllocate).iterator();
    } else {
        allocated = Collections.emptyIterator();
    }
    List<Entity> answer = Lists.newArrayListWithExpectedSize(entities.length);
    for (FullEntity<?> entity : entities) {
        if (entity.getKey() instanceof Key) {
            putInternal((FullEntity<Key>) entity);
            answer.add(Entity.convert((FullEntity<Key>) entity));
        } else {
            Entity entityWithAllocatedId = Entity.newBuilder(allocated.next(), entity).build();
            putInternal(entityWithAllocatedId);
            answer.add(entityWithAllocatedId);
        }
    }
    return answer;
}
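A minimal usage sketch for the batch writer above, assuming the standard google-cloud Datastore client; the "Task" kind and property names are hypothetical. Entities built with an incomplete key get ids allocated by put(), while complete keys pass through unchanged:

    // Hypothetical usage; kind and property names are illustrative only.
    Datastore datastore = DatastoreOptions.getDefaultInstance().getService();
    KeyFactory keyFactory = datastore.newKeyFactory().setKind("Task");

    FullEntity<IncompleteKey> withIncompleteKey =
            FullEntity.newBuilder(keyFactory.newKey()).set("done", false).build();
    Entity withCompleteKey =
            Entity.newBuilder(keyFactory.newKey(42L)).set("done", true).build();

    // put() allocates an id for the first entity and returns both as complete Entity values.
    List<Entity> stored = datastore.put(withIncompleteKey, withCompleteKey);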
final List<List<String>> segmentLists = Lists.newArrayListWithExpectedSize(2);
LogSegmentNamesListener listener = new LogSegmentNamesListener() {
    @Override
    // ... listener callback body truncated in this snippet ...
};

List<String> firstSegmentList = segmentLists.get(0);
Collections.sort(firstSegmentList);
assertEquals("List of segments should be same", /* arguments truncated in this snippet */);

List<String> secondSegmentList = segmentLists.get(1);
Collections.sort(secondSegmentList);
assertEquals("List of segments should be updated", 2 * numSegments, secondSegmentList.size());
assertEquals("List of segments should be updated", newChildren, secondSegmentList);
@Override
public List<TableName> visit(JoinTableNode joinNode) throws SQLException {
    List<TableName> lhs = joinNode.getLHS().accept(this);
    List<TableName> rhs = joinNode.getType() == JoinType.Semi || joinNode.getType() == JoinType.Anti
            ? Collections.<TableName>emptyList()
            : joinNode.getRHS().accept(this);
    List<TableName> ret = Lists.<TableName>newArrayListWithExpectedSize(lhs.size() + rhs.size());
    ret.addAll(lhs);
    ret.addAll(rhs);
    return ret;
}
public ConfigurationSubscription(ConcurrentBaseConfiguration viewConfig,
                                 List<FileConfigurationBuilder> fileConfigBuilders,
                                 ScheduledExecutorService executorService,
                                 int reloadPeriod,
                                 TimeUnit reloadUnit) throws ConfigurationException {
    Preconditions.checkNotNull(fileConfigBuilders);
    Preconditions.checkArgument(!fileConfigBuilders.isEmpty());
    Preconditions.checkNotNull(executorService);
    Preconditions.checkNotNull(viewConfig);
    this.viewConfig = viewConfig;
    this.executorService = executorService;
    this.reloadPeriod = reloadPeriod;
    this.reloadUnit = reloadUnit;
    this.fileConfigBuilders = fileConfigBuilders;
    this.fileConfigs = Lists.newArrayListWithExpectedSize(this.fileConfigBuilders.size());
    this.confListeners = new CopyOnWriteArraySet<ConfigurationListener>();
    reload();
    scheduleReload();
}
private void addChildTasks(CuboidResult parent) {
    List<Long> children = cuboidScheduler.getSpanningCuboid(parent.cuboidId);
    if (children != null && !children.isEmpty()) {
        List<CuboidTask> childTasks = Lists.newArrayListWithExpectedSize(children.size());
        for (Long child : children) {
            CuboidTask task = new CuboidTask(parent, child, this);
            childTasks.add(task);
            task.fork();
        }
        for (CuboidTask childTask : childTasks) {
            childTask.join();
        }
    }
}
public static List<PostAggregator> decoratePostAggregators(
        List<PostAggregator> postAggs,
        Map<String, AggregatorFactory> aggFactories
) {
    List<PostAggregator> decorated = Lists.newArrayListWithExpectedSize(postAggs.size());
    for (PostAggregator aggregator : postAggs) {
        decorated.add(aggregator.decorate(aggFactories));
    }
    return decorated;
}
TIntList boneParents = new TIntArrayList(md5.numJoints);
for (int i = 0; i < md5.numJoints; ++i) {
    boneNames.add(md5.joints[i].name);
    boneParents.add(md5.joints[i].parent);
}
for (int frameIndex = 0; frameIndex < md5.numFrames; ++frameIndex) {
    MD5Frame frame = md5.frames[frameIndex];
    List<Vector3f> positions = Lists.newArrayListWithExpectedSize(md5.numJoints);
    List<Vector3f> rawRotations = Lists.newArrayListWithExpectedSize(md5.numJoints);
    for (int i = 0; i < md5.numJoints; ++i) {
        positions.add(new Vector3f(md5.baseFramePosition[i]));
        rawRotations.add(new Vector3f(md5.baseFrameOrientation[i]));
    }
    // Per-joint loop reconstructed: jointIndex is used below but its declaration was
    // dropped from the search result. Each component is only read from the frame data
    // when its flag is set.
    for (int jointIndex = 0; jointIndex < md5.numJoints; ++jointIndex) {
        int compIndex = 0;
        if ((md5.joints[jointIndex].flags & POSITION_X_FLAG) != 0) {
            positions.get(jointIndex).x = frame.components[md5.joints[jointIndex].startIndex + compIndex];
            compIndex++;
        }
        if ((md5.joints[jointIndex].flags & POSITION_Y_FLAG) != 0) {
            positions.get(jointIndex).y = frame.components[md5.joints[jointIndex].startIndex + compIndex];
            compIndex++;
        }
        if ((md5.joints[jointIndex].flags & POSITION_Z_FLAG) != 0) {
            positions.get(jointIndex).z = frame.components[md5.joints[jointIndex].startIndex + compIndex];
            compIndex++;
        }
        // ... remainder of the per-joint handling (rotation components) truncated in this snippet
    }
}
public final List<Entity> add(FullEntity<?>... entities) {
    validateActive();
    // Collect the keys that still need ids allocated.
    List<IncompleteKey> incompleteKeys = Lists.newArrayListWithExpectedSize(entities.length);
    for (FullEntity<?> entity : entities) {
        IncompleteKey key = entity.getKey();
        Preconditions.checkArgument(key != null, "Entity must have a key");
        if (!(key instanceof Key)) {
            incompleteKeys.add(key);
        }
    }
    Iterator<Key> allocated;
    if (!incompleteKeys.isEmpty()) {
        IncompleteKey[] toAllocate = Iterables.toArray(incompleteKeys, IncompleteKey.class);
        allocated = getDatastore().allocateId(toAllocate).iterator();
    } else {
        allocated = Collections.emptyIterator();
    }
    List<Entity> answer = Lists.newArrayListWithExpectedSize(entities.length);
    for (FullEntity<?> entity : entities) {
        if (entity.getKey() instanceof Key) {
            addInternal((FullEntity<Key>) entity);
            answer.add(Entity.convert((FullEntity<Key>) entity));
        } else {
            Entity entityWithAllocatedId = Entity.newBuilder(allocated.next(), entity).build();
            addInternal(entityWithAllocatedId);
            answer.add(entityWithAllocatedId);
        }
    }
    return answer;
}
@Override
public byte[] getCacheKey() {
    int size = 1;
    List<byte[]> columns = Lists.newArrayListWithExpectedSize(this.columns.size());
    for (String column : this.columns) {
        final byte[] bytes = StringUtils.toUtf8(column);
        columns.add(bytes);
        size += bytes.length + 1;
    }
    final ByteBuffer bytes = ByteBuffer.allocate(size).put(LIST_CACHE_PREFIX);
    for (byte[] column : columns) {
        bytes.put(column);
        bytes.put((byte) 0xff);
    }
    return bytes.array();
}
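The trailing 0xff byte written after each column name serves as a separator: it never occurs in valid UTF-8, so different column lists cannot concatenate to the same cache key. A small self-contained check of that property (hypothetical byte values, assuming java.util.Arrays):

    // "ab" + "c" and "a" + "bc" would collide as plain concatenations ("abc"),
    // but with a 0xff terminator after each element the byte sequences differ.
    byte[] keyA = new byte[] {'a', 'b', (byte) 0xff, 'c', (byte) 0xff};
    byte[] keyB = new byte[] {'a', (byte) 0xff, 'b', 'c', (byte) 0xff};
    assert !Arrays.equals(keyA, keyB);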
final List<List<String>> segmentLists = Lists.newArrayListWithExpectedSize(2);
LogSegmentNamesListener listener = new LogSegmentNamesListener() {
    @Override
    // ... listener callback body truncated in this snippet ...
};

List<String> firstSegmentList = segmentLists.get(0);
Collections.sort(firstSegmentList);
assertEquals("List of segments should be same", /* arguments truncated in this snippet */);

List<String> secondSegmentList = segmentLists.get(1);
Collections.sort(secondSegmentList);
assertEquals("List of segments should be updated", 0, secondSegmentList.size());
assertEquals("List of segments should be updated", newChildren, secondSegmentList);
public static List<Mutation> generateIndexData(PTable index, PTable table,
        List<Mutation> dataMutations, ImmutableBytesWritable ptr, KeyValueBuilder builder)
        throws SQLException {
    List<Mutation> indexMutations = Lists.newArrayListWithExpectedSize(dataMutations.size());
    for (Mutation dataMutation : dataMutations) {
        indexMutations.addAll(generateIndexData(index, table, dataMutation, ptr, builder));
    }
    return indexMutations;
}
public AbstractReadRepair(PD pipelineData,
                          Event completeEvent,
                          int preferred,
                          long timeoutMs,
                          Map<Integer, NonblockingStore> nonblockingStores,
                          ReadRepairer<ByteArray, byte[]> readRepairer) {
    super(pipelineData, completeEvent);
    this.preferred = preferred;
    this.timeoutMs = timeoutMs;
    this.nonblockingStores = nonblockingStores;
    this.readRepairer = readRepairer;
    this.nodeValues = Lists.newArrayListWithExpectedSize(pipelineData.getResponses().size());
}
@Override
public Iterator<Tuple2<RowKeyWritable, KeyValue>> call(Tuple2<Text, Text> textTextTuple2) throws Exception {
    List<Tuple2<RowKeyWritable, KeyValue>> result = Lists.newArrayListWithExpectedSize(cfNum);
    Object[] inputMeasures = new Object[cubeDesc.getMeasures().size()];
    inputCodec.decode(ByteBuffer.wrap(textTextTuple2._2.getBytes(), 0, textTextTuple2._2.getLength()),
            inputMeasures);
    for (int i = 0; i < cfNum; i++) {
        KeyValue outputValue = keyValueCreators.get(i).create(textTextTuple2._1, inputMeasures);
        result.add(new Tuple2<>(new RowKeyWritable(outputValue.createKeyOnly(false).getKey()), outputValue));
    }
    return result.iterator();
}
private Iterator<String> getIterator(List<Set<String>> resultList) {
    List<Iterator<String>> iterList = Lists.newArrayListWithExpectedSize(resultList.size());
    for (Set<String> result : resultList) {
        iterList.add(result.iterator());
    }
    return Iterators.concat(iterList.iterator());
}
List<HadoopyShardSpec> actualSpecs = Lists.newArrayListWithExpectedSize(numberOfShards);
if (numberOfShards == 1) {
    actualSpecs.add(new HadoopyShardSpec(NoneShardSpec.instance(), shardCount++));
} else {
    for (int i = 0; i < numberOfShards; ++i) {
        actualSpecs.add(
                new HadoopyShardSpec(
                        new HashBasedNumberedShardSpec(/* arguments truncated in this snippet */),
                        /* shard count argument truncated in this snippet */));
        log.info("DateTime[%s], partition[%d], spec[%s]", bucket, i, actualSpecs.get(i));
    }
}
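Common to all of the results above is the capacity hint. A minimal sketch of the difference versus a plain ArrayList, assuming Guava's com.google.common.collect.Lists is on the classpath; the element values are hypothetical:

    // Both lists behave identically; the expected-size variant merely pre-sizes the
    // backing array so that adding roughly that many elements avoids intermediate resizes.
    List<String> plain = new ArrayList<>();                          // default initial capacity
    List<String> sized = Lists.newArrayListWithExpectedSize(1000);   // capacity hint for ~1000 adds
    for (int i = 0; i < 1000; i++) {
        sized.add("row-" + i);  // no backing-array growth along the way
    }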