/**
 * Computes the combined size of every store file in {@code storeFileNames}.
 *
 * @param tn the table the store files belong to
 * @param storeFileNames the store file references to measure
 * @return the sum of the individual store file sizes, 0 if the set is empty
 */
long getSizeOfStoreFiles(TableName tn, Set<StoreFileReference> storeFileNames) {
  // mapToLong(...).sum() is the idiomatic primitive-stream form of
  // collect(Collectors.summingLong(...)) when applied directly to a Stream.
  return storeFileNames.stream()
      .mapToLong(sfr -> getSizeOfStoreFile(tn, sfr))
      .sum();
}
/**
 * Rolls a finer-grained histogram up to day granularity by summing all values whose
 * timestamps fall on the same calendar day.
 *
 * @param histogram values keyed by timestamp
 * @return a sorted map keyed by the start-of-day of each original timestamp
 */
private TreeMap<DateTime, Long> aggregateToDaily(Map<DateTime, Long> histogram) {
  return histogram.entrySet().stream()
      .collect(Collectors.groupingBy(entry -> entry.getKey().withTimeAtStartOfDay(),
          TreeMap::new,
          // Long::longValue unboxes directly; Long::valueOf would re-box the long
          // only for summingLong to unbox it again.
          Collectors.mapping(Map.Entry::getValue, Collectors.summingLong(Long::longValue))));
}
/**
 * Computes the combined size of the store files for a single region.
 *
 * @param tn the table the region belongs to
 * @param storeFileName reference holding the region name and its family-to-files mapping
 * @return the sum of the sizes of all files across all families, 0 if there are none
 */
long getSizeOfStoreFile(TableName tn, StoreFileReference storeFileName) {
  String regionName = storeFileName.getRegionName();
  // Sum on a primitive LongStream instead of collecting with summingLong.
  return storeFileName.getFamilyToFilesMapping()
      .entries().stream()
      .mapToLong(e -> getSizeOfStoreFile(tn, regionName, e.getKey(), e.getValue()))
      .sum();
}
@Override public Mono<Long> delete(Publisher<K> keys) { Assert.notNull(keys, "Keys must not be null!"); return createFlux(connection -> connection.keyCommands() // .mDel(Flux.from(keys).map(this::rawKey).buffer(128)) // .map(CommandResponse::getOutput)) // .collect(Collectors.summingLong(value -> value)); }
@Override public Mono<Long> unlink(Publisher<K> keys) { Assert.notNull(keys, "Keys must not be null!"); return createFlux(connection -> connection.keyCommands() // .mUnlink(Flux.from(keys).map(this::rawKey).buffer(128)) // .map(CommandResponse::getOutput)) // .collect(Collectors.summingLong(value -> value)); }
/**
 * Returns the sum of all values in the backing map as a primitive {@code long}.
 *
 * @return the total of the map's values, 0 if the map is empty
 */
public long longSize() {
    // Sum on a primitive LongStream instead of going through a boxed collector.
    return this.map.values().stream().mapToLong(Long::longValue).sum();
}
/**
 * Returns the sum of all values in the backing map as a primitive {@code long}.
 *
 * @return the total of the map's values, 0 if the map is empty
 */
public long longSize() {
    // Sum on a primitive LongStream instead of going through a boxed collector.
    return this.map.values().stream().mapToLong(Long::longValue).sum();
}
/**
 * Returns the total number of quads across all graphs reported by
 * {@code rdfDataSet.graphNames()}.
 *
 * @return the sum of the quad-list sizes, 0 if there are no graphs
 */
@Override
public long size() {
    // Sum on a primitive LongStream instead of collecting with summingLong.
    return rdfDataSet.graphNames().stream()
            .map(rdfDataSet::getQuads)
            .mapToLong(List::size)
            .sum();
}
/**
 * Sums the {@code long} values produced by applying {@code function} to each element.
 *
 * @param function extracts the long value contributed by each element
 * @return the sum of the extracted values, 0 if there are no elements
 */
@Override
public long sumLong(ToLongFunction<? super T> function) {
    return collect(Collectors.summingLong(function));
}
/**
 * Builds a per-owner, per-asset summary of signed consumable quantities.
 *
 * <p>Particles that are abstract consumables are grouped first by their set of owner
 * public keys and then by asset id; the signed quantities within each
 * (owners, asset) bucket are summed.
 *
 * @return map of owner-key-set to (asset id to total signed quantity)
 */
public Map<Set<ECPublicKey>, Map<EUID, Long>> summary() {
    return getParticles().stream()
        // Only particles representing an abstract consumable contribute.
        .filter(Particle::isAbstractConsumable)
        .map(Particle::getAsAbstractConsumable)
        .collect(Collectors.groupingBy(
            AbstractConsumable::getOwnersPublicKeys,
            Collectors.groupingBy(
                AbstractConsumable::getAssetId,
                Collectors.summingLong(AbstractConsumable::getSignedQuantity)
            )
        ));
}
/**
 * Sums the {@code long} values produced by applying {@code function} to each element.
 *
 * @param function extracts the long value contributed by each element
 * @return the sum of the extracted values, 0 if there are no elements
 */
@Override
public long sumLong(ToLongFunction<? super T> function) {
    return collect(Collectors.summingLong(function));
}
/**
 * Appends a one-line summary of this node to {@code str}: the description left-padded
 * to 80 characters, "...", then the formatted total duration of all children.
 *
 * @param l unused in this implementation — presumably kept for a shared signature;
 *          TODO confirm against callers
 * @param str the builder the formatted line (plus a trailing newline) is appended to
 */
public void dump(long l, StringBuilder str) {
    // Sum the children's durations on a primitive LongStream instead of a boxed collector.
    long ms = this.children.stream().mapToLong(e -> e.durationMs()).sum();
    str.append(String.format("%-80s...%s", this.description, Performance.formatTime(ms)));
    str.append("\n");
}
}
/**
 * Rolls a finer-grained histogram up to day granularity by summing all values whose
 * timestamps fall on the same calendar day.
 *
 * @param histogram values keyed by timestamp
 * @return a sorted map keyed by the start-of-day of each original timestamp
 */
private TreeMap<DateTime, Long> aggregateToDaily(Map<DateTime, Long> histogram) {
  return histogram.entrySet().stream()
      .collect(Collectors.groupingBy(entry -> entry.getKey().withTimeAtStartOfDay(),
          TreeMap::new,
          // Long::longValue unboxes directly; Long::valueOf would re-box the long
          // only for summingLong to unbox it again.
          Collectors.mapping(Map.Entry::getValue, Collectors.summingLong(Long::longValue))));
}
/**
 * Computes the total size of an asset: the sum of its rendition sizes, plus — when
 * {@code includeSubassets} is set and the asset is not itself a sub-asset — the
 * recursively computed sizes of its sub-assets.
 *
 * @param asset the asset to measure
 * @return the aggregated size
 */
private long getTotalAssetSize(Asset asset) {
    // Sum on primitive LongStreams instead of collecting with summingLong.
    long size = asset.getRenditions().stream().mapToLong(r -> r.getSize()).sum();
    if (includeSubassets && !asset.isSubAsset()) {
        // Recursion handles deeper nesting; the isSubAsset() guard stops re-descending.
        size += DamUtil.getSubAssets(asset.adaptTo(Resource.class)).stream()
                .mapToLong(this::getTotalAssetSize)
                .sum();
    }
    return size;
}
/**
 * Returns the total number of quads in the data set: the sum of the sizes of all
 * named graphs plus the size of the default graph.
 *
 * @return the total quad count
 */
@Override
public long size() {
    // Graph::size returns int; mapToLong widens it, summing on a primitive stream
    // instead of collecting with summingLong.
    final long quads = Iter.asStream(datasetGraph.listGraphNodes())
            .map(datasetGraph::getGraph)
            .mapToLong(org.apache.jena.graph.Graph::size)
            .sum();
    return quads + datasetGraph.getDefaultGraph().size();
}
@Override public Mono<Long> delete(Publisher<K> keys) { Assert.notNull(keys, "Keys must not be null!"); return createFlux(connection -> connection.keyCommands() // .mDel(Flux.from(keys).map(this::rawKey).buffer(128)) // .map(CommandResponse::getOutput)) // .collect(Collectors.summingLong(value -> value)); }
@Override public Mono<Long> unlink(Publisher<K> keys) { Assert.notNull(keys, "Keys must not be null!"); return createFlux(connection -> connection.keyCommands() // .mUnlink(Flux.from(keys).map(this::rawKey).buffer(128)) // .map(CommandResponse::getOutput)) // .collect(Collectors.summingLong(value -> value)); }
@Override public Mono<Long> delete(Publisher<K> keys) { Assert.notNull(keys, "Keys must not be null!"); return createFlux(connection -> connection.keyCommands() // .mDel(Flux.from(keys).map(this::rawKey).buffer(128)) // .map(CommandResponse::getOutput)) // .collect(Collectors.summingLong(value -> value)); }
@Override public Mono<Long> unlink(Publisher<K> keys) { Assert.notNull(keys, "Keys must not be null!"); return createFlux(connection -> connection.keyCommands() // .mUnlink(Flux.from(keys).map(this::rawKey).buffer(128)) // .map(CommandResponse::getOutput)) // .collect(Collectors.summingLong(value -> value)); }
/**
 * Writes the chart generators aggregated over the non-overview report pages: a status
 * pie, a tests-per-run pie, a time-per-run pie, and a per-test bar chart.
 */
protected void writePieChartGeneratorBody(ChartWriter writer, List<TestReportHtml> htmls) {
	List<TestReportHtml> testPages = filterBy(htmls, r -> !r.isOverviewPage());
	writeStatusPieChartGenerator(writer, testPages);
	writer.writePieChartGenerator("Tests / Run", TESTCOUNT_CHART_ID, testPages,
			TestReportHtml::getRunName, Collectors.counting());
	// Negative times count as zero, so a bad timestamp cannot reduce the total.
	writer.writePieChartGenerator("Time / Run", RUNTIME_CHART_ID, testPages,
			TestReportHtml::getRunName, Collectors.summingLong(r -> Math.max(0, r.getTime())));
	writer.writeBarChartGenerator("ms / Test", TIME_PER_TEST_CHART_ID, ",hAxis:{textPosition:'none'}");
}