Refine search
/**
 * Sorts, in place, only the elements of {@code list} found at the positions
 * given by {@code items}; every other position is left untouched. The values
 * at those indices are re-ordered among the same indices according to
 * {@code c}.
 *
 * @param list  the list whose selected slots are rewritten
 * @param items indices into {@code list} identifying the slots to sort
 * @param c     comparator defining the desired order of the selected values
 */
public static <T> void partialSort(List<T> list, List<Integer> items, Comparator<? super T> c) {
    // ArrayList rather than LinkedList: the write-back loop below indexes
    // into temp with get(i), which is O(n) per call on a linked list and
    // made the whole method accidentally quadratic.
    List<T> temp = new java.util.ArrayList<>(items.size());
    for (int index : items) {
        temp.add(list.get(index));
    }
    Collections.sort(temp, c);
    for (int i = 0; i < temp.size(); ++i) {
        list.set(items.get(i), temp.get(i));
    }
} }
/** Dimension column names for the job metric: the job properties followed by the shared time dimensions. */
public static List<String> getDimensionsForMetricsJob() {
    List<String> dimensions = Lists.newLinkedList();
    // Same columns, in the same order, emitted from a single enum array.
    for (JobPropertyEnum property : new JobPropertyEnum[] {
            JobPropertyEnum.USER, JobPropertyEnum.PROJECT, JobPropertyEnum.CUBE,
            JobPropertyEnum.TYPE, JobPropertyEnum.ALGORITHM }) {
        dimensions.add(property.toString());
    }
    dimensions.addAll(getTimeDimensionsForMetrics());
    return dimensions;
}
@Override protected List<String> create(String[] elements) {
    // Append the input back-to-front, then return Lists.reverse's view,
    // which presents the elements in their original order again.
    List<String> backwards = Lists.newLinkedList();
    for (int i = elements.length; i-- > 0; ) {
        backwards.add(elements[i]);
    }
    return Lists.reverse(backwards);
} })
/** Dimension column names for the query-RPC metric: host, the RPC properties, then the shared time dimensions. */
public static List<String> getDimensionsForMetricsQueryRPC() {
    List<String> dimensions = Lists.newLinkedList();
    dimensions.add(RecordEvent.RecordReserveKeyEnum.HOST.toString());
    for (QueryRPCPropertyEnum property : new QueryRPCPropertyEnum[] {
            QueryRPCPropertyEnum.PROJECT, QueryRPCPropertyEnum.REALIZATION,
            QueryRPCPropertyEnum.RPC_SERVER, QueryRPCPropertyEnum.EXCEPTION }) {
        dimensions.add(property.toString());
    }
    dimensions.addAll(getTimeDimensionsForMetrics());
    return dimensions;
}
/**
 * Looks up each key via {@code findByKey} and collects the metrics that
 * exist; keys with no backing metric are silently skipped.
 */
@Override public Collection<Metric<Serializable>> findAll(List<String> metricKeys) {
    List<Metric<Serializable>> found = Lists.newLinkedList();
    for (String key : metricKeys) {
        Metric<Serializable> metric = findByKey(key);
        if (metric == null) {
            continue;  // no metric registered under this key
        }
        found.add(metric);
    }
    return found;
}
/** Dimension column names for the job-exception metric: job properties, the exception, then the shared time dimensions. */
public static List<String> getDimensionsForMetricsJobException() {
    List<String> dimensions = Lists.newLinkedList();
    for (JobPropertyEnum property : new JobPropertyEnum[] {
            JobPropertyEnum.USER, JobPropertyEnum.PROJECT, JobPropertyEnum.CUBE,
            JobPropertyEnum.TYPE, JobPropertyEnum.ALGORITHM, JobPropertyEnum.EXCEPTION }) {
        dimensions.add(property.toString());
    }
    dimensions.addAll(getTimeDimensionsForMetrics());
    return dimensions;
}
/**
 * The returned defensive copy is only "somewhat" defensive. We do, for instance, return a defensive copy of the
 * enclosing List instance, and we do try to defensively copy any contained Rankable objects, too. However, the
 * contract of {@link org.apache.storm.starter.tools.Rankable#copy()} does not guarantee that any Object's embedded within
 * a Rankable will be defensively copied, too.
 *
 * @return a somewhat defensive copy of ranked items
 */
public List<Rankable> getRankings() {
    // Build the immutable result directly: the previous intermediate
    // LinkedList was copied a second time by ImmutableList.copyOf.
    ImmutableList.Builder<Rankable> copy = ImmutableList.builder();
    for (Rankable r : rankedItems) {
        copy.add(r.copy());
    }
    return copy.build();
}
/** Dimension column names for the query metric: host, the query properties, then the shared time dimensions. */
public static List<String> getDimensionsForMetricsQuery() {
    List<String> dimensions = Lists.newLinkedList();
    dimensions.add(RecordEvent.RecordReserveKeyEnum.HOST.toString());
    for (QueryPropertyEnum property : new QueryPropertyEnum[] {
            QueryPropertyEnum.USER, QueryPropertyEnum.PROJECT, QueryPropertyEnum.REALIZATION,
            QueryPropertyEnum.REALIZATION_TYPE, QueryPropertyEnum.TYPE, QueryPropertyEnum.EXCEPTION }) {
        dimensions.add(property.toString());
    }
    dimensions.addAll(getTimeDimensionsForMetrics());
    return dimensions;
}
@Override protected List<String> create(String[] elements) {
    // Prefix each element with "q"; the transform view returned below applies
    // removeFirst lazily, so consumers observe the original strings again.
    List<String> prefixed = Lists.newLinkedList();
    for (String element : elements) {
        prefixed.add("q" + checkNotNull(element));
    }
    return Lists.transform(prefixed, removeFirst);
} })
/** Dimension column names for the query-cube metric: host, the cube-query properties, then the shared time dimensions. */
public static List<String> getDimensionsForMetricsQueryCube() {
    List<String> dimensions = Lists.newLinkedList();
    dimensions.add(RecordEvent.RecordReserveKeyEnum.HOST.toString());
    for (QueryCubePropertyEnum property : new QueryCubePropertyEnum[] {
            QueryCubePropertyEnum.PROJECT, QueryCubePropertyEnum.CUBE, QueryCubePropertyEnum.SEGMENT,
            QueryCubePropertyEnum.CUBOID_SOURCE, QueryCubePropertyEnum.CUBOID_TARGET,
            QueryCubePropertyEnum.FILTER_MASK, QueryCubePropertyEnum.IF_MATCH,
            QueryCubePropertyEnum.IF_SUCCESS }) {
        dimensions.add(property.toString());
    }
    dimensions.addAll(getTimeDimensionsForMetrics());
    return dimensions;
}
/***
 * Level: important
 **/
public List<Env> portalSupportedEnvs() {
    // Falls back to FAT/UAT/PRO when no portal env list is configured.
    String[] configured = getArrayProperty("apollo.portal.envs", new String[]{"FAT", "UAT", "PRO"});
    List<Env> supported = Lists.newLinkedList();
    for (String name : configured) {
        supported.add(Env.fromString(name));
    }
    return supported;
}
// NOTE(review): this fragment appears truncated — `entry`, `random`, `reads`
// and the iterator behind `i.remove()` are declared outside the visible span,
// and `i` here is the int loop index, so `i.remove()` cannot compile as shown.
// Presumably extracted from a LocalCache recency-queue drain test; confirm
// against the full source before editing.
LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
Segment<Object, Object> segment = map.segments[0];
List<ReferenceEntry<Object, Object>> writeOrder = Lists.newLinkedList();
List<ReferenceEntry<Object, Object>> readOrder = Lists.newLinkedList();
for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) {
    Object key = new Object();
    writeOrder.add(entry);
    readOrder.add(entry);
    if (random.nextBoolean()) {
        map.get(entry.getKey());
        reads.add(entry);
        i.remove();
        // Invariant under test: reads are drained before the queue exceeds the threshold.
        assertTrue(segment.recencyQueue.size() <= DRAIN_THRESHOLD);
// Builds the test collection: wraps the caller's elements between sentinel
// values "yyy" and "zzz", then returns Collections2.filter's lazy view.
// NOTE(review): presumably NOT_YYY_ZZZ rejects exactly those sentinels so the
// view's contents equal the original elements — confirm against the predicate.
@Override public Collection<String> create(String[] elements) {
    List<String> unfiltered = newLinkedList();
    unfiltered.add("yyy");
    Collections.addAll(unfiltered, elements);
    unfiltered.add("zzz");
    return Collections2.filter(unfiltered, NOT_YYY_ZZZ);
} })
// NOTE(review): truncated fragment — `entry`, `random`, `reads`, `loader` and
// the iterator behind `i.remove()` are not defined in the visible span (`i` is
// the int loop index, so `i.remove()` cannot compile as shown). Looks like a
// LocalCache recency-queue test using the loading get(key, loader) path;
// confirm against the full source before editing.
LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
Segment<Object, Object> segment = map.segments[0];
List<ReferenceEntry<Object, Object>> writeOrder = Lists.newLinkedList();
List<ReferenceEntry<Object, Object>> readOrder = Lists.newLinkedList();
for (int i = 0; i < SMALL_MAX_SIZE; i++) {
    Object key = new Object();
    writeOrder.add(entry);
    readOrder.add(entry);
    if (random.nextBoolean()) {
        map.get(entry.getKey(), loader);
        reads.add(entry);
        i.remove();
        // Invariant under test: reads are drained before the queue exceeds the threshold.
        assertTrue(segment.recencyQueue.size() <= DRAIN_THRESHOLD);
// NOTE(review): truncated fragment — `next`, `current`, and `counter` are
// declared outside the visible span; appears to accumulate a chain of
// Throwable causes while walking cause links. Confirm against the full source.
List<Throwable> causes = Lists.newLinkedList();
causes.add(next);
current = next;
counter++;
// NOTE(review): truncated fragment — `entry`, `random`, `reads` and the
// iterator behind `i.remove()` are not defined in the visible span (`i` is the
// int loop index, so `i.remove()` cannot compile as shown). This variant
// records reads via segment.recordRead rather than map.get; confirm against
// the full source before editing.
LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
Segment<Object, Object> segment = map.segments[0];
List<ReferenceEntry<Object, Object>> writeOrder = Lists.newLinkedList();
List<ReferenceEntry<Object, Object>> readOrder = Lists.newLinkedList();
for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) {
    Object key = new Object();
    writeOrder.add(entry);
    readOrder.add(entry);
    if (random.nextBoolean()) {
        segment.recordRead(entry, map.ticker.read());
        reads.add(entry);
        i.remove();
/** Measure column names for the job-exception metric: only the job id code. */
public static List<String> getMeasuresForMetricsJobException() {
    List<String> measures = Lists.newLinkedList();
    measures.add(JobPropertyEnum.ID_CODE.toString());
    return measures;
}
// NOTE(review): truncated fragment — `entry`, `random`, `writes` and the
// iterator behind `i.remove()` are not defined in the visible span (`i` is the
// int loop index, so `i.remove()` cannot compile as shown). Write-order
// counterpart of the read-recency fragments, using segment.recordWrite;
// confirm against the full source before editing.
LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
Segment<Object, Object> segment = map.segments[0];
List<ReferenceEntry<Object, Object>> writeOrder = Lists.newLinkedList();
for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) {
    Object key = new Object();
    writeOrder.add(entry);
    if (random.nextBoolean()) {
        segment.recordWrite(entry, 1, map.ticker.read());
        writes.add(entry);
        i.remove();
/**
 * Reads up to {@code numEvents} events, stopping early once the underlying
 * reader is exhausted (i.e. {@code readEvent} returns null).
 *
 * @param numEvents        maximum number of events to return
 * @param backoffWithoutNL passed through to {@code readEvent}
 * @param addByteOffset    passed through to {@code readEvent}
 * @return the events read, possibly fewer than requested, never null
 * @throws IOException if the underlying read fails
 */
public List<Event> readEvents(int numEvents, boolean backoffWithoutNL, boolean addByteOffset) throws IOException {
    List<Event> events = Lists.newLinkedList();
    while (events.size() < numEvents) {
        Event event = readEvent(backoffWithoutNL, addByteOffset);
        if (event == null) {
            break;  // source exhausted
        }
        events.add(event);
    }
    return events;
}
/**
 * Hive (column name, column type) pairs for the job-exception metric table,
 * in order: id code, host, the job properties, the exception, then the
 * shared time columns.
 */
public static List<Pair<String, String>> getHiveColumnsForMetricsJobException() {
    List<Pair<String, String>> columns = Lists.newLinkedList();
    // Every leading column in this table is a Hive string.
    final String hiveString = HiveTypeEnum.HSTRING.toString();
    columns.add(new Pair<>(JobPropertyEnum.ID_CODE.toString(), hiveString));
    columns.add(new Pair<>(RecordEvent.RecordReserveKeyEnum.HOST.toString(), hiveString));
    for (JobPropertyEnum property : new JobPropertyEnum[] {
            JobPropertyEnum.USER, JobPropertyEnum.PROJECT, JobPropertyEnum.CUBE,
            JobPropertyEnum.TYPE, JobPropertyEnum.ALGORITHM, JobPropertyEnum.EXCEPTION }) {
        columns.add(new Pair<>(property.toString(), hiveString));
    }
    columns.addAll(getTimeColumnsForMetrics());
    return columns;
}