src/share/classes/java/util/stream/Collectors.java

rev 7597 : 8015318: Extend Collector with 'finish' operation
Reviewed-by:
Contributed-by: brian.goetz@oracle.com

*** 25,34 **** --- 25,35 ---- package java.util.stream; import java.util.AbstractMap; import java.util.AbstractSet; import java.util.ArrayList; + import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.DoubleSummaryStatistics; import java.util.EnumSet;
*** 37,54 **** import java.util.IntSummaryStatistics; import java.util.Iterator; import java.util.List; import java.util.LongSummaryStatistics; import java.util.Map; - import java.util.NoSuchElementException; import java.util.Objects; import java.util.Set; import java.util.StringJoiner; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.BiFunction; import java.util.function.BinaryOperator; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.ToDoubleFunction; import java.util.function.ToIntFunction; --- 38,57 ---- import java.util.IntSummaryStatistics; import java.util.Iterator; import java.util.List; import java.util.LongSummaryStatistics; import java.util.Map; import java.util.Objects; + import java.util.Optional; import java.util.Set; import java.util.StringJoiner; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; + import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.BinaryOperator; + import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.ToDoubleFunction; import java.util.function.ToIntFunction;
*** 62,119 **** * <p>The following are examples of using the predefined {@code Collector} * implementations in {@link Collectors} with the {@code Stream} API to perform * mutable reduction tasks: * * <pre>{@code ! * // Accumulate elements into a List ! * List<Person> list = people.collect(Collectors.toList()); * ! * // Accumulate elements into a TreeSet ! * List<Person> list = people.collect(Collectors.toCollection(TreeSet::new)); * * // Convert elements to strings and concatenate them, separated by commas ! * String joined = stream.map(Object::toString) ! * .collect(Collectors.toStringJoiner(", ")) ! * .toString(); * * // Find highest-paid employee * Employee highestPaid = employees.stream() ! * .collect(Collectors.maxBy(Comparator.comparing(Employee::getSalary))); * * // Group employees by department * Map<Department, List<Employee>> byDept * = employees.stream() * .collect(Collectors.groupingBy(Employee::getDepartment)); * * // Find highest-paid employee by department ! * Map<Department, Employee> highestPaidByDept * = employees.stream() * .collect(Collectors.groupingBy(Employee::getDepartment, * Collectors.maxBy(Comparator.comparing(Employee::getSalary)))); * * // Partition students into passing and failing * Map<Boolean, List<Student>> passingFailing = * students.stream() ! * .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD); * * }</pre> * * TODO explanation of parallel collection * * @since 1.8 */ public final class Collectors { ! private static final Set<Collector.Characteristics> CH_CONCURRENT = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, - Collector.Characteristics.STRICTLY_MUTATIVE, - Collector.Characteristics.UNORDERED)); - private static final Set<Collector.Characteristics> CH_STRICT - = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.STRICTLY_MUTATIVE)); - private static final Set<Collector.Characteristics> CH_STRICT_UNORDERED - = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.STRICTLY_MUTATIVE, Collector.Characteristics.UNORDERED)); private Collectors() { } /** * Returns a merge function, suitable for use in --- 65,127 ---- * <p>The following are examples of using the predefined {@code Collector} * implementations in {@link Collectors} with the {@code Stream} API to perform * mutable reduction tasks: * * <pre>{@code ! * // Accumulate names into a List ! * List<String> list = people.stream().map(Person::getName).collect(Collectors.toList()); * ! * // Accumulate names into a TreeSet ! * Set<String> list = people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new)); * * // Convert elements to strings and concatenate them, separated by commas ! * String joined = things.stream() ! * .map(Object::toString) ! * .collect(Collectors.joining(", ")); * * // Find highest-paid employee * Employee highestPaid = employees.stream() ! * .collect(Collectors.maxBy(Comparator.comparing(Employee::getSalary))) ! * .get(); * * // Group employees by department * Map<Department, List<Employee>> byDept * = employees.stream() * .collect(Collectors.groupingBy(Employee::getDepartment)); * * // Find highest-paid employee by department ! * Map<Department, Optional<Employee>> highestPaidByDept * = employees.stream() * .collect(Collectors.groupingBy(Employee::getDepartment, * Collectors.maxBy(Comparator.comparing(Employee::getSalary)))); * * // Partition students into passing and failing * Map<Boolean, List<Student>> passingFailing = * students.stream() ! 
* .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD)); * * }</pre> * * TODO explanation of parallel collection * * @since 1.8 */ public final class Collectors { ! static final Set<Collector.Characteristics> CH_CONCURRENT_ID ! = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, ! Collector.Characteristics.UNORDERED, ! Collector.Characteristics.IDENTITY_FINISH)); ! static final Set<Collector.Characteristics> CH_CONCURRENT_NOID = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, Collector.Characteristics.UNORDERED)); + static final Set<Collector.Characteristics> CH_ID + = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH)); + static final Set<Collector.Characteristics> CH_UNORDERED_ID + = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.UNORDERED, + Collector.Characteristics.IDENTITY_FINISH)); + static final Set<Collector.Characteristics> CH_NOID = Collections.emptySet(); private Collectors() { } /** * Returns a merge function, suitable for use in
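The characteristic sets above replace the old STRICTLY_MUTATIVE-based constants with IDENTITY_FINISH variants, which record whether a collector's accumulation container is itself the final result. A small contrast, using two collectors defined later in this file (people and Person are the same illustrative names used in the class javadoc):

    // toList() carries IDENTITY_FINISH: the ArrayList used for accumulation is returned as-is.
    List<String> names = people.stream().map(Person::getName).collect(Collectors.toList());

    // joining() does not: it accumulates into a StringBuilder and finishes with StringBuilder::toString.
    String joined = people.stream().map(Person::getName).collect(Collectors.joining(", "));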
*** 122,213 **** * throws {@code IllegalStateException}. This can be used to enforce the * assumption that the elements being collected are distinct. * * @param <T> the type of input arguments to the merge function * @return a merge function which always throw {@code IllegalStateException} - * - * @see #firstWinsMerger() - * @see #lastWinsMerger() */ ! public static <T> BinaryOperator<T> throwingMerger() { return (u,v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; } /** - * Returns a merge function, suitable for use in - * {@link Map#merge(Object, Object, BiFunction) Map.merge()} or - * {@link #toMap(Function, Function, BinaryOperator) toMap()}, - * which implements a "first wins" policy. - * - * @param <T> the type of input arguments to the merge function - * @return a merge function which always returns its first argument - * @see #lastWinsMerger() - * @see #throwingMerger() - */ - public static <T> BinaryOperator<T> firstWinsMerger() { - return (u,v) -> u; - } - - /** - * Returns a merge function, suitable for use in - * {@link Map#merge(Object, Object, BiFunction) Map.merge()} or - * {@link #toMap(Function, Function, BinaryOperator) toMap()}, - * which implements a "last wins" policy. - * - * @param <T> the type of input arguments to the merge function - * @return a merge function which always returns its second argument - * @see #firstWinsMerger() - * @see #throwingMerger() - */ - public static <T> BinaryOperator<T> lastWinsMerger() { - return (u,v) -> v; - } - - /** * Simple implementation class for {@code Collector}. * * @param <T> the type of elements to be collected * @param <R> the type of the result */ ! private static final class CollectorImpl<T, R> implements Collector<T,R> { ! private final Supplier<R> resultSupplier; ! private final BiFunction<R, T, R> accumulator; ! private final BinaryOperator<R> combiner; private final Set<Characteristics> characteristics; ! CollectorImpl(Supplier<R> resultSupplier, ! BiFunction<R, T, R> accumulator, ! BinaryOperator<R> combiner, Set<Characteristics> characteristics) { ! this.resultSupplier = resultSupplier; this.accumulator = accumulator; this.combiner = combiner; this.characteristics = characteristics; } ! CollectorImpl(Supplier<R> resultSupplier, ! BiFunction<R, T, R> accumulator, ! BinaryOperator<R> combiner) { ! this(resultSupplier, accumulator, combiner, Collections.emptySet()); } @Override ! public BiFunction<R, T, R> accumulator() { return accumulator; } @Override ! public Supplier<R> resultSupplier() { ! return resultSupplier; } @Override ! public BinaryOperator<R> combiner() { return combiner; } @Override public Set<Characteristics> characteristics() { return characteristics; } } --- 130,197 ---- * throws {@code IllegalStateException}. This can be used to enforce the * assumption that the elements being collected are distinct. * * @param <T> the type of input arguments to the merge function * @return a merge function which always throw {@code IllegalStateException} */ ! private static <T> BinaryOperator<T> throwingMerger() { return (u,v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; } /** * Simple implementation class for {@code Collector}. * * @param <T> the type of elements to be collected * @param <R> the type of the result */ ! static class CollectorImpl<T, A, R> implements Collector<T, A, R> { ! private final Supplier<A> supplier; ! private final BiConsumer<A, T> accumulator; ! private final BinaryOperator<A> combiner; ! 
private final Function<A, R> finisher; private final Set<Characteristics> characteristics; ! CollectorImpl(Supplier<A> supplier, ! BiConsumer<A, T> accumulator, ! BinaryOperator<A> combiner, ! Function<A,R> finisher, Set<Characteristics> characteristics) { ! this.supplier = supplier; this.accumulator = accumulator; this.combiner = combiner; + this.finisher = finisher; this.characteristics = characteristics; } ! CollectorImpl(Supplier<A> supplier, ! BiConsumer<A, T> accumulator, ! BinaryOperator<A> combiner, ! Set<Characteristics> characteristics) { ! this(supplier, accumulator, combiner, i -> (R) i, characteristics); } @Override ! public BiConsumer<A, T> accumulator() { return accumulator; } @Override ! public Supplier<A> supplier() { ! return supplier; } @Override ! public BinaryOperator<A> combiner() { return combiner; } @Override + public Function<A, R> finisher() { + return finisher; + } + + @Override public Set<Characteristics> characteristics() { return characteristics; } }
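With Collector now parameterized as Collector<T, A, R>, a collect operation conceptually folds elements into the intermediate container of type A and then applies the finisher to obtain the R result. A minimal sequential sketch of that contract (this helper is illustrative only, not part of the patch):

    static <T, A, R> R collectSequentially(Iterable<T> input, Collector<T, A, R> collector) {
        A container = collector.supplier().get();          // create the accumulation container
        for (T t : input)
            collector.accumulator().accept(container, t);  // fold each element into the container
        return collector.finisher().apply(container);      // transform the container into the result
    }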
*** 222,236 **** * {@code Collection} of the appropriate type * @return a {@code Collector} which collects all the input elements into a * {@code Collection}, in encounter order */ public static <T, C extends Collection<T>> ! Collector<T, C> toCollection(Supplier<C> collectionFactory) { ! return new CollectorImpl<>(collectionFactory, ! (r, t) -> { r.add(t); return r; }, (r1, r2) -> { r1.addAll(r2); return r1; }, ! CH_STRICT); } /** * Returns a {@code Collector} that accumulates the input elements into a * new {@code List}. There are no guarantees on the type, mutability, --- 206,219 ---- * {@code Collection} of the appropriate type * @return a {@code Collector} which collects all the input elements into a * {@code Collection}, in encounter order */ public static <T, C extends Collection<T>> ! Collector<T, ?, C> toCollection(Supplier<C> collectionFactory) { ! return new CollectorImpl<>(collectionFactory, Collection::add, (r1, r2) -> { r1.addAll(r2); return r1; }, ! CH_ID); } /** * Returns a {@code Collector} that accumulates the input elements into a * new {@code List}. There are no guarantees on the type, mutability,
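Usage of toCollection is unchanged apart from the wildcarded intermediate type; for example, collecting names into a sorted set (Person and getName are illustrative, as in the class javadoc):

    TreeSet<String> sortedNames = people.stream()
                                        .map(Person::getName)
                                        .collect(Collectors.toCollection(TreeSet::new));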
*** 239,278 **** * @param <T> the type of the input elements * @return a {@code Collector} which collects all the input elements into a * {@code List}, in encounter order */ public static <T> ! Collector<T, List<T>> toList() { ! BiFunction<List<T>, T, List<T>> accumulator = (list, t) -> { ! switch (list.size()) { ! case 0: ! return Collections.singletonList(t); ! case 1: ! List<T> newList = new ArrayList<>(); ! newList.add(list.get(0)); ! newList.add(t); ! return newList; ! default: ! list.add(t); ! return list; ! } ! }; ! BinaryOperator<List<T>> combiner = (left, right) -> { ! switch (left.size()) { ! case 0: ! return right; ! case 1: ! List<T> newList = new ArrayList<>(left.size() + right.size()); ! newList.addAll(left); ! newList.addAll(right); ! return newList; ! default: ! left.addAll(right); ! return left; ! } ! }; ! return new CollectorImpl<>(Collections::emptyList, accumulator, combiner); } /** * Returns a {@code Collector} that accumulates the input elements into a * new {@code Set}. There are no guarantees on the type, mutability, --- 222,235 ---- * @param <T> the type of the input elements * @return a {@code Collector} which collects all the input elements into a * {@code List}, in encounter order */ public static <T> ! Collector<T, ?, List<T>> toList() { ! return new CollectorImpl<>((Supplier<List<T>>) ArrayList::new, List::add, ! (left, right) -> { left.addAll(right); return left; }, ! CH_ID); } /** * Returns a {@code Collector} that accumulates the input elements into a * new {@code Set}. There are no guarantees on the type, mutability,
*** 284,331 **** * @param <T> the type of the input elements * @return a {@code Collector} which collects all the input elements into a * {@code Set} */ public static <T> ! Collector<T, Set<T>> toSet() { ! return new CollectorImpl<>((Supplier<Set<T>>) HashSet::new, ! (r, t) -> { r.add(t); return r; }, ! (r1, r2) -> { r1.addAll(r2); return r1; }, ! CH_STRICT_UNORDERED); } /** * Returns a {@code Collector} that concatenates the input elements into a ! * new {@link StringBuilder}. * ! * @return a {@code Collector} which collects String elements into a ! * {@code StringBuilder}, in encounter order */ ! public static Collector<String, StringBuilder> toStringBuilder() { ! return new CollectorImpl<>(StringBuilder::new, ! (r, t) -> { r.append(t); return r; }, (r1, r2) -> { r1.append(r2); return r1; }, ! CH_STRICT); } /** ! * Returns a {@code Collector} that concatenates the input elements into a ! * new {@link StringJoiner}, using the specified delimiter. * * @param delimiter the delimiter to be used between each element ! * @return A {@code Collector} which collects String elements into a ! * {@code StringJoiner}, in encounter order */ ! public static Collector<CharSequence, StringJoiner> toStringJoiner(CharSequence delimiter) { ! BinaryOperator<StringJoiner> merger = (sj, other) -> { ! if (other.length() > 0) ! sj.add(other.toString()); ! return sj; ! }; ! return new CollectorImpl<>(() -> new StringJoiner(delimiter), ! (r, t) -> { r.add(t); return r; }, ! merger, CH_STRICT); } /** * {@code BinaryOperator<Map>} that merges the contents of its right * argument into its left argument, using the provided merge function to --- 241,302 ---- * @param <T> the type of the input elements * @return a {@code Collector} which collects all the input elements into a * {@code Set} */ public static <T> ! Collector<T, ?, Set<T>> toSet() { ! return new CollectorImpl<>((Supplier<Set<T>>) HashSet::new, Set::add, ! (left, right) -> { left.addAll(right); return left; }, ! CH_UNORDERED_ID); } /** * Returns a {@code Collector} that concatenates the input elements into a ! * {@code String}, in encounter order. * ! * @return a {@code Collector} that concatenates the input elements into a ! * {@code String}, in encounter order */ ! public static Collector<CharSequence, ?, String> joining() { ! return new CollectorImpl<CharSequence, StringBuilder, String>( ! StringBuilder::new, StringBuilder::append, (r1, r2) -> { r1.append(r2); return r1; }, ! StringBuilder::toString, CH_NOID); } /** ! * Returns a {@code Collector} that concatenates the input elements, ! * separated by the specified delimiter, in encounter order. * * @param delimiter the delimiter to be used between each element ! * @return A {@code Collector} which concatenates CharSequence elements, ! * separated by the specified delimiter, in encounter order */ ! public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) { ! return joining(delimiter, "", ""); ! } ! ! /** ! * Returns a {@code Collector} that concatenates the input elements, ! * separated by the specified delimiter, with the specified prefix and ! * suffix, in encounter order. ! * ! * @param delimiter the delimiter to be used between each element ! * @param prefix the sequence of characters to be used at the beginning ! * of the joined result ! * @param suffix the sequence of characters to be used at the end ! * of the joined result ! * @return A {@code Collector} which concatenates CharSequence elements, ! * separated by the specified delimiter, in encounter order ! */ ! 
public static Collector<CharSequence, ?, String> joining(CharSequence delimiter, ! CharSequence prefix, ! CharSequence suffix) { ! return new CollectorImpl<>( ! () -> new StringJoiner(delimiter, prefix, suffix), ! StringJoiner::add, StringJoiner::merge, ! StringJoiner::toString, CH_NOID); } /** * {@code BinaryOperator<Map>} that merges the contents of its right * argument into its left argument, using the provided merge function to
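A usage sketch for the three-argument joining form introduced here, which surrounds the joined elements with a prefix and suffix (things is the illustrative stream of objects from the class javadoc):

    String bracketed = things.stream()
                             .map(Object::toString)
                             .collect(Collectors.joining(", ", "[", "]"));   // e.g. "[a, b, c]"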
*** 346,406 **** return m1; }; } /** ! * Adapts a {@code Collector<U,R>} to a {@code Collector<T,R>} by applying ! * a mapping function to each input element before accumulation. * * @apiNote * The {@code mapping()} collectors are most useful when used in a ! * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. For example, given a stream of * {@code Person}, to accumulate the set of last names in each city: * <pre>{@code * Map<City, Set<String>> lastNamesByCity * = people.stream().collect(groupingBy(Person::getCity, * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <U> type of elements accepted by downstream collector * @param <R> result type of collector * @param mapper a function to be applied to the input elements * @param downstream a collector which will accept mapped values * @return a collector which applies the mapping function to the input * elements and provides the mapped results to the downstream collector */ ! public static <T, U, R> Collector<T, R> ! mapping(Function<? super T, ? extends U> mapper, Collector<? super U, R> downstream) { ! BiFunction<R, ? super U, R> downstreamAccumulator = downstream.accumulator(); ! return new CollectorImpl<>(downstream.resultSupplier(), ! (r, t) -> downstreamAccumulator.apply(r, mapper.apply(t)), ! downstream.combiner(), downstream.characteristics()); } /** ! * Returns a {@code Collector<T, Long>} that counts the number of input ! * elements. * * @implSpec * This produces a result equivalent to: * <pre>{@code * reducing(0L, e -> 1L, Long::sum) * }</pre> * * @param <T> the type of the input elements * @return a {@code Collector} that counts the input elements */ ! public static <T> Collector<T, Long> counting() { return reducing(0L, e -> 1L, Long::sum); } /** ! * Returns a {@code Collector<T, T>} that produces the minimal element ! * according to a given {@code Comparator}. * * @implSpec * This produces a result equivalent to: * <pre>{@code * reducing(BinaryOperator.minBy(comparator)) --- 317,382 ---- return m1; }; } /** ! * Adapts a {@code Collector} accepting elements of type {@code U} to one ! * accepting elements of type {@code T} by applying a mapping function to ! * each input element before accumulation. * * @apiNote * The {@code mapping()} collectors are most useful when used in a ! * multi-level reduction, such as downstream of a {@code groupingBy} or * {@code partitioningBy}. For example, given a stream of * {@code Person}, to accumulate the set of last names in each city: * <pre>{@code * Map<City, Set<String>> lastNamesByCity * = people.stream().collect(groupingBy(Person::getCity, * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <U> type of elements accepted by downstream collector + * @param <A> intermediate accumulation type of the downstream collector * @param <R> result type of collector * @param mapper a function to be applied to the input elements * @param downstream a collector which will accept mapped values * @return a collector which applies the mapping function to the input * elements and provides the mapped results to the downstream collector */ ! public static <T, U, A, R> ! Collector<T, ?, R> mapping(Function<? super T, ? extends U> mapper, ! Collector<? super U, A, R> downstream) { ! BiConsumer<A, ? super U> downstreamAccumulator = downstream.accumulator(); ! return new CollectorImpl<>(downstream.supplier(), ! 
(r, t) -> downstreamAccumulator.accept(r, mapper.apply(t)), ! downstream.combiner(), downstream.finisher(), ! downstream.characteristics()); } /** ! * Returns a {@code Collector} accepting elements of type {@code T} that ! * counts the number of input elements. If no elements are present, the ! * result is 0. * * @implSpec * This produces a result equivalent to: * <pre>{@code * reducing(0L, e -> 1L, Long::sum) * }</pre> * * @param <T> the type of the input elements * @return a {@code Collector} that counts the input elements */ ! public static <T> Collector<T, ?, Long> counting() { return reducing(0L, e -> 1L, Long::sum); } /** ! * Returns a {@code Collector} that produces the minimal element according ! * to a given {@code Comparator}, described as an {@code Optional<T>}. * * @implSpec * This produces a result equivalent to: * <pre>{@code * reducing(BinaryOperator.minBy(comparator))
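The mapping and counting collectors are mainly useful downstream of groupingBy; for example, counting people per city alongside the last-name example in the javadoc above (Person, getCity and City are illustrative):

    Map<City, Long> peopleByCity =
        people.stream().collect(Collectors.groupingBy(Person::getCity, Collectors.counting()));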
*** 408,425 **** * * @param <T> the type of the input elements * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the minimal value */ ! public static <T> Collector<T, T> minBy(Comparator<? super T> comparator) { return reducing(BinaryOperator.minBy(comparator)); } /** ! * Returns a {@code Collector<T, T>} that produces the maximal element ! * according to a given {@code Comparator}. * * @implSpec * This produces a result equivalent to: * <pre>{@code * reducing(BinaryOperator.maxBy(comparator)) --- 384,401 ---- * * @param <T> the type of the input elements * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the minimal value */ ! public static <T> Collector<T, ?, Optional<T>> minBy(Comparator<? super T> comparator) { return reducing(BinaryOperator.minBy(comparator)); } /** ! * Returns a {@code Collector} that produces the maximal element according ! * to a given {@code Comparator}, described as an {@code Optional<T>}. * * @implSpec * This produces a result equivalent to: * <pre>{@code * reducing(BinaryOperator.maxBy(comparator))
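Since minBy and maxBy now describe their result as Optional<T> (the stream may be empty), callers unwrap or supply a default explicitly. A sketch using the Employee example from the class javadoc:

    Optional<Employee> lowestPaid =
        employees.stream()
                 .collect(Collectors.minBy(Comparator.comparing(Employee::getSalary)));
    Employee cheapest = lowestPaid.orElseThrow(IllegalStateException::new);  // or orElse(someDefault)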
*** 427,487 **** * * @param <T> the type of the input elements * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the maximal value */ ! public static <T> Collector<T, T> maxBy(Comparator<? super T> comparator) { return reducing(BinaryOperator.maxBy(comparator)); } /** ! * Returns a {@code Collector<T, Long>} that produces the sum of a ! * long-valued function applied to the input element. * ! * @implSpec ! * This produces a result equivalent to: ! * <pre>{@code ! * reducing(0L, mapper, Long::sum) ! * }</pre> * * @param <T> the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ ! public static <T> Collector<T, Long> ! sumBy(Function<? super T, Long> mapper) { ! return reducing(0L, mapper, Long::sum); } /** ! * Returns a {@code Collector<T,T>} which performs a reduction of its ! * input elements under a specified {@code BinaryOperator}. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple reduction on a stream, ! * use {@link Stream#reduce(BinaryOperator)} instead. * * @param <T> element type for the input and output of the reduction * @param identity the identity value for the reduction (also, the value * that is returned when there are no input elements) * @param op a {@code BinaryOperator<T>} used to reduce the input elements * @return a {@code Collector} which implements the reduction operation * * @see #reducing(BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ ! public static <T> Collector<T, T> reducing(T identity, BinaryOperator<T> op) { ! return new CollectorImpl<>(() -> identity, (r, t) -> (r == null ? t : op.apply(r, t)), op); } /** ! * Returns a {@code Collector<T,T>} which performs a reduction of its ! * input elements under a specified {@code BinaryOperator}. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple reduction on a stream, --- 403,578 ---- * * @param <T> the type of the input elements * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the maximal value */ ! public static <T> Collector<T, ?, Optional<T>> maxBy(Comparator<? super T> comparator) { return reducing(BinaryOperator.maxBy(comparator)); } /** ! * Returns a {@code Collector} that produces the sum of a integer-valued ! * function applied to the input elements. If no elements are present, ! * the result is 0. * ! * @param <T> the type of the input elements ! * @param mapper a function extracting the property to be summed ! * @return a {@code Collector} that produces the sum of a derived property ! */ ! public static <T> Collector<T, ?, Integer> ! summingInt(ToIntFunction<? super T> mapper) { ! return new CollectorImpl<T, int[], Integer>( ! () -> new int[1], ! (a, t) -> { a[0] += mapper.applyAsInt(t); }, ! (a, b) -> { a[0] += b[0]; return a; }, ! a -> a[0], CH_NOID); ! } ! ! /** ! * Returns a {@code Collector} that produces the sum of a long-valued ! * function applied to the input elements. If no elements are present, ! * the result is 0. ! * ! * @param <T> the type of the input elements ! * @param mapper a function extracting the property to be summed ! 
* @return a {@code Collector} that produces the sum of a derived property ! */ ! public static <T> Collector<T, ?, Long> ! summingLong(ToLongFunction<? super T> mapper) { ! return new CollectorImpl<T, long[], Long>( ! () -> new long[1], ! (a, t) -> { a[0] += mapper.applyAsLong(t); }, ! (a, b) -> { a[0] += b[0]; return a; }, ! a -> a[0], CH_NOID); ! } ! ! /** ! * Returns a {@code Collector} that produces the sum of a double-valued ! * function applied to the input elements. If no elements are present, ! * the result is 0. ! * ! * <p>The sum returned can vary depending upon the order in which ! * values are recorded, due to accumulated rounding error in ! * addition of values of differing magnitudes. Values sorted by increasing ! * absolute magnitude tend to yield more accurate results. If any recorded ! * value is a {@code NaN} or the sum is at any point a {@code NaN} then the ! * sum will be {@code NaN}. ! * ! * @param <T> the type of the input elements ! * @param mapper a function extracting the property to be summed ! * @return a {@code Collector} that produces the sum of a derived property ! */ ! public static <T> Collector<T, ?, Double> ! summingDouble(ToDoubleFunction<? super T> mapper) { ! return new CollectorImpl<T, double[], Double>( ! () -> new double[1], ! (a, t) -> { a[0] += mapper.applyAsDouble(t); }, ! (a, b) -> { a[0] += b[0]; return a; }, ! a -> a[0], CH_NOID); ! } ! ! /** ! * Returns a {@code Collector} that produces the average of an integer-valued ! * function applied to the input elements. If no elements are present, ! * the result is 0. * * @param <T> the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ ! public static <T> Collector<T, ?, Double> ! averagingInt(ToIntFunction<? super T> mapper) { ! return new CollectorImpl<T, long[], Double>( ! () -> new long[2], ! (a, t) -> { a[0] += mapper.applyAsInt(t); a[1]++; }, ! (a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; }, ! a -> (a[1] == 0) ? 0.0d : (double) a[0] / a[1], CH_NOID); } /** ! * Returns a {@code Collector} that produces the average of a long-valued ! * function applied to the input elements. If no elements are present, ! * the result is 0. ! * ! * @param <T> the type of the input elements ! * @param mapper a function extracting the property to be summed ! * @return a {@code Collector} that produces the sum of a derived property ! */ ! public static <T> Collector<T, ?, Double> ! averagingLong(ToLongFunction<? super T> mapper) { ! return new CollectorImpl<T, long[], Double>( ! () -> new long[2], ! (a, t) -> { a[0] += mapper.applyAsLong(t); a[1]++; }, ! (a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; }, ! a -> (a[1] == 0) ? 0.0d : (double) a[0] / a[1], CH_NOID); ! } ! ! /** ! * Returns a {@code Collector} that produces the average of a double-valued ! * function applied to the input elements. If no elements are present, ! * the result is 0. ! * ! * <p>The average returned can vary depending upon the order in which ! * values are recorded, due to accumulated rounding error in ! * addition of values of differing magnitudes. Values sorted by increasing ! * absolute magnitude tend to yield more accurate results. If any recorded ! * value is a {@code NaN} or the sum is at any point a {@code NaN} then the ! * average will be {@code NaN}. ! * ! * @param <T> the type of the input elements ! * @param mapper a function extracting the property to be summed ! 
* @return a {@code Collector} that produces the sum of a derived property ! */ ! public static <T> Collector<T, ?, Double> ! averagingDouble(ToDoubleFunction<? super T> mapper) { ! return new CollectorImpl<T, double[], Double>( ! () -> new double[2], ! (a, t) -> { a[0] += mapper.applyAsDouble(t); a[1]++; }, ! (a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; }, ! a -> (a[1] == 0) ? 0.0d : a[0] / a[1], CH_NOID); ! } ! ! /** ! * Returns a {@code Collector} which performs a reduction of its ! * input elements under a specified {@code BinaryOperator} using the ! * provided identity. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple reduction on a stream, ! * use {@link Stream#reduce(Object, BinaryOperator)}} instead. * * @param <T> element type for the input and output of the reduction * @param identity the identity value for the reduction (also, the value * that is returned when there are no input elements) * @param op a {@code BinaryOperator<T>} used to reduce the input elements * @return a {@code Collector} which implements the reduction operation * * @see #reducing(BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ ! public static <T> Collector<T, ?, T> reducing(T identity, BinaryOperator<T> op) { ! return new CollectorImpl<>( ! boxSupplier(identity), ! (a, t) -> { a[0] = op.apply(a[0], t); }, ! (a, b) -> { a[0] = op.apply(a[0], b[0]); return a; }, ! a -> a[0], ! CH_NOID); ! } ! ! @SuppressWarnings("unchecked") ! private static <T> Supplier<T[]> boxSupplier(T identity) { ! return () -> (T[]) new Object[] { identity }; } /** ! * Returns a {@code Collector} which performs a reduction of its ! * input elements under a specified {@code BinaryOperator}. The result ! * is described as an {@code Optional<T>}. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple reduction on a stream,
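The new summing and averaging collectors accumulate into small primitive arrays and apply a finisher, so they compose naturally as downstream collectors. A sketch with the illustrative Employee type, assuming getSalary returns an int:

    Map<Department, Integer> payrollByDept =
        employees.stream()
                 .collect(Collectors.groupingBy(Employee::getDepartment,
                                                Collectors.summingInt(Employee::getSalary)));

    Map<Department, Double> averageSalaryByDept =
        employees.stream()
                 .collect(Collectors.groupingBy(Employee::getDepartment,
                                                Collectors.averagingInt(Employee::getSalary)));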
*** 489,542 **** * * <p>For example, given a stream of {@code Person}, to calculate tallest * person in each city: * <pre>{@code * Comparator<Person> byHeight = Comparator.comparing(Person::getHeight); - * BinaryOperator<Person> tallerOf = BinaryOperator.greaterOf(byHeight); * Map<City, Person> tallestByCity ! * = people.stream().collect(groupingBy(Person::getCity, reducing(tallerOf))); ! * }</pre> ! * ! * @implSpec ! * The default implementation is equivalent to: ! * <pre>{@code ! * reducing(null, op); * }</pre> * * @param <T> element type for the input and output of the reduction * @param op a {@code BinaryOperator<T>} used to reduce the input elements * @return a {@code Collector} which implements the reduction operation * * @see #reducing(Object, BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ ! public static <T> Collector<T, T> reducing(BinaryOperator<T> op) { ! return reducing(null, op); } /** ! * Returns a {@code Collector<T,U>} which performs a reduction of its * input elements under a specified mapping function and * {@code BinaryOperator}. This is a generalization of * {@link #reducing(Object, BinaryOperator)} which allows a transformation * of the elements before reduction. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or ! * {@code partitioningBy}. To perform a simple reduction on a stream, ! * use {@link Stream#reduce(BinaryOperator)} instead. * * <p>For example, given a stream of {@code Person}, to calculate the longest * last name of residents in each city: * <pre>{@code * Comparator<String> byLength = Comparator.comparing(String::length); - * BinaryOperator<String> longerOf = BinaryOperator.greaterOf(byLength); * Map<City, String> longestLastNameByCity * = people.stream().collect(groupingBy(Person::getCity, ! * reducing(Person::getLastName, longerOf))); * }</pre> * * @param <T> the type of the input elements * @param <U> the type of the mapped values * @param identity the identity value for the reduction (also, the value --- 580,645 ---- * * <p>For example, given a stream of {@code Person}, to calculate tallest * person in each city: * <pre>{@code * Comparator<Person> byHeight = Comparator.comparing(Person::getHeight); * Map<City, Person> tallestByCity ! * = people.stream().collect(groupingBy(Person::getCity, reducing(BinaryOperator.maxBy(byHeight)))); * }</pre> * * @param <T> element type for the input and output of the reduction * @param op a {@code BinaryOperator<T>} used to reduce the input elements * @return a {@code Collector} which implements the reduction operation * * @see #reducing(Object, BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ ! public static <T> Collector<T, ?, Optional<T>> reducing(BinaryOperator<T> op) { ! class OptionalBox implements Consumer<T> { ! T value = null; ! boolean present = false; ! ! @Override ! public void accept(T t) { ! if (present) { ! value = op.apply(value, t); ! } ! else { ! value = t; ! present = true; ! } ! } ! } ! ! return new CollectorImpl<T, OptionalBox, Optional<T>>( ! OptionalBox::new, OptionalBox::accept, ! (a, b) -> { if (b.present) a.accept(b.value); return a; }, ! a -> Optional.ofNullable(a.value), CH_NOID); } /** ! * Returns a {@code Collector} which performs a reduction of its * input elements under a specified mapping function and * {@code BinaryOperator}. 
This is a generalization of * {@link #reducing(Object, BinaryOperator)} which allows a transformation * of the elements before reduction. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or ! * {@code partitioningBy}. To perform a simple map-reduce on a stream, ! * use {@link Stream#map(Function)} and {@link Stream#reduce(Object, BinaryOperator)} ! * instead. * * <p>For example, given a stream of {@code Person}, to calculate the longest * last name of residents in each city: * <pre>{@code * Comparator<String> byLength = Comparator.comparing(String::length); * Map<City, String> longestLastNameByCity * = people.stream().collect(groupingBy(Person::getCity, ! * reducing(Person::getLastName, BinaryOperator.maxBy(byLength)))); * }</pre> * * @param <T> the type of the input elements * @param <U> the type of the mapped values * @param identity the identity value for the reduction (also, the value
*** 547,568 **** * * @see #reducing(Object, BinaryOperator) * @see #reducing(BinaryOperator) */ public static <T, U> ! Collector<T, U> reducing(U identity, Function<? super T, ? extends U> mapper, BinaryOperator<U> op) { ! return new CollectorImpl<>(() -> identity, ! (r, t) -> (r == null ? mapper.apply(t) : op.apply(r, mapper.apply(t))), ! op); } /** * Returns a {@code Collector} implementing a "group by" operation on * input elements of type {@code T}, grouping elements according to a ! * classification function. * * <p>The classification function maps elements to some key type {@code K}. * The collector produces a {@code Map<K, List<T>>} whose keys are the * values resulting from applying the classification function to the input * elements, and whose corresponding values are {@code List}s containing the --- 650,673 ---- * * @see #reducing(Object, BinaryOperator) * @see #reducing(BinaryOperator) */ public static <T, U> ! Collector<T, ?, U> reducing(U identity, Function<? super T, ? extends U> mapper, BinaryOperator<U> op) { ! return new CollectorImpl<>( ! boxSupplier(identity), ! (a, t) -> { a[0] = op.apply(a[0], mapper.apply(t)); }, ! (a, b) -> { a[0] = op.apply(a[0], b[0]); return a; }, ! a -> a[0], CH_NOID); } /** * Returns a {@code Collector} implementing a "group by" operation on * input elements of type {@code T}, grouping elements according to a ! * classification function, and returning the results in a {@code Map}. * * <p>The classification function maps elements to some key type {@code K}. * The collector produces a {@code Map<K, List<T>>} whose keys are the * values resulting from applying the classification function to the input * elements, and whose corresponding values are {@code List}s containing the
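A usage sketch of the three-argument reducing form rewritten above, following the longest-last-name example from its javadoc; the empty string is supplied here as an identity, since this form requires one (Person, getCity and getLastName are illustrative):

    Comparator<String> byLength = Comparator.comparing(String::length);
    Map<City, String> longestLastNameByCity =
        people.stream()
              .collect(Collectors.groupingBy(Person::getCity,
                                             Collectors.reducing("",
                                                                 Person::getLastName,
                                                                 BinaryOperator.maxBy(byLength))));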
*** 584,596 **** * * @see #groupingBy(Function, Collector) * @see #groupingBy(Function, Supplier, Collector) * @see #groupingByConcurrent(Function) */ ! public static <T, K> ! Collector<T, Map<K, List<T>>> groupingBy(Function<? super T, ? extends K> classifier) { ! return groupingBy(classifier, HashMap::new, toList()); } /** * Returns a {@code Collector} implementing a cascaded "group by" operation * on input elements of type {@code T}, grouping elements according to a --- 689,701 ---- * * @see #groupingBy(Function, Collector) * @see #groupingBy(Function, Supplier, Collector) * @see #groupingByConcurrent(Function) */ ! public static <T, K> Collector<T, ?, Map<K, List<T>>> ! groupingBy(Function<? super T, ? extends K> classifier) { ! return groupingBy(classifier, toList()); } /** * Returns a {@code Collector} implementing a cascaded "group by" operation * on input elements of type {@code T}, grouping elements according to a
*** 613,634 **** * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <K> the type of the keys * @param <D> the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @return a {@code Collector} implementing the cascaded group-by operation * @see #groupingBy(Function) * * @see #groupingBy(Function, Supplier, Collector) * @see #groupingByConcurrent(Function, Collector) */ ! public static <T, K, D> ! Collector<T, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier, ! Collector<? super T, D> downstream) { return groupingBy(classifier, HashMap::new, downstream); } /** * Returns a {@code Collector} implementing a cascaded "group by" operation --- 718,740 ---- * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @return a {@code Collector} implementing the cascaded group-by operation * @see #groupingBy(Function) * * @see #groupingBy(Function, Supplier, Collector) * @see #groupingByConcurrent(Function, Collector) */ ! public static <T, K, A, D> ! Collector<T, ?, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier, ! Collector<? super T, A, D> downstream) { return groupingBy(classifier, HashMap::new, downstream); } /** * Returns a {@code Collector} implementing a cascaded "group by" operation
*** 651,660 **** --- 757,767 ---- * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param <M> the type of the resulting {@code Map} * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @param mapFactory a function which, when called, produces a new empty
*** 663,691 **** * * @see #groupingBy(Function, Collector) * @see #groupingBy(Function) * @see #groupingByConcurrent(Function, Supplier, Collector) */ ! public static <T, K, D, M extends Map<K, D>> ! Collector<T, M> groupingBy(Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, ! Collector<? super T, D> downstream) { ! Supplier<D> downstreamSupplier = downstream.resultSupplier(); ! BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); ! BiFunction<M, T, M> accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); ! D oldContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); ! D newContainer = downstreamAccumulator.apply(oldContainer, t); ! if (newContainer != oldContainer) ! m.put(key, newContainer); ! return m; }; ! return new CollectorImpl<>(mapFactory, accumulator, mapMerger(downstream.combiner()), CH_STRICT); } /** ! * Returns a {@code Collector} implementing a concurrent "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function. * * <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and * {@link Collector.Characteristics#UNORDERED unordered} Collector. --- 770,812 ---- * * @see #groupingBy(Function, Collector) * @see #groupingBy(Function) * @see #groupingByConcurrent(Function, Supplier, Collector) */ ! public static <T, K, D, A, M extends Map<K, D>> ! Collector<T, ?, M> groupingBy(Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, ! Collector<? super T, A, D> downstream) { ! Supplier<A> downstreamSupplier = downstream.supplier(); ! BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator(); ! BiConsumer<Map<K, A>, T> accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); ! A container = m.computeIfAbsent(key, k -> downstreamSupplier.get()); ! downstreamAccumulator.accept(container, t); ! }; ! BinaryOperator<Map<K, A>> merger = Collectors.<K, A, Map<K, A>>mapMerger(downstream.combiner()); ! @SuppressWarnings("unchecked") ! Supplier<Map<K, A>> mangledFactory = (Supplier<Map<K, A>>) mapFactory; ! ! if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { ! return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_ID); ! } ! else { ! @SuppressWarnings("unchecked") ! Function<A, A> downstreamFinisher = (Function<A, A>) downstream.finisher(); ! Function<Map<K, A>, M> finisher = intermediate -> { ! intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v)); ! @SuppressWarnings("unchecked") ! M castResult = (M) intermediate; ! return castResult; }; ! return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_NOID); ! } } /** ! * Returns a concurrent {@code Collector} implementing a "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function. * * <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and * {@link Collector.Characteristics#UNORDERED unordered} Collector.
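Because the downstream finisher is now threaded through, grouping works with non-identity-finish downstreams such as counting(); the finisher branch above replaces each accumulated container with its finished value before returning the map. An illustrative use of the map-factory form, assuming Department is Comparable so it can key a TreeMap:

    TreeMap<Department, Long> headCountByDept =
        employees.stream()
                 .collect(Collectors.groupingBy(Employee::getDepartment,
                                                TreeMap::new,
                                                Collectors.counting()));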
*** 714,729 **** * @see #groupingBy(Function) * @see #groupingByConcurrent(Function, Collector) * @see #groupingByConcurrent(Function, Supplier, Collector) */ public static <T, K> ! Collector<T, ConcurrentMap<K, List<T>>> groupingByConcurrent(Function<? super T, ? extends K> classifier) { return groupingByConcurrent(classifier, ConcurrentHashMap::new, toList()); } /** ! * Returns a {@code Collector} implementing a concurrent cascaded "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function, and then performing a reduction * operation on the values associated with a given key using the specified * downstream {@code Collector}. * --- 835,851 ---- * @see #groupingBy(Function) * @see #groupingByConcurrent(Function, Collector) * @see #groupingByConcurrent(Function, Supplier, Collector) */ public static <T, K> ! Collector<T, ?, ConcurrentMap<K, List<T>>> ! groupingByConcurrent(Function<? super T, ? extends K> classifier) { return groupingByConcurrent(classifier, ConcurrentHashMap::new, toList()); } /** ! * Returns a concurrent {@code Collector} implementing a cascaded "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function, and then performing a reduction * operation on the values associated with a given key using the specified * downstream {@code Collector}. *
*** 737,764 **** * * <p>For example, to compute the set of last names of people in each city, * where the city names are sorted: * <pre>{@code * ConcurrentMap<City, Set<String>> namesByCity ! * = people.stream().collect(groupingByConcurrent(Person::getCity, TreeMap::new, * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <K> the type of the keys * @param <D> the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @return a {@code Collector} implementing the cascaded group-by operation * * @see #groupingBy(Function, Collector) * @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function, Supplier, Collector) */ ! public static <T, K, D> ! Collector<T, ConcurrentMap<K, D>> groupingByConcurrent(Function<? super T, ? extends K> classifier, ! Collector<? super T, D> downstream) { return groupingByConcurrent(classifier, ConcurrentHashMap::new, downstream); } /** * Returns a concurrent {@code Collector} implementing a cascaded "group by" --- 859,887 ---- * * <p>For example, to compute the set of last names of people in each city, * where the city names are sorted: * <pre>{@code * ConcurrentMap<City, Set<String>> namesByCity ! * = people.stream().collect(groupingByConcurrent(Person::getCity, ConcurrentSkipListMap::new, * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @return a {@code Collector} implementing the cascaded group-by operation * * @see #groupingBy(Function, Collector) * @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function, Supplier, Collector) */ ! public static <T, K, A, D> ! Collector<T, ?, ConcurrentMap<K, D>> groupingByConcurrent(Function<? super T, ? extends K> classifier, ! Collector<? super T, A, D> downstream) { return groupingByConcurrent(classifier, ConcurrentHashMap::new, downstream); } /** * Returns a concurrent {@code Collector} implementing a cascaded "group by"
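A usage sketch of the concurrent cascaded grouping, mirroring the javadoc example above (people, Person and City are illustrative; a parallel stream is used so the CONCURRENT characteristic can pay off):

    ConcurrentMap<City, Set<String>> lastNamesByCity =
        people.parallelStream()
              .collect(Collectors.groupingByConcurrent(Person::getCity,
                                                       Collectors.mapping(Person::getLastName,
                                                                          Collectors.toSet())));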
*** 785,794 **** --- 908,918 ---- * }</pre> * * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param <M> the type of the resulting {@code ConcurrentMap} * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @param mapFactory a function which, when called, produces a new empty
*** 797,851 **** * * @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function, Collector) * @see #groupingBy(Function, Supplier, Collector) */ ! public static <T, K, D, M extends ConcurrentMap<K, D>> ! Collector<T, M> groupingByConcurrent(Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, ! Collector<? super T, D> downstream) { ! Supplier<D> downstreamSupplier = downstream.resultSupplier(); ! BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); ! BinaryOperator<M> combiner = mapMerger(downstream.combiner()); if (downstream.characteristics().contains(Collector.Characteristics.CONCURRENT)) { ! BiFunction<M, T, M> accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); ! downstreamAccumulator.apply(m.computeIfAbsent(key, k -> downstreamSupplier.get()), t); ! return m; }; ! return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); ! } else if (downstream.characteristics().contains(Collector.Characteristics.STRICTLY_MUTATIVE)) { ! BiFunction<M, T, M> accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); ! D resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); synchronized (resultContainer) { ! downstreamAccumulator.apply(resultContainer, t); } - return m; }; - return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); - } else { - BiFunction<M, T, M> accumulator = (m, t) -> { - K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); - do { - D oldResult = m.computeIfAbsent(key, k -> downstreamSupplier.get()); - if (oldResult == null) { - if (m.putIfAbsent(key, downstreamAccumulator.apply(null, t)) == null) - return m; - } else { - synchronized (oldResult) { - if (m.get(key) != oldResult) - continue; - D newResult = downstreamAccumulator.apply(oldResult, t); - if (oldResult != newResult) - m.put(key, newResult); - return m; } } ! } while (true); }; ! return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); } } /** * Returns a {@code Collector} which partitions the input elements according --- 921,970 ---- * * @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function, Collector) * @see #groupingBy(Function, Supplier, Collector) */ ! public static <T, K, A, D, M extends ConcurrentMap<K, D>> ! Collector<T, ?, M> groupingByConcurrent(Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, ! Collector<? super T, A, D> downstream) { ! Supplier<A> downstreamSupplier = downstream.supplier(); ! BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator(); ! BinaryOperator<ConcurrentMap<K, A>> merger = Collectors.<K, A, ConcurrentMap<K, A>>mapMerger(downstream.combiner()); ! @SuppressWarnings("unchecked") ! Supplier<ConcurrentMap<K, A>> mangledFactory = (Supplier<ConcurrentMap<K, A>>) mapFactory; ! BiConsumer<ConcurrentMap<K, A>, T> accumulator; if (downstream.characteristics().contains(Collector.Characteristics.CONCURRENT)) { ! accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); ! A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); ! downstreamAccumulator.accept(resultContainer, t); }; ! } ! else { ! accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); ! 
A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); synchronized (resultContainer) { ! downstreamAccumulator.accept(resultContainer, t); } }; } + + if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { + return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_CONCURRENT_ID); } ! else { ! @SuppressWarnings("unchecked") ! Function<A, A> downstreamFinisher = (Function<A, A>) downstream.finisher(); ! Function<ConcurrentMap<K, A>, M> finisher = intermediate -> { ! intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v)); ! @SuppressWarnings("unchecked") ! M castResult = (M) intermediate; ! return castResult; }; ! return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_CONCURRENT_NOID); } } /** * Returns a {@code Collector} which partitions the input elements according
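Since counting() is not an identity-finish collector, it exercises the new finisher path above: values are accumulated concurrently and the downstream finisher is applied to each map value once at the end. An illustrative parallel head count (Employee and Department as in the class javadoc):

    ConcurrentMap<Department, Long> headCount =
        employees.parallelStream()
                 .collect(Collectors.groupingByConcurrent(Employee::getDepartment,
                                                          Collectors.counting()));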
*** 860,870 **** * @return a {@code Collector} implementing the partitioning operation * * @see #partitioningBy(Predicate, Collector) */ public static <T> ! Collector<T, Map<Boolean, List<T>>> partitioningBy(Predicate<? super T> predicate) { return partitioningBy(predicate, toList()); } /** * Returns a {@code Collector} which partitions the input elements according --- 979,989 ---- * @return a {@code Collector} implementing the partitioning operation * * @see #partitioningBy(Predicate, Collector) */ public static <T> ! Collector<T, ?, Map<Boolean, List<T>>> partitioningBy(Predicate<? super T> predicate) { return partitioningBy(predicate, toList()); } /** * Returns a {@code Collector} which partitions the input elements according
*** 875,939 **** * * <p>There are no guarantees on the type, mutability, * serializability, or thread-safety of the {@code Map} returned. * * @param <T> the type of the input elements * @param <D> the result type of the downstream reduction * @param predicate a predicate used for classifying input elements * @param downstream a {@code Collector} implementing the downstream * reduction * @return a {@code Collector} implementing the cascaded partitioning * operation * * @see #partitioningBy(Predicate) */ ! public static <T, D> ! Collector<T, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate, ! Collector<? super T, D> downstream) { ! BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); ! BiFunction<Map<Boolean, D>, T, Map<Boolean, D>> accumulator = (result, t) -> { Partition<D> asPartition = ((Partition<D>) result); ! if (predicate.test(t)) { ! D newResult = downstreamAccumulator.apply(asPartition.forTrue, t); ! if (newResult != asPartition.forTrue) ! asPartition.forTrue = newResult; ! } else { ! D newResult = downstreamAccumulator.apply(asPartition.forFalse, t); ! if (newResult != asPartition.forFalse) ! asPartition.forFalse = newResult; ! } ! return result; }; ! return new CollectorImpl<>(() -> new Partition<>(downstream.resultSupplier().get(), ! downstream.resultSupplier().get()), ! accumulator, partitionMerger(downstream.combiner()), CH_STRICT); ! } ! ! /** ! * Merge function for two partitions, given a merge function for the ! * elements. ! */ ! private static <D> BinaryOperator<Map<Boolean, D>> partitionMerger(BinaryOperator<D> op) { ! return (m1, m2) -> { ! Partition<D> left = (Partition<D>) m1; ! Partition<D> right = (Partition<D>) m2; ! if (left.forFalse == null) ! left.forFalse = right.forFalse; ! else if (right.forFalse != null) ! left.forFalse = op.apply(left.forFalse, right.forFalse); ! if (left.forTrue == null) ! left.forTrue = right.forTrue; ! else if (right.forTrue != null) ! left.forTrue = op.apply(left.forTrue, right.forTrue); ! return left; }; } /** ! * Accumulate elements into a {@code Map} whose keys and values are the ! * result of applying mapping functions to the input elements. ! * If the mapped keys contains duplicates (according to * {@link Object#equals(Object)}), an {@code IllegalStateException} is * thrown when the collection operation is performed. If the mapped keys * may have duplicates, use {@link #toMap(Function, Function, BinaryOperator)} * instead. * --- 994,1050 ---- * * <p>There are no guarantees on the type, mutability, * serializability, or thread-safety of the {@code Map} returned. * * @param <T> the type of the input elements + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param predicate a predicate used for classifying input elements * @param downstream a {@code Collector} implementing the downstream * reduction * @return a {@code Collector} implementing the cascaded partitioning * operation * * @see #partitioningBy(Predicate) */ ! public static <T, D, A> ! Collector<T, ?, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate, ! Collector<? super T, A, D> downstream) { ! @SuppressWarnings("unchecked") ! BiConsumer<D, ? super T> downstreamAccumulator = (BiConsumer<D, ? super T>) downstream.accumulator(); ! BiConsumer<Map<Boolean, A>, T> accumulator = (result, t) -> { Partition<D> asPartition = ((Partition<D>) result); ! downstreamAccumulator.accept(predicate.test(t) ? 
asPartition.forTrue : asPartition.forFalse, t);
          };
!         BinaryOperator<A> op = downstream.combiner();
!         BinaryOperator<Map<Boolean, A>> merger = (m1, m2) -> {
!             Partition<A> left = (Partition<A>) m1;
!             Partition<A> right = (Partition<A>) m2;
!             return new Partition<>(op.apply(left.forTrue, right.forTrue),
!                                    op.apply(left.forFalse, right.forFalse));
!         };
!         Supplier<Map<Boolean, A>> supplier = () -> new Partition<>(downstream.supplier().get(),
!                                                                    downstream.supplier().get());
!         if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) {
!             return new CollectorImpl<>(supplier, accumulator, merger, CH_ID);
!         }
!         else {
!             Function<Map<Boolean, A>, Map<Boolean, D>> finisher = (Map<Boolean, A> par) -> {
!                 Partition<A> asAPartition = (Partition<A>) par;
!                 return new Partition<>(downstream.finisher().apply(asAPartition.forTrue),
!                                        downstream.finisher().apply(asAPartition.forFalse));
              };
+             return new CollectorImpl<>(supplier, accumulator, merger, finisher, CH_NOID);
+         }
      }
  
      /**
!      * Returns a {@code Collector} that accumulates elements into a
!      * {@code Map} whose keys and values are the result of applying the provided
!      * mapping functions to the input elements.
!      *
!      * <p>If the mapped keys contain duplicates (according to
       * {@link Object#equals(Object)}), an {@code IllegalStateException} is
       * thrown when the collection operation is performed. If the mapped keys
       * may have duplicates, use {@link #toMap(Function, Function, BinaryOperator)}
       * instead.
       *
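The two partitioningBy collectors above are easiest to review next to a small usage example. The sketch below is illustrative only (the class name and sample data are invented for this note, not part of the patch); it assumes the post-patch behavior, where the downstream collector's finisher, if any, is applied to each partition before the Map is returned.

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class PartitioningByExample {
        public static void main(String[] args) {
            // Simple form: split values into a Map with Boolean keys and List values.
            Map<Boolean, List<Integer>> byParity =
                    Stream.of(1, 2, 3, 4, 5)
                          .collect(Collectors.partitioningBy(n -> n % 2 == 0));
            System.out.println(byParity);       // {false=[1, 3, 5], true=[2, 4]}

            // Downstream form: each partition is accumulated with the given
            // downstream collector (here counting()), and its finisher, if any,
            // is applied before the Map is returned.
            Map<Boolean, Long> countByParity =
                    Stream.of(1, 2, 3, 4, 5)
                          .collect(Collectors.partitioningBy(n -> n % 2 == 0,
                                                             Collectors.counting()));
            System.out.println(countByParity);  // {false=3, true=2}
        }
    }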
*** 968,995 ****
      * @see #toMap(Function, Function, BinaryOperator)
      * @see #toMap(Function, Function, BinaryOperator, Supplier)
      * @see #toConcurrentMap(Function, Function)
      */
      public static <T, K, U>
!     Collector<T, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
                                   Function<? super T, ? extends U> valueMapper) {
          return toMap(keyMapper, valueMapper, throwingMerger(), HashMap::new);
      }
  
      /**
!      * Accumulate elements into a {@code Map} whose keys and values are the
!      * result of applying mapping functions to the input elements. If the mapped
       * keys contains duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function.
       *
       * @apiNote
       * There are multiple ways to deal with collisions between multiple elements
!      * mapping to the same key. There are some predefined merging functions,
!      * such as {@link #throwingMerger()}, {@link #firstWinsMerger()}, and
!      * {@link #lastWinsMerger()}, that implement common policies, or you can
!      * implement custom policies easily. For example, if you have a stream
       * of {@code Person}, and you want to produce a "phone book" mapping name to
       * address, but it is possible that two persons have the same name, you can
       * do as follows to gracefully deals with these collisions, and produce a
       * {@code Map} mapping names to a concatenated list of addresses:
       * <pre>{@code
--- 1079,1108 ----
      * @see #toMap(Function, Function, BinaryOperator)
      * @see #toMap(Function, Function, BinaryOperator, Supplier)
      * @see #toConcurrentMap(Function, Function)
      */
      public static <T, K, U>
!     Collector<T, ?, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
                                      Function<? super T, ? extends U> valueMapper) {
          return toMap(keyMapper, valueMapper, throwingMerger(), HashMap::new);
      }
  
      /**
!      * Returns a {@code Collector} that accumulates elements into a
!      * {@code Map} whose keys and values are the result of applying the provided
!      * mapping functions to the input elements.
!      *
!      * <p>If the mapped
       * keys contains duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function.
       *
       * @apiNote
       * There are multiple ways to deal with collisions between multiple elements
!      * mapping to the same key. The other forms of {@code toMap} simply use
!      * a merge function that throws unconditionally, but you can easily write
!      * more flexible merge policies. For example, if you have a stream
       * of {@code Person}, and you want to produce a "phone book" mapping name to
       * address, but it is possible that two persons have the same name, you can
       * do as follows to gracefully deals with these collisions, and produce a
       * {@code Map} mapping names to a concatenated list of addresses:
       * <pre>{@code
*** 1016,1034 ****
      * @see #toMap(Function, Function)
      * @see #toMap(Function, Function, BinaryOperator, Supplier)
      * @see #toConcurrentMap(Function, Function, BinaryOperator)
      */
      public static <T, K, U>
!     Collector<T, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
                                   Function<? super T, ? extends U> valueMapper,
                                   BinaryOperator<U> mergeFunction) {
          return toMap(keyMapper, valueMapper, mergeFunction, HashMap::new);
      }
  
      /**
!      * Accumulate elements into a {@code Map} whose keys and values are the
!      * result of applying mapping functions to the input elements. If the mapped
       * keys contains duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function. The {@code Map}
       * is created by a provided supplier function.
       *
--- 1129,1150 ----
      * @see #toMap(Function, Function)
      * @see #toMap(Function, Function, BinaryOperator, Supplier)
      * @see #toConcurrentMap(Function, Function, BinaryOperator)
      */
      public static <T, K, U>
!     Collector<T, ?, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
                                      Function<? super T, ? extends U> valueMapper,
                                      BinaryOperator<U> mergeFunction) {
          return toMap(keyMapper, valueMapper, mergeFunction, HashMap::new);
      }
  
      /**
!      * Returns a {@code Collector} that accumulates elements into a
!      * {@code Map} whose keys and values are the result of applying the provided
!      * mapping functions to the input elements.
!      *
!      * <p>If the mapped
       * keys contains duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function. The {@code Map}
       * is created by a provided supplier function.
       *
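The three-argument toMap above resolves key collisions with the supplied merge function, as the @apiNote discusses. A minimal, self-contained sketch (class name and sample data invented for this note, not part of the patch):

    import java.util.Map;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class ToMapMergeExample {
        public static void main(String[] args) {
            // The key "apple" appears twice; the merge function combines the two
            // mapped values instead of letting the collector throw IllegalStateException.
            Map<String, Integer> totals =
                    Stream.of("apple", "banana", "apple")
                          .collect(Collectors.toMap(s -> s,          // key mapper
                                                    s -> 1,          // value mapper
                                                    Integer::sum));  // merge colliding values
            System.out.println(totals);  // e.g. {banana=1, apple=2} (HashMap order is unspecified)
        }
    }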
*** 1052,1077 ****
      * @see #toMap(Function, Function)
      * @see #toMap(Function, Function, BinaryOperator)
      * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
      */
      public static <T, K, U, M extends Map<K, U>>
!     Collector<T, M> toMap(Function<? super T, ? extends K> keyMapper,
                            Function<? super T, ? extends U> valueMapper,
                            BinaryOperator<U> mergeFunction,
                            Supplier<M> mapSupplier) {
!         BiFunction<M, T, M> accumulator
!                 = (map, element) -> {
!                     map.merge(keyMapper.apply(element), valueMapper.apply(element), mergeFunction);
!                     return map;
!                 };
!         return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_STRICT);
      }
  
      /**
!      * Accumulate elements into a {@code ConcurrentMap} whose keys and values
!      * are the result of applying mapping functions to the input elements.
!      * If the mapped keys contains duplicates (according to
       * {@link Object#equals(Object)}), an {@code IllegalStateException} is
       * thrown when the collection operation is performed. If the mapped keys
       * may have duplicates, use
       * {@link #toConcurrentMap(Function, Function, BinaryOperator)} instead.
       *
--- 1168,1193 ----
      * @see #toMap(Function, Function)
      * @see #toMap(Function, Function, BinaryOperator)
      * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
      */
      public static <T, K, U, M extends Map<K, U>>
!     Collector<T, ?, M> toMap(Function<? super T, ? extends K> keyMapper,
                               Function<? super T, ? extends U> valueMapper,
                               BinaryOperator<U> mergeFunction,
                               Supplier<M> mapSupplier) {
!         BiConsumer<M, T> accumulator
!                 = (map, element) -> map.merge(keyMapper.apply(element),
!                                               valueMapper.apply(element), mergeFunction);
!         return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_ID);
      }
  
      /**
!      * Returns a {@code Collector} that accumulates elements into a
!      * {@code ConcurrentMap} whose keys and values are the result of applying
!      * the provided mapping functions to the input elements.
!      *
!      * <p>If the mapped keys contain duplicates (according to
       * {@link Object#equals(Object)}), an {@code IllegalStateException} is
       * thrown when the collection operation is performed. If the mapped keys
       * may have duplicates, use
       * {@link #toConcurrentMap(Function, Function, BinaryOperator)} instead.
       *
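The four-argument toMap above additionally takes a map supplier, which determines the Map instance that receives the accumulated entries. A minimal sketch, assuming a TreeMap is wanted so that keys come out sorted (class name and sample data invented for this note, not part of the patch):

    import java.util.Map;
    import java.util.TreeMap;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class ToMapSupplierExample {
        public static void main(String[] args) {
            // The map supplier chooses the Map implementation; a TreeMap keeps keys sorted.
            Map<String, Integer> lengths =
                    Stream.of("pear", "fig", "banana")
                          .collect(Collectors.toMap(s -> s,          // key mapper
                                                    String::length,  // value mapper
                                                    (a, b) -> b,     // keep the last value on collision
                                                    TreeMap::new));  // map supplier
            System.out.println(lengths);  // {banana=6, fig=3, pear=4}
        }
    }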
*** 1110,1137 ****
      * @see #toMap(Function, Function)
      * @see #toConcurrentMap(Function, Function, BinaryOperator)
      * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
      */
      public static <T, K, U>
!     Collector<T, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper,
                                                       Function<? super T, ? extends U> valueMapper) {
          return toConcurrentMap(keyMapper, valueMapper, throwingMerger(), ConcurrentHashMap::new);
      }
  
      /**
!      * Accumulate elements into a {@code ConcurrentMap} whose keys and values
!      * are the result of applying mapping functions to the input elements. If
!      * the mapped keys contains duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function.
       *
       * @apiNote
       * There are multiple ways to deal with collisions between multiple elements
!      * mapping to the same key. There are some predefined merging functions,
!      * such as {@link #throwingMerger()}, {@link #firstWinsMerger()}, and
!      * {@link #lastWinsMerger()}, that implement common policies, or you can
!      * implement custom policies easily. For example, if you have a stream
       * of {@code Person}, and you want to produce a "phone book" mapping name to
       * address, but it is possible that two persons have the same name, you can
       * do as follows to gracefully deals with these collisions, and produce a
       * {@code Map} mapping names to a concatenated list of addresses:
       * <pre>{@code
--- 1226,1254 ----
      * @see #toMap(Function, Function)
      * @see #toConcurrentMap(Function, Function, BinaryOperator)
      * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
      */
      public static <T, K, U>
!     Collector<T, ?, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper,
                                                          Function<? super T, ? extends U> valueMapper) {
          return toConcurrentMap(keyMapper, valueMapper, throwingMerger(), ConcurrentHashMap::new);
      }
  
      /**
!      * Returns a {@code Collector} that accumulates elements into a
!      * {@code ConcurrentMap} whose keys and values are the result of applying
!      * the provided mapping functions to the input elements.
!      *
!      * <p>If the mapped keys contain duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function.
       *
       * @apiNote
       * There are multiple ways to deal with collisions between multiple elements
!      * mapping to the same key. The other forms of {@code toConcurrentMap} simply use
!      * a merge function that throws unconditionally, but you can easily write
!      * more flexible merge policies. For example, if you have a stream
       * of {@code Person}, and you want to produce a "phone book" mapping name to
       * address, but it is possible that two persons have the same name, you can
       * do as follows to gracefully deals with these collisions, and produce a
       * {@code Map} mapping names to a concatenated list of addresses:
       * <pre>{@code
*** 1161,1180 ****
      * @see #toConcurrentMap(Function, Function)
      * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
      * @see #toMap(Function, Function, BinaryOperator)
      */
      public static <T, K, U>
!     Collector<T, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper,
                                                       Function<? super T, ? extends U> valueMapper,
                                                       BinaryOperator<U> mergeFunction) {
          return toConcurrentMap(keyMapper, valueMapper, mergeFunction, ConcurrentHashMap::new);
      }
  
      /**
!      * Accumulate elements into a {@code ConcurrentMap} whose keys and values
!      * are the result of applying mapping functions to the input elements. If
!      * the mapped keys contains duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function. The
       * {@code ConcurrentMap} is created by a provided supplier function.
       *
       * <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and
--- 1278,1300 ----
      * @see #toConcurrentMap(Function, Function)
      * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
      * @see #toMap(Function, Function, BinaryOperator)
      */
      public static <T, K, U>
!     Collector<T, ?, ConcurrentMap<K,U>>
!     toConcurrentMap(Function<? super T, ? extends K> keyMapper,
                      Function<? super T, ? extends U> valueMapper,
                      BinaryOperator<U> mergeFunction) {
          return toConcurrentMap(keyMapper, valueMapper, mergeFunction, ConcurrentHashMap::new);
      }
  
      /**
!      * Returns a {@code Collector} that accumulates elements into a
!      * {@code ConcurrentMap} whose keys and values are the result of applying
!      * the provided mapping functions to the input elements.
!      *
!      * <p>If the mapped keys contain duplicates (according to {@link Object#equals(Object)}),
       * the value mapping function is applied to each equal element, and the
       * results are merged using the provided merging function. The
       * {@code ConcurrentMap} is created by a provided supplier function.
       *
       * <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and
*** 1200,1218 **** * @see #toConcurrentMap(Function, Function) * @see #toConcurrentMap(Function, Function, BinaryOperator) * @see #toMap(Function, Function, BinaryOperator, Supplier) */ public static <T, K, U, M extends ConcurrentMap<K, U>> ! Collector<T, M> toConcurrentMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper, BinaryOperator<U> mergeFunction, Supplier<M> mapSupplier) { ! BiFunction<M, T, M> accumulator = (map, element) -> { ! map.merge(keyMapper.apply(element), valueMapper.apply(element), mergeFunction); ! return map; ! }; ! return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_CONCURRENT); } /** * Returns a {@code Collector} which applies an {@code int}-producing * mapping function to each input element, and returns summary statistics --- 1320,1337 ---- * @see #toConcurrentMap(Function, Function) * @see #toConcurrentMap(Function, Function, BinaryOperator) * @see #toMap(Function, Function, BinaryOperator, Supplier) */ public static <T, K, U, M extends ConcurrentMap<K, U>> ! Collector<T, ?, M> toConcurrentMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper, BinaryOperator<U> mergeFunction, Supplier<M> mapSupplier) { ! BiConsumer<M, T> accumulator ! = (map, element) -> map.merge(keyMapper.apply(element), ! valueMapper.apply(element), mergeFunction); ! return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_CONCURRENT_ID); } /** * Returns a {@code Collector} which applies an {@code int}-producing * mapping function to each input element, and returns summary statistics
*** 1220,1237 **** * * @param <T> the type of the input elements * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * ! * @see #toDoubleSummaryStatistics(ToDoubleFunction) ! * @see #toLongSummaryStatistics(ToLongFunction) */ public static <T> ! Collector<T, IntSummaryStatistics> toIntSummaryStatistics(ToIntFunction<? super T> mapper) { ! return new CollectorImpl<>(IntSummaryStatistics::new, ! (r, t) -> { r.accept(mapper.applyAsInt(t)); return r; }, ! (l, r) -> { l.combine(r); return l; }, CH_STRICT); } /** * Returns a {@code Collector} which applies an {@code long}-producing * mapping function to each input element, and returns summary statistics --- 1339,1357 ---- * * @param <T> the type of the input elements * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * ! * @see #summarizingDouble(ToDoubleFunction) ! * @see #summarizingLong(ToLongFunction) */ public static <T> ! Collector<T, ?, IntSummaryStatistics> summarizingInt(ToIntFunction<? super T> mapper) { ! return new CollectorImpl<T, IntSummaryStatistics, IntSummaryStatistics>( ! IntSummaryStatistics::new, ! (r, t) -> r.accept(mapper.applyAsInt(t)), ! (l, r) -> { l.combine(r); return l; }, CH_ID); } /** * Returns a {@code Collector} which applies an {@code long}-producing * mapping function to each input element, and returns summary statistics
*** 1239,1256 **** * * @param <T> the type of the input elements * @param mapper the mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * ! * @see #toDoubleSummaryStatistics(ToDoubleFunction) ! * @see #toIntSummaryStatistics(ToIntFunction) */ public static <T> ! Collector<T, LongSummaryStatistics> toLongSummaryStatistics(ToLongFunction<? super T> mapper) { ! return new CollectorImpl<>(LongSummaryStatistics::new, ! (r, t) -> { r.accept(mapper.applyAsLong(t)); return r; }, ! (l, r) -> { l.combine(r); return l; }, CH_STRICT); } /** * Returns a {@code Collector} which applies an {@code double}-producing * mapping function to each input element, and returns summary statistics --- 1359,1377 ---- * * @param <T> the type of the input elements * @param mapper the mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * ! * @see #summarizingDouble(ToDoubleFunction) ! * @see #summarizingInt(ToIntFunction) */ public static <T> ! Collector<T, ?, LongSummaryStatistics> summarizingLong(ToLongFunction<? super T> mapper) { ! return new CollectorImpl<T, LongSummaryStatistics, LongSummaryStatistics>( ! LongSummaryStatistics::new, ! (r, t) -> r.accept(mapper.applyAsLong(t)), ! (l, r) -> { l.combine(r); return l; }, CH_ID); } /** * Returns a {@code Collector} which applies an {@code double}-producing * mapping function to each input element, and returns summary statistics
*** 1258,1285 **** * * @param <T> the type of the input elements * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * ! * @see #toLongSummaryStatistics(ToLongFunction) ! * @see #toIntSummaryStatistics(ToIntFunction) */ public static <T> ! Collector<T, DoubleSummaryStatistics> toDoubleSummaryStatistics(ToDoubleFunction<? super T> mapper) { ! return new CollectorImpl<>(DoubleSummaryStatistics::new, ! (r, t) -> { r.accept(mapper.applyAsDouble(t)); return r; }, ! (l, r) -> { l.combine(r); return l; }, CH_STRICT); } /** * Implementation class used by partitioningBy. */ private static final class Partition<T> extends AbstractMap<Boolean, T> implements Map<Boolean, T> { ! T forTrue; ! T forFalse; Partition(T forTrue, T forFalse) { this.forTrue = forTrue; this.forFalse = forFalse; } --- 1379,1407 ---- * * @param <T> the type of the input elements * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * ! * @see #summarizingLong(ToLongFunction) ! * @see #summarizingInt(ToIntFunction) */ public static <T> ! Collector<T, ?, DoubleSummaryStatistics> summarizingDouble(ToDoubleFunction<? super T> mapper) { ! return new CollectorImpl<T, DoubleSummaryStatistics, DoubleSummaryStatistics>( ! DoubleSummaryStatistics::new, ! (r, t) -> r.accept(mapper.applyAsDouble(t)), ! (l, r) -> { l.combine(r); return l; }, CH_ID); } /** * Implementation class used by partitioningBy. */ private static final class Partition<T> extends AbstractMap<Boolean, T> implements Map<Boolean, T> { ! final T forTrue; ! final T forFalse; Partition(T forTrue, T forFalse) { this.forTrue = forTrue; this.forFalse = forFalse; }
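The renamed summarizing collectors above (summarizingInt, summarizingLong, summarizingDouble) each fold the mapped values into a single statistics object in one pass. A minimal sketch of summarizingInt (class name and sample data invented for this note, not part of the patch):

    import java.util.IntSummaryStatistics;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class SummarizingExample {
        public static void main(String[] args) {
            // summarizingInt gathers count, sum, min, average and max of the mapped ints.
            IntSummaryStatistics stats =
                    Stream.of("a", "bb", "ccc", "dddd")
                          .collect(Collectors.summarizingInt(String::length));
            System.out.println(stats.getCount());    // 4
            System.out.println(stats.getSum());      // 10
            System.out.println(stats.getMin());      // 1
            System.out.println(stats.getAverage());  // 2.5
            System.out.println(stats.getMax());      // 4
        }
    }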
*** 1287,1314 **** @Override public Set<Map.Entry<Boolean, T>> entrySet() { return new AbstractSet<Map.Entry<Boolean, T>>() { @Override public Iterator<Map.Entry<Boolean, T>> iterator() { ! ! return new Iterator<Map.Entry<Boolean, T>>() { ! int state = 0; ! ! @Override ! public boolean hasNext() { ! return state < 2; ! } ! ! @Override ! public Map.Entry<Boolean, T> next() { ! if (state >= 2) ! throw new NoSuchElementException(); ! return (state++ == 0) ! ? new SimpleImmutableEntry<>(false, forFalse) ! : new SimpleImmutableEntry<>(true, forTrue); ! } ! }; } @Override public int size() { return 2; --- 1409,1421 ---- @Override public Set<Map.Entry<Boolean, T>> entrySet() { return new AbstractSet<Map.Entry<Boolean, T>>() { @Override public Iterator<Map.Entry<Boolean, T>> iterator() { ! Map.Entry<Boolean, T> falseEntry = new SimpleImmutableEntry<>(false, forFalse); ! Map.Entry<Boolean, T> trueEntry = new SimpleImmutableEntry<>(true, forTrue); ! return Arrays.asList(falseEntry, trueEntry).iterator(); } @Override public int size() { return 2;