changeset 7325:076eba2ba95c

More Javadoc
author briangoetz
date Wed, 13 Feb 2013 19:25:34 -0500
parents d094d12b3a61
children dad2ec70090f
files src/share/classes/java/util/stream/AbstractPipeline.java src/share/classes/java/util/stream/IntermediateOp.java src/share/classes/java/util/stream/StreamShape.java
diffstat 3 files changed, 77 insertions(+), 32 deletions(-) [+]
line wrap: on
line diff
--- a/src/share/classes/java/util/stream/AbstractPipeline.java	Wed Feb 13 16:08:58 2013 -0500
+++ b/src/share/classes/java/util/stream/AbstractPipeline.java	Wed Feb 13 19:25:34 2013 -0500
@@ -143,8 +143,8 @@
      */
     @SuppressWarnings("unchecked")
     public <E_NEXT, S_NEXT extends BaseStream<E_NEXT, S_NEXT>> S_NEXT pipeline(IntermediateOp<E_OUT, E_NEXT> newOp) {
+        assert getOutputShape() == newOp.inputShape();
         if (newOp.isStateful() && isParallel()) {
-            assert getOutputShape() == newOp.inputShape();
             transitionTo(PipelineState.LINKED);
 
             // @@@ the newFlags and the node.spliterator().characteristics() will be out of sync
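A hedged sketch, not part of the patch, of the invariant this hunk now enforces for every operation appended via pipeline() (not just stateful operations on parallel pipelines); the checkShape helper and its parameter names are hypothetical, while getOutputShape(), inputShape(), IntermediateOp and StreamShape are the names used in this changeset:

    // Hypothetical helper for illustration only; it would have to live in java.util.stream
    // because IntermediateOp and StreamShape are package-private.
    static void checkShape(StreamShape upstreamOutputShape, IntermediateOp<?, ?> newOp) {
        // Every appended op -- stateful or not -- must consume the shape the upstream stage
        // produces, e.g. a REFERENCE stage feeding a map-to-int style op whose inputShape()
        // is REFERENCE and whose outputShape() is INT_VALUE.
        assert upstreamOutputShape == newOp.inputShape();
    }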
--- a/src/share/classes/java/util/stream/IntermediateOp.java	Wed Feb 13 16:08:58 2013 -0500
+++ b/src/share/classes/java/util/stream/IntermediateOp.java	Wed Feb 13 19:25:34 2013 -0500
@@ -25,71 +25,108 @@
 package java.util.stream;
 
 /**
- * An operation performed upon elements from an input stream to produce elements to
- * an output stream.
- * <p>By default the operation is stateless and not short-circuiting.</p>
+ * An operation in a stream pipeline that takes a stream as input and produces a stream, possibly of a different
+ * type, as output.  An intermediate operation has an input type and an output type (and an associated input
+ * shape and output shape).  An intermediate operation also has a set of <em>operation flags</em> that describes
+ * how the operation transforms characteristics of the stream (such as sortedness or size; see
+ * {@link StreamOpFlag}).
  *
- * @param <E_IN>  Type of input elements to the operation.
- * @param <E_OUT> Type of output elements to the operation.
+ * <p>Intermediate operations are implemented in terms of <em>sink transforms</em>: given a {@code Sink} for the
+ * output type of the operation, they produce a {@code Sink} for the input type of the operation which, when fed with
+ * values, has the effect of performing the desired operation on the input values and feeding the results to the output
+ * sink.
+ *
+ * <p>Some intermediate operations are <em>stateful</em>.  This means that the sinks they produce as a result of
+ * the above wrapping may maintain state from processing earlier elements.  Stateful intermediate operations must
+ * implement the {@link StatefulOp} interface.
+ *
+ * @apiNote
+ * As an example, consider the stream pipeline:
+ * <pre>
+ *     int oldestBob = people.stream().filter(p -> p.getFirstName().equals("Bob")).map(p -> p.getAge()).max();
+ * </pre>
+ *
+ * <p>This pipeline has two intermediate operations, filter and map.  The filtering operation has input and output types
+ * of {@code Person} (with input and output shape of {@code REFERENCE}), and the mapping operation has an input
+ * type of {@code Person} and an output type of {@code Integer} (with shape {@code INT_VALUE}).  When we construct
+ * a sink chain, the mapping operation will be asked to transform a {@code Sink.OfInt} which computes the maximum
+ * value into a {@code Sink} which accepts {@code Person} objects, and whose behavior is to take the supplied
+ * {@code Person}, call {@code getAge()} on it, and pass the resulting value to the downstream sink.  This sink
+ * transform might be implemented as:
+ *
+ * <pre>
+ *     new Sink.ChainedReference<U>(sink) {
+ *         public void accept(U u) {
+ *             downstream.accept(mappingFunction.applyAsInt(u));
+ *         }
+ *     }
+ * </pre>
+ *
+ * @param <E_IN>  Type of input elements to the operation
+ * @param <E_OUT> Type of output elements to the operation
  * @author Brian Goetz
  */
 interface IntermediateOp<E_IN, E_OUT> {
 
     /**
-     *
-     * @return the input shape of this operation.
+     * Get the shape of the input type of this operation.
+     * @implSpec The default implementation returns {@code StreamShape.REFERENCE}.
+     * @return the shape of the input type of this operation
      */
     default StreamShape inputShape() { return StreamShape.REFERENCE; }
 
     /**
-     *
-     * @return the output shape of this operation.
+     * Get the shape of the output type of this operation.
+     * @implSpec The default implementation returns {@code StreamShape.REFERENCE}.
+     * @return the shape of the output type of this operation
      */
     default StreamShape outputShape() { return StreamShape.REFERENCE; }
 
     /**
-     * Get the properties of the operation.
-     * <p>The properties correspond to the properties the output stream is
-     * known to have or is not known to have when this operation is applied, in
-     * encounter order, to elements of an input stream.</p>
+     * Get the operation flags of this operation.
      *
-     * @return the properties of the operation.
-     * @see {@link StreamOpFlag}
+     * @implSpec The default implementation returns {@code 0}.
+     * @return a bit mask describing the operation flags of this operation
+     * @see StreamOpFlag
      */
     default int getOpFlags() { return 0; }
 
     /**
-     * If {@code true} then operation is stateful, accumulates state and
-     * can be evaluated in parallel by invoking {@link #evaluateParallel(PipelineHelper)}.
+     * Return whether this operation is stateful or not.  If it is stateful, then the method
+     * {@link #evaluateParallel(PipelineHelper)} must be overridden.
      *
-     * <p>The default implementation returns false.
+     * @implSpec The default implementation returns {@code false}.
      *
-     * @return {@code true} then operation is stateful and accumulates state.
+     * @return {@code true} if and only if this operation is stateful
      */
     default boolean isStateful() { return false; }
 
     /**
-     * Return a sink which will accept elements, perform the operation upon
-     * each element and send it to the provided sink.
+     * Accept a {@code Sink} which will receive the results of this operation, and return a {@code Sink}
+     * which accepts elements of the input type of this operation and which performs the operation, passing
+     * the results to the provided {@code Sink}.
      *
+     * <p>The implementation may use the {@code flags} parameter to optimize the sink wrapping.  For example, if
+     * the input is already {@code DISTINCT}, the implementation for the {@link Stream#distinct()} method could
+     * just return the sink it was passed.
      *
-     * @param flags the combined stream and operation flags up to but not including this operation.
+     * @param flags the combined stream and operation flags up to, but not including, this operation.
      * @param sink elements will be sent to this sink after the processing.
      * @return a sink which will accept elements and perform the operation upon
-     *         each element.
+     *         each element, passing the results (if any) to the provided {@code Sink}.
      */
     Sink<E_IN> wrapSink(int flags, Sink<E_OUT> sink);
 
     /**
-     * Evaluate the operation in parallel.
+     * Perform a parallel evaluation of the operation using the specified {@code PipelineHelper}, which describes
+     * the stream source and upstream intermediate operations.  This method is called only on stateful operations;
+     * if {@link #isStateful()} returns {@code true}, implementations must override the default implementation.
      *
-     * <p>The default implementation throws an {@link UnsupportedOperationException}.
-     * If {@link #isStateful()} returns true then sub-classes or interfaces must override
-     * the default implementation.
+     * @implSpec The default implementation throws an {@link UnsupportedOperationException}.
      *
-     * @param helper the pipeline helper.
-     * @param <P_IN> the type of elements input to the pipeline.
-     * @return a node encapsulated the result evaluated in parallel.
+     * @param helper the pipeline helper
+     * @param <P_IN> the type of elements in the pipeline source
+     * @return a {@code Node} describing the result of the evaluation
      */
     default <P_IN> Node<E_OUT> evaluateParallel(PipelineHelper<P_IN, E_OUT> helper) {
         throw new UnsupportedOperationException("Parallel evaluation is not supported");
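To make the contract documented above concrete, here is a hedged sketch of a minimal stateless intermediate operation.  FilterOp and its field name are hypothetical; IntermediateOp, Sink and Sink.ChainedReference are the types used in this changeset (they are package-private members of java.util.stream, so such a class would have to live in that package):

    import java.util.function.Predicate;

    // Hypothetical stateless op: forwards only the elements matching a predicate.
    class FilterOp<T> implements IntermediateOp<T, T> {
        private final Predicate<T> predicate;

        FilterOp(Predicate<T> predicate) { this.predicate = predicate; }

        // inputShape()/outputShape() keep their REFERENCE defaults, isStateful() stays false,
        // and getOpFlags() stays 0, so evaluateParallel() need not be overridden.  A full
        // implementation would also report via getOpFlags() that the stream is no longer sized.

        @Override
        public Sink<T> wrapSink(int flags, Sink<T> sink) {
            // Wrap the downstream sink, following the ChainedReference pattern from the
            // apiNote: test each element and pass matches to the downstream sink.
            return new Sink.ChainedReference<T>(sink) {
                @Override
                public void accept(T t) {
                    if (predicate.test(t))
                        downstream.accept(t);
                }
            };
        }
    }

This is roughly the role the filter stage plays in the pipeline example from the IntermediateOp apiNote.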
--- a/src/share/classes/java/util/stream/StreamShape.java	Wed Feb 13 16:08:58 2013 -0500
+++ b/src/share/classes/java/util/stream/StreamShape.java	Wed Feb 13 19:25:34 2013 -0500
@@ -24,6 +24,8 @@
  */
 package java.util.stream;
 
+import java.util.function.ToIntFunction;
+
 /**
  * An enum describing the known shape specializations for stream abstractions.  Each will correspond to
  * a specific subinterface of {@link BaseStream} (e.g., {@code INT_VALUE} corresponds to {@code IntStream},
@@ -32,7 +34,13 @@
  * @apiNote
  * This enum is used by implementations to determine compatibility between streams and operations (i.e., if the
  * output shape of a stream is compatible with the input shape of the next operation).  It is also used to avoid
- * code bloat by allowing some abstractions to be shape-independent.
+ * code bloat by allowing some code to be largely shape-independent.
+ *
+ * <p>Many APIs require you to specify both a generic type and a stream shape, such as {@link IntermediateOp}, which
+ * has generic type parameters for its input and output types and getters for the input and output shapes.
+ * When representing primitive streams in this way, use the wrapper type as the generic type.  Accordingly, the
+ * implementation of {@link Stream#map(ToIntFunction)} would have an output type parameter of {@code Integer} and an
+ * output shape of {@code INT_VALUE}.
  */
 enum StreamShape {
     REFERENCE,
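To make the wrapper-type convention in the apiNote above concrete, here is a hedged sketch of the Person-to-age mapping stage described in the IntermediateOp apiNote.  MapToAgeOp is hypothetical and Person (with an int-returning getAge()) is the illustrative domain type from that example; IntermediateOp, Sink, StreamShape and ToIntFunction all appear in this changeset:

    import java.util.function.ToIntFunction;

    // Hypothetical op with a reference input shape and a primitive (INT_VALUE) output shape;
    // note the generic output type is the wrapper Integer, per the convention above.
    class MapToAgeOp implements IntermediateOp<Person, Integer> {
        private final ToIntFunction<Person> ageExtractor = Person::getAge;

        @Override
        public StreamShape inputShape()  { return StreamShape.REFERENCE; }  // consumes Person references

        @Override
        public StreamShape outputShape() { return StreamShape.INT_VALUE; }  // produces int values

        @Override
        public Sink<Person> wrapSink(int flags, Sink<Integer> sink) {
            // Mirrors the ChainedReference transform shown in the IntermediateOp apiNote:
            // extract the age from each Person and feed it to the downstream sink (in
            // practice a Sink.OfInt, such as the sink computing the maximum).
            return new Sink.ChainedReference<Person>(sink) {
                @Override
                public void accept(Person p) {
                    downstream.accept(ageExtractor.applyAsInt(p));
                }
            };
        }
    }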