From eb9bab1096125e49367c4780c25f0022334dd56f Mon Sep 17 00:00:00 2001 From: Fabian Hueske Date: Mon, 8 Feb 2016 14:14:01 +0100 Subject: [PATCH 1/2] [FLINK-3366] Rename @Experimental annotation to @PublicEvolving --- ...{Experimental.java => PublicEvolving.java} | 2 +- .../flink/api/common/ExecutionConfig.java | 16 ++--- .../flink/api/common/JobExecutionResult.java | 4 +- .../functions/IterationRuntimeContext.java | 4 +- .../api/common/functions/RuntimeContext.java | 22 +++---- .../util/AbstractRuntimeUDFContext.java | 12 ++-- .../common/io/statistics/BaseStatistics.java | 14 ++-- .../common/typeinfo/BasicArrayTypeInfo.java | 22 +++---- .../api/common/typeinfo/BasicTypeInfo.java | 22 +++---- .../api/common/typeinfo/NothingTypeInfo.java | 16 ++--- .../typeinfo/PrimitiveArrayTypeInfo.java | 24 +++---- .../api/common/typeinfo/TypeInformation.java | 20 +++--- .../api/common/typeutils/CompositeType.java | 36 +++++----- .../api/java/typeutils/AvroTypeInfo.java | 4 +- .../api/java/typeutils/EitherTypeInfo.java | 18 ++--- .../api/java/typeutils/EnumTypeInfo.java | 20 +++--- .../api/java/typeutils/GenericTypeInfo.java | 20 +++--- .../java/typeutils/ObjectArrayTypeInfo.java | 22 +++---- .../api/java/typeutils/PojoTypeInfo.java | 28 ++++---- .../api/java/typeutils/TupleTypeInfo.java | 16 ++--- .../api/java/typeutils/TypeExtractor.java | 66 +++++++++---------- .../api/java/typeutils/ValueTypeInfo.java | 24 +++---- .../api/java/typeutils/WritableTypeInfo.java | 22 +++---- .../org/apache/flink/api/java/DataSet.java | 6 +- .../flink/api/java/ExecutionEnvironment.java | 32 ++++----- .../flink/api/java/LocalEnvironment.java | 4 +- .../flink/api/java/RemoteEnvironment.java | 4 +- .../api/java/functions/FirstReducer.java | 1 + .../java/functions/FunctionAnnotation.java | 10 +-- .../apache/flink/api/java/io/CsvReader.java | 4 +- .../api/java/operators/CrossOperator.java | 4 +- .../flink/api/java/operators/DataSink.java | 6 +- .../flink/api/java/operators/DataSource.java | 4 +- .../api/java/operators/DeltaIteration.java | 6 +- .../api/java/operators/IterativeDataSet.java | 8 +-- .../api/java/operators/JoinOperator.java | 4 +- .../api/java/operators/ProjectOperator.java | 4 +- .../flink/api/java/utils/DataSetUtils.java | 4 +- .../flink/api/java/utils/ParameterTool.java | 4 +- .../org/apache/flink/api/scala/DataSet.scala | 8 +-- .../api/scala/ExecutionEnvironment.scala | 34 +++++----- .../scala/typeutils/CaseClassTypeInfo.scala | 16 ++--- .../api/scala/typeutils/EitherTypeInfo.scala | 18 ++--- .../scala/typeutils/EnumValueTypeInfo.scala | 20 +++--- .../api/scala/typeutils/OptionTypeInfo.scala | 18 ++--- .../typeutils/ScalaNothingTypeInfo.scala | 16 ++--- .../scala/typeutils/TraversableTypeInfo.scala | 18 ++--- .../api/scala/typeutils/TryTypeInfo.scala | 18 ++--- .../api/scala/typeutils/UnitTypeInfo.scala | 16 ++--- .../flink/api/scala/utils/package.scala | 5 +- .../api/datastream/AllWindowedStream.java | 8 +-- .../api/datastream/CoGroupedStreams.java | 8 +-- .../api/datastream/ConnectedStreams.java | 4 +- .../streaming/api/datastream/DataStream.java | 46 ++++++------- .../api/datastream/DataStreamSink.java | 6 +- .../api/datastream/IterativeStream.java | 4 +- .../api/datastream/JoinedStreams.java | 10 +-- .../streaming/api/datastream/KeyedStream.java | 6 +- .../SingleOutputStreamOperator.java | 20 +++--- .../streaming/api/datastream/SplitStream.java | 4 +- .../api/datastream/WindowedStream.java | 8 +-- .../api/environment/CheckpointConfig.java | 6 +- .../StreamExecutionEnvironment.java | 36 +++++----- 
.../source/EventTimeSourceFunction.java | 4 +- .../api/functions/source/SourceFunction.java | 6 +- .../api/scala/AllWindowedStream.scala | 6 +- .../api/scala/CoGroupedStreams.scala | 10 +-- .../streaming/api/scala/DataStream.scala | 57 ++++++++-------- .../streaming/api/scala/JoinedStreams.scala | 8 +-- .../streaming/api/scala/KeyedStream.scala | 4 +- .../scala/StreamExecutionEnvironment.scala | 28 ++++---- .../streaming/api/scala/WindowedStream.scala | 6 +- 72 files changed, 522 insertions(+), 519 deletions(-) rename flink-annotations/src/main/java/org/apache/flink/annotation/{Experimental.java => PublicEvolving.java} (97%) diff --git a/flink-annotations/src/main/java/org/apache/flink/annotation/Experimental.java b/flink-annotations/src/main/java/org/apache/flink/annotation/PublicEvolving.java similarity index 97% rename from flink-annotations/src/main/java/org/apache/flink/annotation/Experimental.java rename to flink-annotations/src/main/java/org/apache/flink/annotation/PublicEvolving.java index 3f4a66139f94f..925c19d25f558 100644 --- a/flink-annotations/src/main/java/org/apache/flink/annotation/Experimental.java +++ b/flink-annotations/src/main/java/org/apache/flink/annotation/PublicEvolving.java @@ -31,5 +31,5 @@ @Documented @Target({ ElementType.TYPE, ElementType.METHOD, ElementType.FIELD, ElementType.CONSTRUCTOR }) @Public -public @interface Experimental { +public @interface PublicEvolving { } diff --git a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java index 59ad5dddefd83..8d5211bba349f 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java @@ -19,7 +19,7 @@ package org.apache.flink.api.common; import com.esotericsoftware.kryo.Serializer; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import java.io.Serializable; @@ -153,7 +153,7 @@ public boolean isClosureCleanerEnabled() { * * @param interval The interval between watermarks in milliseconds. 
*/ - @Experimental + @PublicEvolving public ExecutionConfig setAutoWatermarkInterval(long interval) { enableTimestamps(); this.autoWatermarkInterval = interval; @@ -171,7 +171,7 @@ public ExecutionConfig setAutoWatermarkInterval(long interval) { * * @see #setAutoWatermarkInterval(long) */ - @Experimental + @PublicEvolving public ExecutionConfig enableTimestamps() { this.timestampsEnabled = true; return this; @@ -182,7 +182,7 @@ public ExecutionConfig enableTimestamps() { * * @see #enableTimestamps() */ - @Experimental + @PublicEvolving public ExecutionConfig disableTimestamps() { this.timestampsEnabled = false; return this; @@ -193,7 +193,7 @@ public ExecutionConfig disableTimestamps() { * * @see #enableTimestamps() */ - @Experimental + @PublicEvolving public boolean areTimestampsEnabled() { return timestampsEnabled; } @@ -203,7 +203,7 @@ public boolean areTimestampsEnabled() { * * @see #setAutoWatermarkInterval(long) */ - @Experimental + @PublicEvolving public long getAutoWatermarkInterval() { return this.autoWatermarkInterval; } @@ -385,7 +385,7 @@ public boolean isObjectReuseEnabled() { * * @param codeAnalysisMode see {@link CodeAnalysisMode} */ - @Experimental + @PublicEvolving public void setCodeAnalysisMode(CodeAnalysisMode codeAnalysisMode) { this.codeAnalysisMode = codeAnalysisMode; } @@ -393,7 +393,7 @@ public void setCodeAnalysisMode(CodeAnalysisMode codeAnalysisMode) { /** * Returns the {@link CodeAnalysisMode} of the program. */ - @Experimental + @PublicEvolving public CodeAnalysisMode getCodeAnalysisMode() { return codeAnalysisMode; } diff --git a/flink-core/src/main/java/org/apache/flink/api/common/JobExecutionResult.java b/flink-core/src/main/java/org/apache/flink/api/common/JobExecutionResult.java index 1fff4daa42768..7962fce8c86e6 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/JobExecutionResult.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/JobExecutionResult.java @@ -18,7 +18,7 @@ package org.apache.flink.api.common; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import java.util.Collections; @@ -104,7 +104,7 @@ public Map getAllAccumulatorResults() { * @throws java.lang.ClassCastException Thrown, if the accumulator was not aggregating a {@link java.lang.Integer} */ @Deprecated - @Experimental + @PublicEvolving public Integer getIntCounterResult(String accumulatorName) { Object result = this.accumulatorResults.get(accumulatorName); if (result == null) { diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/IterationRuntimeContext.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/IterationRuntimeContext.java index 5dc4ec8f14b8d..0019c32295779 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/functions/IterationRuntimeContext.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/IterationRuntimeContext.java @@ -18,7 +18,7 @@ package org.apache.flink.api.common.functions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.aggregators.Aggregator; import org.apache.flink.types.Value; @@ -33,7 +33,7 @@ public interface IterationRuntimeContext extends RuntimeContext { */ int getSuperstepNumber(); - @Experimental + @PublicEvolving > T getIterationAggregator(String name); T getPreviousIterationAggregate(String name); diff --git 
a/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java index c96ecde3bf81d..86ca789d7a84e 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Map; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.accumulators.Accumulator; @@ -125,31 +125,31 @@ public interface RuntimeContext { * @deprecated Use getAccumulator(..) to obtain the value of an accumulator. */ @Deprecated - @Experimental + @PublicEvolving Map> getAllAccumulators(); /** * Convenience function to create a counter object for integers. */ - @Experimental + @PublicEvolving IntCounter getIntCounter(String name); /** * Convenience function to create a counter object for longs. */ - @Experimental + @PublicEvolving LongCounter getLongCounter(String name); /** * Convenience function to create a counter object for doubles. */ - @Experimental + @PublicEvolving DoubleCounter getDoubleCounter(String name); /** * Convenience function to create a counter object for histograms. */ - @Experimental + @PublicEvolving Histogram getHistogram(String name); // -------------------------------------------------------------------------------------------- @@ -239,7 +239,7 @@ public interface RuntimeContext { * @throws UnsupportedOperationException Thrown, if no partitioned state is available for the * function (function is not part of a KeyedStream). */ - @Experimental + @PublicEvolving ValueState getState(ValueStateDescriptor stateProperties); /** @@ -283,7 +283,7 @@ public interface RuntimeContext { * @throws UnsupportedOperationException Thrown, if no partitioned state is available for the * function (function is not part of a KeyedStream). */ - @Experimental + @PublicEvolving ListState getListState(ListStateDescriptor stateProperties); /** @@ -323,7 +323,7 @@ public interface RuntimeContext { * @throws UnsupportedOperationException Thrown, if no partitioned state is available for the * function (function is not part of a KeyedStream). */ - @Experimental + @PublicEvolving ReducingState getReducingState(ReducingStateDescriptor stateProperties); /** @@ -383,7 +383,7 @@ public interface RuntimeContext { * @deprecated Use the more expressive {@link #getState(ValueStateDescriptor)} instead. */ @Deprecated - @Experimental + @PublicEvolving OperatorState getKeyValueState(String name, Class stateType, S defaultState); /** @@ -436,6 +436,6 @@ public interface RuntimeContext { * @deprecated Use the more expressive {@link #getState(ValueStateDescriptor)} instead.
*/ @Deprecated - @Experimental + @PublicEvolving OperatorState getKeyValueState(String name, TypeInformation stateType, S defaultState); } diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/util/AbstractRuntimeUDFContext.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/util/AbstractRuntimeUDFContext.java index a8078fbb459e0..a300f38e395df 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/functions/util/AbstractRuntimeUDFContext.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/util/AbstractRuntimeUDFContext.java @@ -24,7 +24,7 @@ import java.util.concurrent.Future; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.TaskInfo; import org.apache.flink.api.common.accumulators.Accumulator; @@ -176,21 +176,21 @@ private Accumulator getAccumulator(String name } @Override - @Experimental + @PublicEvolving public ValueState getState(ValueStateDescriptor stateProperties) { throw new UnsupportedOperationException( "This state is only accessible by functions executed on a KeyedStream"); } @Override - @Experimental + @PublicEvolving public ListState getListState(ListStateDescriptor stateProperties) { throw new UnsupportedOperationException( "This state is only accessible by functions executed on a KeyedStream"); } @Override - @Experimental + @PublicEvolving public ReducingState getReducingState(ReducingStateDescriptor stateProperties) { throw new UnsupportedOperationException( "This state is only accessible by functions executed on a KeyedStream"); @@ -198,7 +198,7 @@ public ReducingState getReducingState(ReducingStateDescriptor statePro @Override @Deprecated - @Experimental + @PublicEvolving public OperatorState getKeyValueState(String name, Class stateType, S defaultState) { throw new UnsupportedOperationException( "This state is only accessible by functions executed on a KeyedStream"); @@ -206,7 +206,7 @@ public OperatorState getKeyValueState(String name, Class stateType, S @Override @Deprecated - @Experimental + @PublicEvolving public OperatorState getKeyValueState(String name, TypeInformation stateType, S defaultState) { throw new UnsupportedOperationException( "This state is only accessible by functions executed on a KeyedStream"); diff --git a/flink-core/src/main/java/org/apache/flink/api/common/io/statistics/BaseStatistics.java b/flink-core/src/main/java/org/apache/flink/api/common/io/statistics/BaseStatistics.java index cb194ae43fda1..9411355b91c20 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/io/statistics/BaseStatistics.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/io/statistics/BaseStatistics.java @@ -19,7 +19,7 @@ package org.apache.flink.api.common.io.statistics; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; /** @@ -31,19 +31,19 @@ public interface BaseStatistics { /** * Constant indicating that the input size is unknown. */ - @Experimental + @PublicEvolving public static final long SIZE_UNKNOWN = -1; /** * Constant indicating that the number of records is unknown; */ - @Experimental + @PublicEvolving public static final long NUM_RECORDS_UNKNOWN = -1; /** * Constant indicating that average record width is unknown. 
*/ - @Experimental + @PublicEvolving public static final float AVG_RECORD_BYTES_UNKNOWN = -1.0f; // -------------------------------------------------------------------------------------------- @@ -53,7 +53,7 @@ public interface BaseStatistics { * * @return The total size of the input, in bytes. */ - @Experimental + @PublicEvolving public long getTotalInputSize(); /** @@ -61,7 +61,7 @@ public interface BaseStatistics { * * @return The number of records in the input. */ - @Experimental + @PublicEvolving public long getNumberOfRecords(); /** @@ -69,6 +69,6 @@ public interface BaseStatistics { * * @return The average width of a record in bytes. */ - @Experimental + @PublicEvolving public float getAverageRecordWidth(); } diff --git a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicArrayTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicArrayTypeInfo.java index 995c2fe0a31cb..2c61fb225b850 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicArrayTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicArrayTypeInfo.java @@ -23,7 +23,7 @@ import java.util.Objects; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeutils.TypeSerializer; @@ -61,54 +61,54 @@ private BasicArrayTypeInfo(Class arrayClass, BasicTypeInfo componentInfo) // -------------------------------------------------------------------------------------------- @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return this.arrayClass; } - @Experimental + @PublicEvolving public Class getComponentTypeClass() { return this.componentInfo.getTypeClass(); } - @Experimental + @PublicEvolving public TypeInformation getComponentInfo() { return componentInfo; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return false; } @Override @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { // special case the string array if (componentInfo.getTypeClass().equals(String.class)) { @@ -152,7 +152,7 @@ public String toString() { // -------------------------------------------------------------------------------------------- @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static BasicArrayTypeInfo getInfoFor(Class type) { if (!type.isArray()) { throw new InvalidTypesException("The given class is not an array."); diff --git a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java index 0b6b0671565aa..4eb70c12653bb 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java @@ -26,7 +26,7 @@ import java.util.Objects; import com.google.common.base.Preconditions; -import
org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.InvalidTypesException; @@ -100,7 +100,7 @@ protected BasicTypeInfo(Class clazz, Class[] possibleCastTargetTypes, Type * Returns whether this type should be automatically cast to * the target type in an arithmetic operation. */ - @Experimental + @PublicEvolving public boolean shouldAutocastTo(BasicTypeInfo to) { for (Class possibleTo: possibleCastTargetTypes) { if (possibleTo.equals(to.getTypeClass())) { @@ -111,49 +111,49 @@ public boolean shouldAutocastTo(BasicTypeInfo to) { } @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return true; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return this.clazz; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return true; } @Override - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { return this.serializer; } @Override - @Experimental + @PublicEvolving public TypeComparator createComparator(boolean sortOrderAscending, ExecutionConfig executionConfig) { if (comparatorClass != null) { return instantiateComparator(comparatorClass, sortOrderAscending); @@ -196,7 +196,7 @@ public String toString() { // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving public static BasicTypeInfo getInfoFor(Class type) { if (type == null) { throw new NullPointerException(); diff --git a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/NothingTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/NothingTypeInfo.java index 1e60265747dc8..033a0e91c19f0 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/NothingTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/NothingTypeInfo.java @@ -18,7 +18,7 @@ package org.apache.flink.api.common.typeinfo; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeutils.TypeSerializer; @@ -33,43 +33,43 @@ public class NothingTypeInfo extends TypeInformation { private static final long serialVersionUID = 1L; @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 0; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 0; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return Nothing.class; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return false; } @Override - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { throw new RuntimeException("The Nothing type cannot have a serializer."); } diff --git
a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/PrimitiveArrayTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/PrimitiveArrayTypeInfo.java index a4cf434d390a1..2c75458482eb7 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/PrimitiveArrayTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/PrimitiveArrayTypeInfo.java @@ -23,7 +23,7 @@ import java.util.Objects; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.InvalidTypesException; @@ -95,43 +95,43 @@ private PrimitiveArrayTypeInfo(Class arrayClass, TypeSerializer serializer // -------------------------------------------------------------------------------------------- @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return this.arrayClass; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return true; } @Override - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { return this.serializer; } @@ -140,7 +140,7 @@ public TypeSerializer createSerializer(ExecutionConfig executionConfig) { * Gets the class that represents the component type. * @return The class of the component type. */ - @Experimental + @PublicEvolving public Class getComponentClass() { return this.arrayClass.getComponentType(); } @@ -149,7 +149,7 @@ public Class getComponentClass() { * Gets the type information of the component type. * @return The type information of the component type. */ - @Experimental + @PublicEvolving public TypeInformation getComponentType() { return BasicTypeInfo.getInfoFor(getComponentClass()); } @@ -195,7 +195,7 @@ public boolean canEqual(Object obj) { * @throws InvalidTypesException Thrown, if the given class does not represent an array. 
*/ @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static PrimitiveArrayTypeInfo getInfoFor(Class type) { if (!type.isArray()) { throw new InvalidTypesException("The given class is not an array."); @@ -221,7 +221,7 @@ public static PrimitiveArrayTypeInfo getInfoFor(Class type) { } @Override - @Experimental + @PublicEvolving public PrimitiveArrayComparator createComparator(boolean sortOrderAscending, ExecutionConfig executionConfig) { try { return comparatorClass.getConstructor(boolean.class).newInstance(sortOrderAscending); diff --git a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/TypeInformation.java b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/TypeInformation.java index 8d96d34c80097..1c95be01b8be0 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/TypeInformation.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/TypeInformation.java @@ -18,7 +18,7 @@ package org.apache.flink.api.common.typeinfo; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeutils.TypeSerializer; @@ -81,7 +81,7 @@ public abstract class TypeInformation implements Serializable { * * @return True, if this type information describes a basic type, false otherwise. */ - @Experimental + @PublicEvolving public abstract boolean isBasicType(); /** @@ -90,7 +90,7 @@ public abstract class TypeInformation implements Serializable { * * @return True, if this type information describes a tuple type, false otherwise. */ - @Experimental + @PublicEvolving public abstract boolean isTupleType(); /** @@ -98,7 +98,7 @@ public abstract class TypeInformation implements Serializable { * * @return The number of fields in this type, without nesting. */ - @Experimental + @PublicEvolving public abstract int getArity(); /** @@ -109,7 +109,7 @@ public abstract class TypeInformation implements Serializable { * * @return The number of fields in this type, including its sub-fields (for composite types) */ - @Experimental + @PublicEvolving public abstract int getTotalFields(); /** @@ -117,7 +117,7 @@ public abstract class TypeInformation implements Serializable { * * @return The class of the type represented by this type information. */ - @Experimental + @PublicEvolving public abstract Class getTypeClass(); /** @@ -125,7 +125,7 @@ public abstract class TypeInformation implements Serializable { * * @return The list of generic parameters. This list can be empty. */ - @Experimental + @PublicEvolving public List> getGenericParameters() { // Return an empty list as the default implementation return new LinkedList<>(); @@ -137,14 +137,14 @@ public List> getGenericParameters() { * * @return True, if the type can be used as a key, false otherwise. */ - @Experimental + @PublicEvolving public abstract boolean isKeyType(); /** * Checks whether this type can be used as a key for sorting. * The order produced by sorting this type must be meaningful. */ - @Experimental + @PublicEvolving public boolean isSortKeyType() { return isKeyType(); } @@ -156,7 +156,7 @@ public boolean isSortKeyType() { * @param config The config used to parameterize the serializer. * @return A serializer for this type.
*/ - @Experimental + @PublicEvolving public abstract TypeSerializer createSerializer(ExecutionConfig config); @Override diff --git a/flink-core/src/main/java/org/apache/flink/api/common/typeutils/CompositeType.java b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/CompositeType.java index 959750a23231a..19b6eaf37935b 100644 --- a/flink-core/src/main/java/org/apache/flink/api/common/typeutils/CompositeType.java +++ b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/CompositeType.java @@ -23,7 +23,7 @@ import java.util.Objects; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.AtomicType; @@ -42,7 +42,7 @@ public abstract class CompositeType extends TypeInformation { private final Class typeClass; - @Experimental + @PublicEvolving public CompositeType(Class typeClass) { this.typeClass = Preconditions.checkNotNull(typeClass); } @@ -52,7 +52,7 @@ public CompositeType(Class typeClass) { * * @return Type class of the composite type */ - @Experimental + @PublicEvolving public Class getTypeClass() { return typeClass; } @@ -63,7 +63,7 @@ public Class getTypeClass() { * @param fieldExpression The field expression for which the flat field descriptors are computed. * @return The list of descriptors for the flat fields which are specified by the field expression. */ - @Experimental + @PublicEvolving public List getFlatFields(String fieldExpression) { List result = new ArrayList(); this.getFlatFields(fieldExpression, 0, result); @@ -77,7 +77,7 @@ public List getFlatFields(String fieldExpression) { * @param offset The offset to use when computing the positions of the flat fields. * @param result The list into which all flat field descriptors are inserted. */ - @Experimental + @PublicEvolving public abstract void getFlatFields(String fieldExpression, int offset, List result); /** @@ -87,7 +87,7 @@ public List getFlatFields(String fieldExpression) { * @param fieldExpression The field expression identifying the field whose type is returned. * @return The type of the field at the given field expression. */ - @Experimental + @PublicEvolving public abstract TypeInformation getTypeAt(String fieldExpression); /** @@ -96,10 +96,10 @@ public List getFlatFields(String fieldExpression) { * @param pos The position of the (unnested) field in this composite type. * @return The type of the field at the given position.
*/ - @Experimental + @PublicEvolving public abstract TypeInformation getTypeAt(int pos); - @Experimental + @PublicEvolving protected abstract TypeComparatorBuilder createTypeComparatorBuilder(); /** @@ -107,7 +107,7 @@ public List getFlatFields(String fieldExpression) { * to create the actual comparators * @return The comparator */ - @Experimental + @PublicEvolving public TypeComparator createComparator(int[] logicalKeyFields, boolean[] orders, int logicalFieldOffset, ExecutionConfig config) { TypeComparatorBuilder builder = createTypeComparatorBuilder(); @@ -169,7 +169,7 @@ else if (localFieldType instanceof CompositeType && // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving protected interface TypeComparatorBuilder { void initializeTypeComparatorBuilder(int size); @@ -178,7 +178,7 @@ protected interface TypeComparatorBuilder { TypeComparator createTypeComparator(ExecutionConfig config); } - @Experimental + @PublicEvolving public static class FlatFieldDescriptor { private int keyPosition; private TypeInformation type; @@ -209,13 +209,13 @@ public String toString() { /** * Returns true when this type has a composite field with the given name. */ - @Experimental + @PublicEvolving public boolean hasField(String fieldName) { return getFieldIndex(fieldName) >= 0; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { for(int i=0;i extends PojoTypeInfo { - @Experimental + @PublicEvolving public AvroTypeInfo(Class typeClass) { super(typeClass, generateFieldsFromAvroSchema(typeClass)); } diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EitherTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EitherTypeInfo.java index fdd101c26a446..058de129c532f 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EitherTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EitherTypeInfo.java @@ -18,7 +18,7 @@ package org.apache.flink.api.java.typeutils; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.TypeInformation; @@ -41,51 +41,51 @@ public class EitherTypeInfo extends TypeInformation> { private final TypeInformation rightType; - @Experimental + @PublicEvolving public EitherTypeInfo(TypeInformation leftType, TypeInformation rightType) { this.leftType = leftType; this.rightType = rightType; } @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public Class> getTypeClass() { return (Class>) (Class) Either.class; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return false; } @Override - @Experimental + @PublicEvolving public TypeSerializer> createSerializer(ExecutionConfig config) { return new EitherSerializer(leftType.createSerializer(config), rightType.createSerializer(config)); diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EnumTypeInfo.java 
b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EnumTypeInfo.java index eb8dd6d687bdf..aec3c1d98411e 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EnumTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/EnumTypeInfo.java @@ -19,7 +19,7 @@ package org.apache.flink.api.java.typeutils; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.AtomicType; import org.apache.flink.api.common.typeinfo.TypeInformation; @@ -41,7 +41,7 @@ public class EnumTypeInfo> extends TypeInformation implemen private final Class typeClass; - @Experimental + @PublicEvolving public EnumTypeInfo(Class typeClass) { Preconditions.checkNotNull(typeClass, "Enum type class must not be null."); @@ -53,49 +53,49 @@ public EnumTypeInfo(Class typeClass) { } @Override - @Experimental + @PublicEvolving public TypeComparator createComparator(boolean sortOrderAscending, ExecutionConfig executionConfig) { return new EnumComparator(sortOrderAscending); } @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return this.typeClass; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return true; } @Override - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { return new EnumSerializer(typeClass); } diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/GenericTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/GenericTypeInfo.java index 7e669281bfccd..0cca8bd634534 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/GenericTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/GenericTypeInfo.java @@ -19,7 +19,7 @@ package org.apache.flink.api.java.typeutils; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.AtomicType; @@ -37,56 +37,56 @@ public class GenericTypeInfo extends TypeInformation implements AtomicType private final Class typeClass; - @Experimental + @PublicEvolving public GenericTypeInfo(Class typeClass) { this.typeClass = Preconditions.checkNotNull(typeClass); } @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return typeClass; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return Comparable.class.isAssignableFrom(typeClass); } @Override - @Experimental + @PublicEvolving public TypeSerializer 
createSerializer(ExecutionConfig config) { return new KryoSerializer(this.typeClass, config); } @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public TypeComparator createComparator(boolean sortOrderAscending, ExecutionConfig executionConfig) { if (isKeyType()) { @SuppressWarnings("rawtypes") diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ObjectArrayTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ObjectArrayTypeInfo.java index 024fe598f6470..1e8fbe237535b 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ObjectArrayTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ObjectArrayTypeInfo.java @@ -20,7 +20,7 @@ import java.lang.reflect.Array; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.TypeInformation; @@ -45,50 +45,50 @@ private ObjectArrayTypeInfo(Class arrayType, TypeInformation componentInfo // -------------------------------------------------------------------------------------------- @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return arrayType; } - @Experimental + @PublicEvolving public TypeInformation getComponentInfo() { return componentInfo; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return false; } @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { return (TypeSerializer) new GenericArraySerializer( componentInfo.getTypeClass(), @@ -126,7 +126,7 @@ public int hashCode() { // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving public static ObjectArrayTypeInfo getInfoFor(Class arrayClass, TypeInformation componentInfo) { Preconditions.checkNotNull(arrayClass); Preconditions.checkNotNull(componentInfo); @@ -144,7 +144,7 @@ public static ObjectArrayTypeInfo getInfoFor(Class arrayClass, T * {@link java.lang.reflect.Type} or {@link java.lang.Class}. 
*/ @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static ObjectArrayTypeInfo getInfoFor(TypeInformation componentInfo) { Preconditions.checkNotNull(componentInfo); diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/PojoTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/PojoTypeInfo.java index 39d3bcd300ed5..be2a027217b4c 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/PojoTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/PojoTypeInfo.java @@ -29,7 +29,7 @@ import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.operators.Keys.ExpressionKeys; @@ -74,7 +74,7 @@ public class PojoTypeInfo extends CompositeType { private final int totalFields; - @Experimental + @PublicEvolving public PojoTypeInfo(Class typeClass, List fields) { super(typeClass); @@ -100,32 +100,32 @@ public int compare(PojoField o1, PojoField o2) { } @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return fields.length; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return totalFields; } @Override - @Experimental + @PublicEvolving public boolean isSortKeyType() { // Support for sorting POJOs that implement Comparable is not implemented yet. // Since the order of fields in a POJO type is not well defined, sorting on fields @@ -135,7 +135,7 @@ public boolean isSortKeyType() { @Override - @Experimental + @PublicEvolving public void getFlatFields(String fieldExpression, int offset, List result) { Matcher matcher = PATTERN_NESTED_FIELDS_WILDCARD.matcher(fieldExpression); @@ -212,7 +212,7 @@ public void getFlatFields(String fieldExpression, int offset, List TypeInformation getTypeAt(String fieldExpression) { Matcher matcher = PATTERN_NESTED_FIELDS.matcher(fieldExpression); @@ -253,7 +253,7 @@ public TypeInformation getTypeAt(String fieldExpression) { } @Override - @Experimental + @PublicEvolving public TypeInformation getTypeAt(int pos) { if (pos < 0 || pos >= this.fields.length) { throw new IndexOutOfBoundsException(); @@ -269,7 +269,7 @@ protected TypeComparatorBuilder createTypeComparatorBuilder() { } // used for testing. 
Maybe use mockito here - @Experimental + @PublicEvolving public PojoField getPojoFieldAt(int pos) { if (pos < 0 || pos >= this.fields.length) { throw new IndexOutOfBoundsException(); @@ -277,7 +277,7 @@ public PojoField getPojoFieldAt(int pos) { return this.fields[pos]; } - @Experimental + @PublicEvolving public String[] getFieldNames() { String[] result = new String[fields.length]; for (int i = 0; i < fields.length; i++) { @@ -287,7 +287,7 @@ public String[] getFieldNames() { } @Override - @Experimental + @PublicEvolving public int getFieldIndex(String fieldName) { for (int i = 0; i < fields.length; i++) { if (fields[i].getField().getName().equals(fieldName)) { @@ -298,7 +298,7 @@ public int getFieldIndex(String fieldName) { } @Override - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig config) { if(config.isForceKryoEnabled()) { return new KryoSerializer(getTypeClass(), config); diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TupleTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TupleTypeInfo.java index 4d1927d42e548..9ecbe734d7294 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TupleTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TupleTypeInfo.java @@ -24,7 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Ints; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.InvalidTypesException; @@ -53,12 +53,12 @@ public final class TupleTypeInfo extends TupleTypeInfoBase { protected final String[] fieldNames; @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public TupleTypeInfo(TypeInformation... types) { this((Class) Tuple.getTupleClass(types.length), types); } - @Experimental + @PublicEvolving public TupleTypeInfo(Class tupleType, TypeInformation... types) { super(tupleType, types); @@ -74,13 +74,13 @@ public TupleTypeInfo(Class tupleType, TypeInformation... types) { } @Override - @Experimental + @PublicEvolving public String[] getFieldNames() { return fieldNames; } @Override - @Experimental + @PublicEvolving public int getFieldIndex(String fieldName) { int fieldIndex = Integer.parseInt(fieldName.substring(1)); if (fieldIndex >= getArity()) { @@ -91,7 +91,7 @@ public int getFieldIndex(String fieldName) { @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public TupleSerializer createSerializer(ExecutionConfig executionConfig) { if (getTypeClass() == Tuple0.class) { return (TupleSerializer) Tuple0Serializer.INSTANCE; @@ -199,7 +199,7 @@ public String toString() { // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving public static TupleTypeInfo getBasicTupleTypeInfo(Class... basicTypes) { if (basicTypes == null || basicTypes.length == 0) { throw new IllegalArgumentException(); @@ -225,7 +225,7 @@ public static TupleTypeInfo getBasicTupleTypeInfo(Class. } @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static TupleTypeInfo getBasicAndBasicValueTupleTypeInfo(Class... 
basicTypes) { if (basicTypes == null || basicTypes.length == 0) { throw new IllegalArgumentException(); diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java index de3503b6ebf7b..01afe14bec689 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java @@ -32,7 +32,7 @@ import org.apache.avro.specific.SpecificRecordBase; import org.apache.commons.lang3.ClassUtils; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.functions.CoGroupFunction; @@ -108,12 +108,12 @@ protected TypeExtractor() { // Function specific methods // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving public static TypeInformation getMapReturnTypes(MapFunction mapInterface, TypeInformation inType) { return getMapReturnTypes(mapInterface, inType, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getMapReturnTypes(MapFunction mapInterface, TypeInformation inType, String functionName, boolean allowMissing) { @@ -121,74 +121,74 @@ public static TypeInformation getMapReturnTypes(MapFunction TypeInformation getFlatMapReturnTypes(FlatMapFunction flatMapInterface, TypeInformation inType) { return getFlatMapReturnTypes(flatMapInterface, inType, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getFlatMapReturnTypes(FlatMapFunction flatMapInterface, TypeInformation inType, String functionName, boolean allowMissing) { return getUnaryOperatorReturnType((Function) flatMapInterface, FlatMapFunction.class, false, true, inType, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getFoldReturnTypes(FoldFunction foldInterface, TypeInformation inType) { return getFoldReturnTypes(foldInterface, inType, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getFoldReturnTypes(FoldFunction foldInterface, TypeInformation inType, String functionName, boolean allowMissing) { return getUnaryOperatorReturnType((Function) foldInterface, FoldFunction.class, false, false, inType, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getMapPartitionReturnTypes(MapPartitionFunction mapPartitionInterface, TypeInformation inType) { return getMapPartitionReturnTypes(mapPartitionInterface, inType, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getMapPartitionReturnTypes(MapPartitionFunction mapPartitionInterface, TypeInformation inType, String functionName, boolean allowMissing) { return getUnaryOperatorReturnType((Function) mapPartitionInterface, MapPartitionFunction.class, true, true, inType, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getGroupReduceReturnTypes(GroupReduceFunction groupReduceInterface, TypeInformation inType) { return getGroupReduceReturnTypes(groupReduceInterface, inType, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getGroupReduceReturnTypes(GroupReduceFunction groupReduceInterface, TypeInformation inType, String functionName, boolean allowMissing) { return 
getUnaryOperatorReturnType((Function) groupReduceInterface, GroupReduceFunction.class, true, true, inType, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getGroupCombineReturnTypes(GroupCombineFunction combineInterface, TypeInformation inType) { return getGroupCombineReturnTypes(combineInterface, inType, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getGroupCombineReturnTypes(GroupCombineFunction combineInterface, TypeInformation inType, String functionName, boolean allowMissing) { return getUnaryOperatorReturnType((Function) combineInterface, GroupCombineFunction.class, true, true, inType, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getFlatJoinReturnTypes(FlatJoinFunction joinInterface, TypeInformation in1Type, TypeInformation in2Type) { return getFlatJoinReturnTypes(joinInterface, in1Type, in2Type, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getFlatJoinReturnTypes(FlatJoinFunction joinInterface, TypeInformation in1Type, TypeInformation in2Type, String functionName, boolean allowMissing) { @@ -196,14 +196,14 @@ public static TypeInformation getFlatJoinReturnTypes(FlatJo in1Type, in2Type, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getJoinReturnTypes(JoinFunction joinInterface, TypeInformation in1Type, TypeInformation in2Type) { return getJoinReturnTypes(joinInterface, in1Type, in2Type, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getJoinReturnTypes(JoinFunction joinInterface, TypeInformation in1Type, TypeInformation in2Type, String functionName, boolean allowMissing) { @@ -211,14 +211,14 @@ public static TypeInformation getJoinReturnTypes(JoinFuncti in1Type, in2Type, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getCoGroupReturnTypes(CoGroupFunction coGroupInterface, TypeInformation in1Type, TypeInformation in2Type) { return getCoGroupReturnTypes(coGroupInterface, in1Type, in2Type, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getCoGroupReturnTypes(CoGroupFunction coGroupInterface, TypeInformation in1Type, TypeInformation in2Type, String functionName, boolean allowMissing) { @@ -226,14 +226,14 @@ public static TypeInformation getCoGroupReturnTypes(CoGroup in1Type, in2Type, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getCrossReturnTypes(CrossFunction crossInterface, TypeInformation in1Type, TypeInformation in2Type) { return getCrossReturnTypes(crossInterface, in1Type, in2Type, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getCrossReturnTypes(CrossFunction crossInterface, TypeInformation in1Type, TypeInformation in2Type, String functionName, boolean allowMissing) { @@ -241,31 +241,31 @@ public static TypeInformation getCrossReturnTypes(CrossFunc in1Type, in2Type, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getKeySelectorTypes(KeySelector selectorInterface, TypeInformation inType) { return getKeySelectorTypes(selectorInterface, inType, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getKeySelectorTypes(KeySelector selectorInterface, TypeInformation inType, String functionName, boolean allowMissing) { return getUnaryOperatorReturnType((Function) selectorInterface, KeySelector.class, false, false, 
inType, functionName, allowMissing); } - @Experimental + @PublicEvolving public static TypeInformation getPartitionerTypes(Partitioner partitioner) { return getPartitionerTypes(partitioner, null, false); } - @Experimental + @PublicEvolving public static TypeInformation getPartitionerTypes(Partitioner partitioner, String functionName, boolean allowMissing) { return new TypeExtractor().privateCreateTypeInfo(Partitioner.class, partitioner.getClass(), 0, null, null); } @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static TypeInformation getInputFormatTypes(InputFormat inputFormatInterface) { if (inputFormatInterface instanceof ResultTypeQueryable) { return ((ResultTypeQueryable) inputFormatInterface).getProducedType(); @@ -278,7 +278,7 @@ public static TypeInformation getInputFormatTypes(InputFormat in // -------------------------------------------------------------------------------------------- @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static TypeInformation getUnaryOperatorReturnType(Function function, Class baseClass, boolean hasIterable, boolean hasCollector, TypeInformation inType, String functionName, boolean allowMissing) @@ -316,7 +316,7 @@ public static TypeInformation getUnaryOperatorReturnType(Function } @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static TypeInformation getBinaryOperatorReturnType(Function function, Class baseClass, boolean hasIterables, boolean hasCollector, TypeInformation in1Type, TypeInformation in2Type, String functionName, boolean allowMissing) @@ -388,7 +388,7 @@ public static TypeInformation createTypeInfo(Type t) { * @return type information */ @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public static TypeInformation createTypeInfo(Object instance, Class baseClass, Class clazz, int returnParamPos) { if (instance instanceof ResultTypeQueryable) { return ((ResultTypeQueryable) instance).getProducedType(); @@ -397,7 +397,7 @@ public static TypeInformation createTypeInfo(Object instance, Class TypeInformation createTypeInfo(Class baseClass, Class clazz, int returnParamPos, TypeInformation in1Type, TypeInformation in2Type) { TypeInformation ti = new TypeExtractor().privateCreateTypeInfo(baseClass, clazz, returnParamPos, in1Type, in2Type); @@ -790,7 +790,7 @@ private TypeInformation[] createSubTypesInfo(Type originalType, Pa // Extract type parameters // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving public static Type getParameterType(Class baseClass, Class clazz, int pos) { return getParameterType(baseClass, null, clazz, pos); } @@ -1581,7 +1581,7 @@ else if (typeHierarchy.size() <= 1) { * This is required because class.getFields() is not returning fields defined * in parent classes. 
*/ - @Experimental + @PublicEvolving public static List getAllDeclaredFields(Class clazz) { List result = new ArrayList(); while (clazz != null) { @@ -1601,7 +1601,7 @@ public static List getAllDeclaredFields(Class clazz) { return result; } - @Experimental + @PublicEvolving public static Field getDeclaredField(Class clazz, String name) { for (Field field : getAllDeclaredFields(clazz)) { if (field.getName().equals(name)) { diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ValueTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ValueTypeInfo.java index 9f30716b5a021..7c173c06b0104 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ValueTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/ValueTypeInfo.java @@ -19,7 +19,7 @@ package org.apache.flink.api.java.typeutils; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.InvalidTypesException; @@ -68,7 +68,7 @@ public class ValueTypeInfo extends TypeInformation implement private final Class type; - @Experimental + @PublicEvolving public ValueTypeInfo(Class type) { this.type = Preconditions.checkNotNull(type); @@ -78,30 +78,30 @@ public ValueTypeInfo(Class type) { } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return this.type; } @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } - @Experimental + @PublicEvolving public boolean isBasicValueType() { return type.equals(StringValue.class) || type.equals(ByteValue.class) || type.equals(ShortValue.class) || type.equals(CharValue.class) || type.equals(DoubleValue.class) || type.equals(FloatValue.class) || type.equals(IntValue.class) || type.equals(LongValue.class) || @@ -109,20 +109,20 @@ public boolean isBasicValueType() { } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return Comparable.class.isAssignableFrom(type); } @Override @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { if (CopyableValue.class.isAssignableFrom(type)) { return (TypeSerializer) createCopyableValueSerializer(type.asSubclass(CopyableValue.class)); @@ -134,7 +134,7 @@ public TypeSerializer createSerializer(ExecutionConfig executionConfig) { @SuppressWarnings({ "unchecked", "rawtypes" }) @Override - @Experimental + @PublicEvolving public TypeComparator createComparator(boolean sortOrderAscending, ExecutionConfig executionConfig) { if (!isKeyType()) { throw new RuntimeException("The type " + type.getName() + " is not Comparable."); @@ -185,7 +185,7 @@ public String toString() { // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving static TypeInformation getValueTypeInfo(Class typeClass) { if (Value.class.isAssignableFrom(typeClass) && !typeClass.equals(Value.class)) { return new ValueTypeInfo(typeClass); diff --git 
a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/WritableTypeInfo.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/WritableTypeInfo.java index 3899f180e0eab..5e3b2bc50e99f 100644 --- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/WritableTypeInfo.java +++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/WritableTypeInfo.java @@ -19,7 +19,7 @@ package org.apache.flink.api.java.typeutils; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.InvalidTypesException; @@ -44,7 +44,7 @@ public class WritableTypeInfo extends TypeInformation imp private final Class typeClass; - @Experimental + @PublicEvolving public WritableTypeInfo(Class typeClass) { this.typeClass = Preconditions.checkNotNull(typeClass); @@ -55,7 +55,7 @@ public WritableTypeInfo(Class typeClass) { @SuppressWarnings({ "rawtypes", "unchecked" }) @Override - @Experimental + @PublicEvolving public TypeComparator createComparator(boolean sortOrderAscending, ExecutionConfig executionConfig) { if(Comparable.class.isAssignableFrom(typeClass)) { return new WritableComparator(sortOrderAscending, typeClass); @@ -67,43 +67,43 @@ public TypeComparator createComparator(boolean sortOrderAscending, ExecutionC } @Override - @Experimental + @PublicEvolving public boolean isBasicType() { return false; } @Override - @Experimental + @PublicEvolving public boolean isTupleType() { return false; } @Override - @Experimental + @PublicEvolving public int getArity() { return 1; } @Override - @Experimental + @PublicEvolving public int getTotalFields() { return 1; } @Override - @Experimental + @PublicEvolving public Class getTypeClass() { return this.typeClass; } @Override - @Experimental + @PublicEvolving public boolean isKeyType() { return Comparable.class.isAssignableFrom(typeClass); } @Override - @Experimental + @PublicEvolving public TypeSerializer createSerializer(ExecutionConfig executionConfig) { return new WritableSerializer(typeClass); } @@ -139,7 +139,7 @@ public boolean canEqual(Object obj) { // -------------------------------------------------------------------------------------------- - @Experimental + @PublicEvolving static TypeInformation getWritableTypeInfo(Class typeClass) { if (Writable.class.isAssignableFrom(typeClass) && !typeClass.equals(Writable.class)) { return new WritableTypeInfo(typeClass); diff --git a/flink-java/src/main/java/org/apache/flink/api/java/DataSet.java b/flink-java/src/main/java/org/apache/flink/api/java/DataSet.java index e205bef487251..bfb97f4d8b7e8 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/DataSet.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/DataSet.java @@ -19,7 +19,7 @@ package org.apache.flink.api.java; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.JobExecutionResult; @@ -1642,7 +1642,7 @@ public DataSink printOnTaskManager(String prefix) { * @deprecated Use {@link #printOnTaskManager(String)} instead. 
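The two type-info classes retagged above, ValueTypeInfo and WritableTypeInfo, wrap Flink's mutable Value types and Hadoop's Writable types respectively; both constructors are public per the hunks. A small sketch, assuming flink-core and a Hadoop client dependency on the classpath:

    import org.apache.flink.api.common.ExecutionConfig;
    import org.apache.flink.api.java.typeutils.ValueTypeInfo;
    import org.apache.flink.api.java.typeutils.WritableTypeInfo;
    import org.apache.flink.types.IntValue;
    import org.apache.hadoop.io.Text;

    public class TypeInfoSketch {
        public static void main(String[] args) {
            ValueTypeInfo<IntValue> valueInfo = new ValueTypeInfo<>(IntValue.class);
            System.out.println(valueInfo.isBasicValueType()); // true: IntValue is a basic value type
            System.out.println(valueInfo.isKeyType());        // true: IntValue is Comparable
            valueInfo.createSerializer(new ExecutionConfig()); // picks a specialized Value serializer

            // Text implements Writable and Comparable, so it also qualifies as a key type.
            WritableTypeInfo<Text> writableInfo = new WritableTypeInfo<>(Text.class);
            System.out.println(writableInfo.isKeyType()); // true
        }
    }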
*/ @Deprecated - @Experimental + @PublicEvolving public DataSink print(String sinkIdentifier) { return output(new PrintingOutputFormat(sinkIdentifier, false)); } @@ -1659,7 +1659,7 @@ public DataSink print(String sinkIdentifier) { * {@link PrintingOutputFormat} instead. */ @Deprecated - @Experimental + @PublicEvolving public DataSink printToErr(String sinkIdentifier) { return output(new PrintingOutputFormat(sinkIdentifier, true)); } diff --git a/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java b/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java index 7fc45b3dd1f8e..512fe4254a0d7 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java @@ -23,7 +23,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; @@ -188,7 +188,7 @@ public void setParallelism(int parallelism) { * * @param numberOfExecutionRetries The number of times the system will try to re-execute failed tasks. */ - @Experimental + @PublicEvolving public void setNumberOfExecutionRetries(int numberOfExecutionRetries) { config.setNumberOfExecutionRetries(numberOfExecutionRetries); } @@ -200,7 +200,7 @@ public void setNumberOfExecutionRetries(int numberOfExecutionRetries) { * * @return The number of times the system will try to re-execute failed tasks. */ - @Experimental + @PublicEvolving public int getNumberOfExecutionRetries() { return config.getNumberOfExecutionRetries(); } @@ -225,7 +225,7 @@ public JobExecutionResult getLastJobExecutionResult(){ * @return The JobID of this environment. * @see #getIdString() */ - @Experimental + @PublicEvolving public JobID getId() { return this.jobID; } @@ -236,7 +236,7 @@ public JobID getId() { * @return The JobID as a string. * @see #getId() */ - @Experimental + @PublicEvolving public String getIdString() { return this.jobID.toString(); } @@ -247,7 +247,7 @@ public String getIdString() { * * @param timeout The timeout, in seconds. */ - @Experimental + @PublicEvolving public void setSessionTimeout(long timeout) { throw new IllegalStateException("Support for sessions is currently disabled. " + "It will be enabled in future Flink versions."); @@ -265,7 +265,7 @@ public void setSessionTimeout(long timeout) { * * @return The session timeout, in seconds. */ - @Experimental + @PublicEvolving public long getSessionTimeout() { return sessionTimeout; } @@ -273,7 +273,7 @@ public long getSessionTimeout() { /** * Starts a new session, discarding the previous data flow and all of its intermediate results. */ - @Experimental + @PublicEvolving public abstract void startNewSession() throws Exception; // -------------------------------------------------------------------------------------------- @@ -558,7 +558,7 @@ public DataSource createInput(InputFormat inputFormat, TypeInformat * Creates a {@link DataSet} from the given {@link org.apache.hadoop.mapred.FileInputFormat}. The * given inputName is set on the given job. 
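The deprecated print(String) and printToErr(String) sinks above point to printOnTaskManager(String) as the replacement, which prefixes each output line with the given identifier on the TaskManagers. A usage sketch:

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;

    public class PrintSketch {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            DataSet<String> data = env.fromElements("a", "b", "c");
            // Preferred replacement: writes to the TaskManagers' stdout, prefixing each line.
            data.printOnTaskManager("debug");
            env.execute("print sketch");
        }
    }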
*/ - @Experimental + @PublicEvolving public DataSource> readHadoopFile(org.apache.hadoop.mapred.FileInputFormat mapredInputFormat, Class key, Class value, String inputPath, JobConf job) { DataSource> result = createHadoopInput(mapredInputFormat, key, value, job); @@ -571,7 +571,7 @@ public DataSource> readHadoopFile(org.apache.hadoop.mapred.Fi * Creates a {@link DataSet} from {@link org.apache.hadoop.mapred.SequenceFileInputFormat} * A {@link org.apache.hadoop.mapred.JobConf} with the given inputPath is created. */ - @Experimental + @PublicEvolving public DataSource> readSequenceFile(Class key, Class value, String inputPath) throws IOException { return readHadoopFile(new org.apache.hadoop.mapred.SequenceFileInputFormat(), key, value, inputPath); } @@ -580,7 +580,7 @@ public DataSource> readSequenceFile(Class key, Class va * Creates a {@link DataSet} from the given {@link org.apache.hadoop.mapred.FileInputFormat}. A * {@link org.apache.hadoop.mapred.JobConf} with the given inputPath is created. */ - @Experimental + @PublicEvolving public DataSource> readHadoopFile(org.apache.hadoop.mapred.FileInputFormat mapredInputFormat, Class key, Class value, String inputPath) { return readHadoopFile(mapredInputFormat, key, value, inputPath, new JobConf()); } @@ -588,7 +588,7 @@ public DataSource> readHadoopFile(org.apache.hadoop.mapred.Fi /** * Creates a {@link DataSet} from the given {@link org.apache.hadoop.mapred.InputFormat}. */ - @Experimental + @PublicEvolving public DataSource> createHadoopInput(org.apache.hadoop.mapred.InputFormat mapredInputFormat, Class key, Class value, JobConf job) { HadoopInputFormat hadoopInputFormat = new HadoopInputFormat<>(mapredInputFormat, key, value, job); @@ -599,7 +599,7 @@ public DataSource> createHadoopInput(org.apache.hadoop.mapred * Creates a {@link DataSet} from the given {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}. The * given inputName is set on the given job. */ - @Experimental + @PublicEvolving public DataSource> readHadoopFile(org.apache.hadoop.mapreduce.lib.input.FileInputFormat mapreduceInputFormat, Class key, Class value, String inputPath, Job job) throws IOException { DataSource> result = createHadoopInput(mapreduceInputFormat, key, value, job); @@ -613,7 +613,7 @@ public DataSource> readHadoopFile(org.apache.hadoop.mapreduce * Creates a {@link DataSet} from the given {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}. A * {@link org.apache.hadoop.mapreduce.Job} with the given inputPath is created. */ - @Experimental + @PublicEvolving public DataSource> readHadoopFile(org.apache.hadoop.mapreduce.lib.input.FileInputFormat mapreduceInputFormat, Class key, Class value, String inputPath) throws IOException { return readHadoopFile(mapreduceInputFormat, key, value, inputPath, Job.getInstance()); } @@ -621,7 +621,7 @@ public DataSource> readHadoopFile(org.apache.hadoop.mapreduce /** * Creates a {@link DataSet} from the given {@link org.apache.hadoop.mapreduce.InputFormat}. */ - @Experimental + @PublicEvolving public DataSource> createHadoopInput(org.apache.hadoop.mapreduce.InputFormat mapreduceInputFormat, Class key, Class value, Job job) { org.apache.flink.api.java.hadoop.mapreduce.HadoopInputFormat hadoopInputFormat = new org.apache.flink.api.java.hadoop.mapreduce.HadoopInputFormat<>(mapreduceInputFormat, key, value, job); @@ -1072,7 +1072,7 @@ public static ExecutionEnvironment getExecutionEnvironment() { * memory. parallelism will always be 1. This is useful during implementation and for debugging. 
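The Hadoop shortcuts retagged above wrap mapred/mapreduce input formats in Flink sources. A sketch of readSequenceFile, with a placeholder path and Hadoop's LongWritable/Text as the assumed key and value classes:

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;

    public class SequenceFileSketch {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            // The path is a placeholder; readSequenceFile wraps a mapred SequenceFileInputFormat.
            DataSet<Tuple2<LongWritable, Text>> input =
                    env.readSequenceFile(LongWritable.class, Text.class, "hdfs:///path/to/file");
            input.first(10).print();
        }
    }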
* @return A Collection Environment */ - @Experimental + @PublicEvolving public static CollectionEnvironment createCollectionsEnvironment(){ CollectionEnvironment ce = new CollectionEnvironment(); ce.setParallelism(1); diff --git a/flink-java/src/main/java/org/apache/flink/api/java/LocalEnvironment.java b/flink-java/src/main/java/org/apache/flink/api/java/LocalEnvironment.java index 8c7d6b8222ecb..d1fe298e346f9 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/LocalEnvironment.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/LocalEnvironment.java @@ -18,7 +18,7 @@ package org.apache.flink.api.java; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.JobExecutionResult; @@ -109,7 +109,7 @@ public String getExecutionPlan() throws Exception { } @Override - @Experimental + @PublicEvolving public void startNewSession() throws Exception { if (executor != null) { // we need to end the previous session diff --git a/flink-java/src/main/java/org/apache/flink/api/java/RemoteEnvironment.java b/flink-java/src/main/java/org/apache/flink/api/java/RemoteEnvironment.java index 5dd298810c3b2..223ebeea7eb69 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/RemoteEnvironment.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/RemoteEnvironment.java @@ -18,7 +18,7 @@ package org.apache.flink.api.java; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.JobExecutionResult; @@ -183,7 +183,7 @@ public String getExecutionPlan() throws Exception { } @Override - @Experimental + @PublicEvolving public void startNewSession() throws Exception { dispose(); jobID = JobID.generate(); diff --git a/flink-java/src/main/java/org/apache/flink/api/java/functions/FirstReducer.java b/flink-java/src/main/java/org/apache/flink/api/java/functions/FirstReducer.java index 2eda0778eed62..fdd114e10ba8d 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/functions/FirstReducer.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/functions/FirstReducer.java @@ -17,6 +17,7 @@ */ package org.apache.flink.api.java.functions; + import org.apache.flink.api.common.functions.GroupCombineFunction; import org.apache.flink.api.common.functions.GroupReduceFunction; import org.apache.flink.util.Collector; diff --git a/flink-java/src/main/java/org/apache/flink/api/java/functions/FunctionAnnotation.java b/flink-java/src/main/java/org/apache/flink/api/java/functions/FunctionAnnotation.java index dd00c312171a1..0ce518eac190a 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/functions/FunctionAnnotation.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/functions/FunctionAnnotation.java @@ -26,7 +26,7 @@ import java.util.HashSet; import java.util.Set; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.InvalidProgramException; @@ -310,7 +310,7 @@ public class FunctionAnnotation { */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) - @Experimental + @PublicEvolving public @interface ReadFields { String[] 
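createCollectionsEnvironment, retagged just above, runs a program on Java collections in the current JVM with parallelism fixed to 1, which is handy for tests and debugging. A sketch:

    import org.apache.flink.api.common.functions.MapFunction;
    import org.apache.flink.api.java.CollectionEnvironment;
    import org.apache.flink.api.java.ExecutionEnvironment;

    public class CollectionEnvSketch {
        public static void main(String[] args) throws Exception {
            // Single-threaded, in-memory execution; data must fit into the JVM heap.
            CollectionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
            env.fromElements(1, 2, 3)
               .map(new MapFunction<Integer, Integer>() {
                   @Override
                   public Integer map(Integer value) {
                       return value * 2;
                   }
               })
               .print();
        }
    }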
value(); } @@ -341,7 +341,7 @@ public class FunctionAnnotation { */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) - @Experimental + @PublicEvolving public @interface ReadFieldsFirst { String[] value(); } @@ -372,7 +372,7 @@ public class FunctionAnnotation { */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) - @Experimental + @PublicEvolving public @interface ReadFieldsSecond { String[] value(); } @@ -389,7 +389,7 @@ public class FunctionAnnotation { */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) - @Experimental + @PublicEvolving public @interface SkipCodeAnalysis { } diff --git a/flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java b/flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java index 9c6621df2b25e..3d656a4f7b253 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/io/CsvReader.java @@ -22,7 +22,7 @@ import java.util.Arrays; import org.apache.flink.annotation.Public; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.Utils; import org.apache.flink.api.java.operators.DataSource; @@ -110,7 +110,7 @@ public CsvReader lineDelimiter(String delimiter) { * @return The CSV reader instance itself, to allow for fluent function chaining. */ @Deprecated - @Experimental + @PublicEvolving public CsvReader fieldDelimiter(char delimiter) { this.fieldDelimiter = String.valueOf(delimiter); return this; diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java index 3b5ff2d28e704..e69e16e9ae9a7 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java @@ -20,7 +20,7 @@ import java.util.Arrays; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.InvalidProgramException; @@ -315,7 +315,7 @@ public ProjectCross projectSecond(int... second */ @SuppressWarnings({ "hiding", "unchecked" }) @Deprecated - @Experimental + @PublicEvolving public CrossOperator types(Class... 
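The read-field annotations retagged above declare which input fields a function actually reads and evaluates, which the optimizer can exploit. A sketch of @ReadFields on a MapFunction; the class itself is illustrative:

    import org.apache.flink.api.common.functions.MapFunction;
    import org.apache.flink.api.java.functions.FunctionAnnotation;
    import org.apache.flink.api.java.tuple.Tuple2;

    // Declares that only field 0 of the input tuple is read to compute the result.
    @FunctionAnnotation.ReadFields("f0")
    public class FirstFieldLength implements MapFunction<Tuple2<String, Integer>, Integer> {
        @Override
        public Integer map(Tuple2<String, Integer> value) {
            return value.f0.length();
        }
    }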
types) { TupleTypeInfo typeInfo = (TupleTypeInfo)this.getResultType(); diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java index e11f48904d4f3..37f6cc2df1db1 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java @@ -20,7 +20,7 @@ import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.io.OutputFormat; import org.apache.flink.api.common.operators.GenericDataSinkBase; @@ -112,7 +112,7 @@ public DataSink withParameters(Configuration parameters) { * @see Order */ @Deprecated - @Experimental + @PublicEvolving public DataSink sortLocalOutput(int field, Order order) { // get flat keys @@ -159,7 +159,7 @@ public DataSink sortLocalOutput(int field, Order order) { * @see Order */ @Deprecated - @Experimental + @PublicEvolving public DataSink sortLocalOutput(String fieldExpression, Order order) { int numFields; diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java index 4e6b5a468a5d8..af6f65bb3c94a 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java @@ -20,7 +20,7 @@ import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.io.InputFormat; import org.apache.flink.api.common.io.NonParallelInput; import org.apache.flink.api.common.operators.GenericDataSourceBase; @@ -113,7 +113,7 @@ public Configuration getParameters() { * * @return The SplitDataProperties for the InputSplits of this DataSource. */ - @Experimental + @PublicEvolving public SplitDataProperties getSplitDataProperties() { if(this.splitDataProperties == null) { this.splitDataProperties = new SplitDataProperties(this); diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java index 85f7fe840403a..cc1cd66a6cac9 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java @@ -20,7 +20,7 @@ import java.util.Arrays; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.aggregators.Aggregator; @@ -204,7 +204,7 @@ public int getParallelism() { * * @return The DeltaIteration itself, to allow chaining function calls. */ - @Experimental + @PublicEvolving public DeltaIteration registerAggregator(String name, Aggregator aggregator) { this.aggregators.registerAggregator(name, aggregator); return this; @@ -215,7 +215,7 @@ public DeltaIteration registerAggregator(String name, Aggregator aggr * * @return The registry with all aggregators. 
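registerAggregator and getAggregators on DeltaIteration, retagged above, expose per-superstep aggregates to the iteration runtime. A fragment-style sketch; the solution set and workset are assumed to come from the surrounding program:

    import org.apache.flink.api.common.aggregators.LongSumAggregator;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.operators.DeltaIteration;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class DeltaIterationSketch {
        public static DeltaIteration<Tuple2<Long, Double>, Tuple2<Long, Double>> configure(
                DataSet<Tuple2<Long, Double>> solutionSet,
                DataSet<Tuple2<Long, Double>> workset) {
            // Iterate at most 100 times, keyed on tuple field 0.
            DeltaIteration<Tuple2<Long, Double>, Tuple2<Long, Double>> iteration =
                    solutionSet.iterateDelta(workset, 100, 0);
            // Parallel tasks add to the aggregator; the runtime combines values per superstep.
            iteration.registerAggregator("updates", new LongSumAggregator());
            return iteration;
        }
    }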
*/ - @Experimental + @PublicEvolving public AggregatorRegistry getAggregators() { return this.aggregators; } diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java index 48c72bb8fecd7..c7ff6ab0cabee 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java @@ -19,7 +19,7 @@ package org.apache.flink.api.java.operators; import org.apache.flink.annotation.Public; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.aggregators.Aggregator; import org.apache.flink.api.common.aggregators.AggregatorRegistry; @@ -106,7 +106,7 @@ public int getMaxIterations() { * * @return The IterativeDataSet itself, to allow chaining function calls. */ - @Experimental + @PublicEvolving public IterativeDataSet registerAggregator(String name, Aggregator aggregator) { this.aggregators.registerAggregator(name, aggregator); return this; @@ -126,7 +126,7 @@ public IterativeDataSet registerAggregator(String name, Aggregator aggrega * * @return The IterativeDataSet itself, to allow chaining function calls. */ - @Experimental + @PublicEvolving public IterativeDataSet registerAggregationConvergenceCriterion( String name, Aggregator aggregator, ConvergenceCriterion convergenceCheck) { @@ -141,7 +141,7 @@ public IterativeDataSet registerAggregationConvergenceCrite * * @return The registry for aggregators. */ - @Experimental + @PublicEvolving public AggregatorRegistry getAggregators() { return aggregators; } diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java index 4274a4a8ea11f..1d1ec27928e38 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java @@ -24,7 +24,7 @@ import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.functions.FlatJoinFunction; import org.apache.flink.api.common.functions.JoinFunction; @@ -742,7 +742,7 @@ public ProjectJoin projectSecond(int... secondF */ @SuppressWarnings({ "unchecked", "hiding" }) @Deprecated - @Experimental + @PublicEvolving public JoinOperator types(Class... 
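registerAggregationConvergenceCriterion, retagged above, pairs an aggregator with a check that can end a bulk iteration before the maximum iteration count. A sketch that stops once a superstep reports zero changes; the step function is assumed to add the number of changed elements to the aggregator:

    import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
    import org.apache.flink.api.common.aggregators.LongSumAggregator;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.operators.IterativeDataSet;
    import org.apache.flink.types.LongValue;

    public class ConvergenceSketch {
        // Converged when no element changed in the last superstep.
        static class NoChanges implements ConvergenceCriterion<LongValue> {
            @Override
            public boolean isConverged(int iteration, LongValue changes) {
                return changes.getValue() == 0;
            }
        }

        public static IterativeDataSet<Long> configure(DataSet<Long> input) {
            IterativeDataSet<Long> iteration = input.iterate(1000);
            iteration.registerAggregationConvergenceCriterion(
                    "changes", new LongSumAggregator(), new NoChanges());
            return iteration;
        }
    }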
types) { TupleTypeInfo typeInfo = (TupleTypeInfo)this.getResultType(); diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java index 95fe5c828ae90..d8a583549eaf2 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java @@ -22,7 +22,7 @@ import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.operators.Operator; @@ -76,7 +76,7 @@ protected org.apache.flink.api.common.operators.base.MapOperatorBase ProjectOperator types(Class... types) { TupleTypeInfo typeInfo = (TupleTypeInfo)this.getResultType(); diff --git a/flink-java/src/main/java/org/apache/flink/api/java/utils/DataSetUtils.java b/flink-java/src/main/java/org/apache/flink/api/java/utils/DataSetUtils.java index 3834df1722b82..78e52319820e4 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/utils/DataSetUtils.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/utils/DataSetUtils.java @@ -19,7 +19,7 @@ package org.apache.flink.api.java.utils; import com.google.common.collect.Lists; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.JobExecutionResult; import org.apache.flink.api.common.functions.BroadcastVariableInitializer; import org.apache.flink.api.common.functions.RichMapPartitionFunction; @@ -43,7 +43,7 @@ * This class provides simple utility methods for zipping elements in a data set with an index * or with a unique identifier. 
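As the class comment just above describes, DataSetUtils zips elements with indices or unique identifiers. A sketch of zipWithIndex:

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.api.java.utils.DataSetUtils;

    public class ZipWithIndexSketch {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            DataSet<String> names = env.fromElements("a", "b", "c");
            // Assigns a unique, consecutive Long index to every element.
            DataSet<Tuple2<Long, String>> indexed = DataSetUtils.zipWithIndex(names);
            indexed.print();
        }
    }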
*/ -@Experimental +@PublicEvolving public final class DataSetUtils { /** diff --git a/flink-java/src/main/java/org/apache/flink/api/java/utils/ParameterTool.java b/flink-java/src/main/java/org/apache/flink/api/java/utils/ParameterTool.java index fb049f39df346..bfd6d12750c5f 100644 --- a/flink-java/src/main/java/org/apache/flink/api/java/utils/ParameterTool.java +++ b/flink-java/src/main/java/org/apache/flink/api/java/utils/ParameterTool.java @@ -20,7 +20,7 @@ import com.google.common.base.Preconditions; import org.apache.commons.cli.Option; import org.apache.commons.lang3.math.NumberUtils; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.configuration.Configuration; @@ -191,7 +191,7 @@ public static ParameterTool fromSystemProperties() { * @throws IOException If arguments cannot be parsed by {@link GenericOptionsParser} * @see GenericOptionsParser */ - @Experimental + @PublicEvolving public static ParameterTool fromGenericOptionsParser(String[] args) throws IOException { Option[] options = new GenericOptionsParser(args).getCommandLine().getOptions(); Map map = new HashMap(); diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala index b8cbbd2ee7f61..e47bc4200d74f 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala @@ -17,7 +17,7 @@ */ package org.apache.flink.api.scala -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.InvalidProgramException import org.apache.flink.api.common.accumulators.SerializedListAccumulator import org.apache.flink.api.common.aggregators.Aggregator @@ -190,7 +190,7 @@ class DataSet[T: ClassTag](set: JavaDataSet[T]) { * @param name The name under which the aggregator is registered. * @param aggregator The aggregator class. */ - @Experimental + @PublicEvolving def registerAggregator(name: String, aggregator: Aggregator[_]): DataSet[T] = { javaSet match { case di: DeltaIterationResultSet[_, _] => @@ -1632,7 +1632,7 @@ class DataSet[T: ClassTag](set: JavaDataSet[T]) { * @deprecated Use [[printOnTaskManager(String)]] instead. */ @deprecated - @Experimental + @PublicEvolving def print(sinkIdentifier: String): DataSink[T] = { output(new PrintingOutputFormat[T](sinkIdentifier, false)) } @@ -1645,7 +1645,7 @@ class DataSet[T: ClassTag](set: JavaDataSet[T]) { * @deprecated Use [[printOnTaskManager(String)]] instead. 
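fromGenericOptionsParser above is the Hadoop-flavored ParameterTool factory; fromArgs is the everyday one. A sketch of typical usage, with placeholder parameter names:

    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.utils.ParameterTool;

    public class ParameterToolSketch {
        public static void main(String[] args) throws Exception {
            // E.g. invoked with: --input /tmp/in --parallelism 4
            ParameterTool params = ParameterTool.fromArgs(args);
            String input = params.get("input", "/tmp/default-input");
            int parallelism = params.getInt("parallelism", 1);

            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            env.setParallelism(parallelism);
            // Makes the parameters visible in the web UI and to rich functions.
            env.getConfig().setGlobalJobParameters(params);
        }
    }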
*/ @deprecated - @Experimental + @PublicEvolving def printToErr(sinkIdentifier: String): DataSink[T] = { output(new PrintingOutputFormat[T](sinkIdentifier, true)) } diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/ExecutionEnvironment.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/ExecutionEnvironment.scala index a92750a74c0a0..b3d2430939ecd 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/ExecutionEnvironment.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/ExecutionEnvironment.scala @@ -19,7 +19,7 @@ package org.apache.flink.api.scala import com.esotericsoftware.kryo.Serializer import com.google.common.base.Preconditions -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.io.{FileInputFormat, InputFormat} import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation} import org.apache.flink.api.common.typeutils.CompositeType @@ -97,7 +97,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * effectively disables fault tolerance. A value of "-1" indicates that the system * default value (as defined in the configuration) should be used. */ - @Experimental + @PublicEvolving def setNumberOfExecutionRetries(numRetries: Int): Unit = { javaEnv.setNumberOfExecutionRetries(numRetries) } @@ -107,14 +107,14 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * of "-1" indicates that the system default value (as defined in the configuration) * should be used. */ - @Experimental + @PublicEvolving def getNumberOfExecutionRetries = javaEnv.getNumberOfExecutionRetries /** * Gets the UUID by which this environment is identified. The UUID sets the execution context * in the cluster or local environment. */ - @Experimental + @PublicEvolving def getId: JobID = { javaEnv.getId } @@ -127,7 +127,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { /** * Gets the UUID by which this environment is identified, as a string. */ - @Experimental + @PublicEvolving def getIdString: String = { javaEnv.getIdString } @@ -135,7 +135,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { /** * Starts a new session, discarding all intermediate results. */ - @Experimental + @PublicEvolving def startNewSession() { javaEnv.startNewSession() } @@ -143,9 +143,10 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { /** * Sets the session timeout to hold the intermediate results of a job. This only * applies the updated timeout in future executions. + * * @param timeout The timeout in seconds. */ - @Experimental + @PublicEvolving def setSessionTimeout(timeout: Long) { javaEnv.setSessionTimeout(timeout) } @@ -157,7 +158,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * * @return The session timeout, in seconds. */ - @Experimental + @PublicEvolving def getSessionTimeout: Long = { javaEnv.getSessionTimeout } @@ -383,7 +384,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * Creates a [[DataSet]] from the given [[org.apache.hadoop.mapred.FileInputFormat]]. The * given inputName is set on the given job. */ - @Experimental + @PublicEvolving def readHadoopFile[K, V]( mapredInputFormat: MapredFileInputFormat[K, V], key: Class[K], @@ -400,7 +401,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * Creates a [[DataSet]] from the given [[org.apache.hadoop.mapred.FileInputFormat]]. A * [[org.apache.hadoop.mapred.JobConf]] with the given inputPath is created. 
*/ - @Experimental + @PublicEvolving def readHadoopFile[K, V]( mapredInputFormat: MapredFileInputFormat[K, V], key: Class[K], @@ -414,7 +415,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * Creates a [[DataSet]] from [[org.apache.hadoop.mapred.SequenceFileInputFormat]] * A [[org.apache.hadoop.mapred.JobConf]] with the given inputPath is created. */ - @Experimental + @PublicEvolving def readSequenceFile[K, V]( key: Class[K], value: Class[V], @@ -427,7 +428,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { /** * Creates a [[DataSet]] from the given [[org.apache.hadoop.mapred.InputFormat]]. */ - @Experimental + @PublicEvolving def createHadoopInput[K, V]( mapredInputFormat: MapredInputFormat[K, V], key: Class[K], @@ -442,7 +443,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * Creates a [[DataSet]] from the given [[org.apache.hadoop.mapreduce.lib.input.FileInputFormat]]. * The given inputName is set on the given job. */ - @Experimental + @PublicEvolving def readHadoopFile[K, V]( mapreduceInputFormat: MapreduceFileInputFormat[K, V], key: Class[K], @@ -460,7 +461,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { * [[org.apache.hadoop.mapreduce.lib.input.FileInputFormat]]. A * [[org.apache.hadoop.mapreduce.Job]] with the given inputPath will be created. */ - @Experimental + @PublicEvolving def readHadoopFile[K, V]( mapreduceInputFormat: MapreduceFileInputFormat[K, V], key: Class[K], @@ -473,7 +474,7 @@ class ExecutionEnvironment(javaEnv: JavaEnv) { /** * Creates a [[DataSet]] from the given [[org.apache.hadoop.mapreduce.InputFormat]]. */ - @Experimental + @PublicEvolving def createHadoopInput[K, V]( mapreduceInputFormat: MapreduceInputFormat[K, V], key: Class[K], @@ -686,9 +687,10 @@ object ExecutionEnvironment { * Creates an execution environment that uses Java Collections underneath. This will execute in a * single thread in the current JVM. It is very fast but will fail if the data does not fit into * memory. This is useful during implementation and for debugging. 
+ * * @return */ - @Experimental + @PublicEvolving def createCollectionsEnvironment: ExecutionEnvironment = { new ExecutionEnvironment(new CollectionEnvironment) } diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala index eb41b4b75e53f..d658fdec3ce0d 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala @@ -21,7 +21,7 @@ package org.apache.flink.api.scala.typeutils import java.util import java.util.regex.{Pattern, Matcher} -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.operators.Keys import org.apache.flink.api.common.typeinfo.TypeInformation @@ -46,7 +46,7 @@ abstract class CaseClassTypeInfo[T <: Product]( val fieldNames: Seq[String]) extends TupleTypeInfoBase[T](clazz, fieldTypes: _*) { - @Experimental + @PublicEvolving override def getGenericParameters: java.util.List[TypeInformation[_]] = { typeParamTypeInfos.toList.asJava } @@ -63,12 +63,12 @@ abstract class CaseClassTypeInfo[T <: Product]( Pattern.compile(REGEX_NESTED_FIELDS_WILDCARD) private val PATTERN_INT_FIELD: Pattern = Pattern.compile(REGEX_INT_FIELD) - @Experimental + @PublicEvolving def getFieldIndices(fields: Array[String]): Array[Int] = { fields map { x => fieldNames.indexOf(x) } } - @Experimental + @PublicEvolving override def getFlatFields( fieldExpression: String, offset: Int, @@ -150,7 +150,7 @@ abstract class CaseClassTypeInfo[T <: Product]( } } - @Experimental + @PublicEvolving override def getTypeAt[X](fieldExpression: String) : TypeInformation[X] = { val matcher: Matcher = PATTERN_NESTED_FIELDS.matcher(fieldExpression) @@ -193,10 +193,10 @@ abstract class CaseClassTypeInfo[T <: Product]( "\" in type " + this + ".") } - @Experimental + @PublicEvolving override def getFieldNames: Array[String] = fieldNames.toArray - @Experimental + @PublicEvolving override def getFieldIndex(fieldName: String): Int = { val result = fieldNames.indexOf(fieldName) if (result != fieldNames.lastIndexOf(fieldName)) { @@ -206,7 +206,7 @@ abstract class CaseClassTypeInfo[T <: Product]( } } - @Experimental + @PublicEvolving override def createTypeComparatorBuilder(): TypeComparatorBuilder[T] = { new CaseClassTypeComparatorBuilder } diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EitherTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EitherTypeInfo.scala index cb39e7b90fa1c..406f0735115b9 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EitherTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EitherTypeInfo.scala @@ -17,7 +17,7 @@ */ package org.apache.flink.api.scala.typeutils -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer @@ -34,22 +34,22 @@ class EitherTypeInfo[A, B, T <: Either[A, B]]( val rightTypeInfo: TypeInformation[B]) extends TypeInformation[T] { - @Experimental + @PublicEvolving override def isBasicType: Boolean = false - @Experimental + 
@PublicEvolving override def isTupleType: Boolean = false - @Experimental + @PublicEvolving override def isKeyType: Boolean = false - @Experimental + @PublicEvolving override def getTotalFields: Int = 1 - @Experimental + @PublicEvolving override def getArity: Int = 1 - @Experimental + @PublicEvolving override def getTypeClass = clazz - @Experimental + @PublicEvolving override def getGenericParameters = List[TypeInformation[_]](leftTypeInfo, rightTypeInfo).asJava - @Experimental + @PublicEvolving def createSerializer(executionConfig: ExecutionConfig): TypeSerializer[T] = { val leftSerializer = if (leftTypeInfo != null) { leftTypeInfo.createSerializer(executionConfig) diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EnumValueTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EnumValueTypeInfo.scala index 79a2866c0e205..92d2704b5500d 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EnumValueTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/EnumValueTypeInfo.scala @@ -17,7 +17,7 @@ */ package org.apache.flink.api.scala.typeutils -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.typeinfo.{AtomicType, TypeInformation} import org.apache.flink.api.common.typeutils.{TypeComparator, TypeSerializer} @@ -33,28 +33,28 @@ class EnumValueTypeInfo[E <: Enumeration](val enum: E, val clazz: Class[E#Value] type T = E#Value - @Experimental + @PublicEvolving override def isBasicType: Boolean = false - @Experimental + @PublicEvolving override def isTupleType: Boolean = false - @Experimental + @PublicEvolving override def isKeyType: Boolean = true - @Experimental + @PublicEvolving override def getTotalFields: Int = 1 - @Experimental + @PublicEvolving override def getArity: Int = 1 - @Experimental + @PublicEvolving override def getTypeClass = clazz - @Experimental + @PublicEvolving override def getGenericParameters = List.empty[TypeInformation[_]].asJava - @Experimental + @PublicEvolving def createSerializer(executionConfig: ExecutionConfig): TypeSerializer[T] = { new EnumValueSerializer[E](enum) } - @Experimental + @PublicEvolving override def createComparator(ascOrder: Boolean, config: ExecutionConfig): TypeComparator[T] = { new EnumValueComparator[E](ascOrder) } diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/OptionTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/OptionTypeInfo.scala index df12955719ba2..70db4fa8ddc2f 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/OptionTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/OptionTypeInfo.scala @@ -17,7 +17,7 @@ */ package org.apache.flink.api.scala.typeutils -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer @@ -31,23 +31,23 @@ import scala.collection.JavaConverters._ class OptionTypeInfo[A, T <: Option[A]](private val elemTypeInfo: TypeInformation[A]) extends TypeInformation[T] { - @Experimental + @PublicEvolving override def isBasicType: Boolean = false - @Experimental + @PublicEvolving override def isTupleType: Boolean = false - 
@Experimental + @PublicEvolving override def isKeyType: Boolean = false - @Experimental + @PublicEvolving override def getTotalFields: Int = 1 - @Experimental + @PublicEvolving override def getArity: Int = 1 - @Experimental + @PublicEvolving override def getTypeClass = classOf[Option[_]].asInstanceOf[Class[T]] - @Experimental + @PublicEvolving override def getGenericParameters = List[TypeInformation[_]](elemTypeInfo).asJava - @Experimental + @PublicEvolving def createSerializer(executionConfig: ExecutionConfig): TypeSerializer[T] = { if (elemTypeInfo == null) { // this happens when the type of a DataSet is None, i.e. DataSet[None] diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/ScalaNothingTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/ScalaNothingTypeInfo.scala index b0f760af2915e..bc2aabfffc209 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/ScalaNothingTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/ScalaNothingTypeInfo.scala @@ -17,7 +17,7 @@ */ package org.apache.flink.api.scala.typeutils -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer @@ -25,20 +25,20 @@ import org.apache.flink.api.common.typeutils.TypeSerializer @Public class ScalaNothingTypeInfo extends TypeInformation[Nothing] { - @Experimental + @PublicEvolving override def isBasicType: Boolean = false - @Experimental + @PublicEvolving override def isTupleType: Boolean = false - @Experimental + @PublicEvolving override def getArity: Int = 0 - @Experimental + @PublicEvolving override def getTotalFields: Int = 0 - @Experimental + @PublicEvolving override def getTypeClass: Class[Nothing] = classOf[Nothing] - @Experimental + @PublicEvolving override def isKeyType: Boolean = false - @Experimental + @PublicEvolving override def createSerializer(config: ExecutionConfig): TypeSerializer[Nothing] = (new NothingSerializer).asInstanceOf[TypeSerializer[Nothing]] diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TraversableTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TraversableTypeInfo.scala index 855caa9e9e839..82fd8ae5d80c7 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TraversableTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TraversableTypeInfo.scala @@ -17,7 +17,7 @@ */ package org.apache.flink.api.scala.typeutils -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer @@ -33,22 +33,22 @@ abstract class TraversableTypeInfo[T <: TraversableOnce[E], E]( val elementTypeInfo: TypeInformation[E]) extends TypeInformation[T] { - @Experimental + @PublicEvolving override def isBasicType: Boolean = false - @Experimental + @PublicEvolving override def isTupleType: Boolean = false - @Experimental + @PublicEvolving override def isKeyType: Boolean = false - @Experimental + @PublicEvolving override def getTotalFields: Int = 1 - @Experimental + @PublicEvolving override def getArity: Int = 1 - @Experimental + @PublicEvolving 
override def getTypeClass: Class[T] = clazz - @Experimental + @PublicEvolving override def getGenericParameters = List[TypeInformation[_]](elementTypeInfo).asJava - @Experimental + @PublicEvolving def createSerializer(executionConfig: ExecutionConfig): TypeSerializer[T] override def equals(other: Any): Boolean = { diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TryTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TryTypeInfo.scala index 880c63670c40d..0a5a06da87caa 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TryTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/TryTypeInfo.scala @@ -17,7 +17,7 @@ */ package org.apache.flink.api.scala.typeutils -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer @@ -33,22 +33,22 @@ import scala.util.Try class TryTypeInfo[A, T <: Try[A]](val elemTypeInfo: TypeInformation[A]) extends TypeInformation[T] { - @Experimental + @PublicEvolving override def isBasicType: Boolean = false - @Experimental + @PublicEvolving override def isTupleType: Boolean = false - @Experimental + @PublicEvolving override def isKeyType: Boolean = false - @Experimental + @PublicEvolving override def getTotalFields: Int = 1 - @Experimental + @PublicEvolving override def getArity: Int = 1 - @Experimental + @PublicEvolving override def getTypeClass = classOf[Try[_]].asInstanceOf[Class[T]] - @Experimental + @PublicEvolving override def getGenericParameters = List[TypeInformation[_]](elemTypeInfo).asJava - @Experimental + @PublicEvolving def createSerializer(executionConfig: ExecutionConfig): TypeSerializer[T] = { if (elemTypeInfo == null) { // this happens when the type of a DataSet is None, i.e. 
DataSet[Failure] diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/UnitTypeInfo.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/UnitTypeInfo.scala index fa46a8a2a85d4..5d4a44384e023 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/UnitTypeInfo.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/UnitTypeInfo.scala @@ -17,27 +17,27 @@ */ package org.apache.flink.api.scala.typeutils -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer @Public class UnitTypeInfo extends TypeInformation[Unit] { - @Experimental + @PublicEvolving override def isBasicType(): Boolean = false - @Experimental + @PublicEvolving override def isTupleType(): Boolean = false - @Experimental + @PublicEvolving override def getArity(): Int = 0 - @Experimental + @PublicEvolving override def getTotalFields(): Int = 0 - @Experimental + @PublicEvolving override def getTypeClass(): Class[Unit] = classOf[Unit] - @Experimental + @PublicEvolving override def isKeyType(): Boolean = false - @Experimental + @PublicEvolving override def createSerializer(config: ExecutionConfig): TypeSerializer[Unit] = (new UnitSerializer).asInstanceOf[TypeSerializer[Unit]] diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/utils/package.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/utils/package.scala index 7a03053f7d89d..6407093cb58d2 100644 --- a/flink-scala/src/main/scala/org/apache/flink/api/scala/utils/package.scala +++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/utils/package.scala @@ -18,7 +18,7 @@ package org.apache.flink.api.scala -import org.apache.flink.annotation.Experimental +import org.apache.flink.annotation.PublicEvolving import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation} import org.apache.flink.api.java.Utils import org.apache.flink.api.java.Utils.ChecksumHashCode @@ -37,7 +37,7 @@ package object utils { * * @param self Data Set */ - @Experimental + @PublicEvolving implicit class DataSetUtils[T: TypeInformation : ClassTag](val self: DataSet[T]) { /** @@ -118,7 +118,6 @@ package object utils { * as well as the checksum (sum over element hashes). * * @return A ChecksumHashCode with the count and checksum of elements in the data set. 
- * * @see [[org.apache.flink.api.java.Utils.ChecksumHashCodeHelper]] */ def checksumHashCode(): ChecksumHashCode = { diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/AllWindowedStream.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/AllWindowedStream.java index b7dc79579c658..2902795267ee0 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/AllWindowedStream.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/AllWindowedStream.java @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.functions.FoldFunction; import org.apache.flink.api.common.functions.Function; @@ -84,7 +84,7 @@ public class AllWindowedStream { private Evictor evictor; - @Experimental + @PublicEvolving public AllWindowedStream(DataStream input, WindowAssigner windowAssigner) { this.input = input; @@ -95,7 +95,7 @@ public AllWindowedStream(DataStream input, /** * Sets the {@code Trigger} that should be used to trigger window emission. */ - @Experimental + @PublicEvolving public AllWindowedStream trigger(Trigger trigger) { this.trigger = trigger; return this; @@ -108,7 +108,7 @@ public AllWindowedStream trigger(Trigger trigger) { * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. */ - @Experimental + @PublicEvolving public AllWindowedStream evictor(Evictor evictor) { this.evictor = evictor; return this; diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/CoGroupedStreams.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/CoGroupedStreams.java index b552a264c0815..9e2bc5d4f1d58 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/CoGroupedStreams.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/CoGroupedStreams.java @@ -17,7 +17,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; @@ -145,7 +145,7 @@ public class EqualTo { /** * Specifies the window on which the co-group operation works. */ - @Experimental + @PublicEvolving public WithWindow window(WindowAssigner, W> assigner) { return new WithWindow<>(input1, input2, keySelector1, keySelector2, keyType, assigner, null, null); } @@ -202,7 +202,7 @@ protected WithWindow(DataStream input1, /** * Sets the {@code Trigger} that should be used to trigger window emission. */ - @Experimental + @PublicEvolving public WithWindow trigger(Trigger, ? super W> newTrigger) { return new WithWindow<>(input1, input2, keySelector1, keySelector2, keyType, windowAssigner, newTrigger, evictor); @@ -215,7 +215,7 @@ public WithWindow trigger(Trigger, ? * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. */ - @Experimental + @PublicEvolving public WithWindow evictor(Evictor, ? 
super W> newEvictor) { return new WithWindow<>(input1, input2, keySelector1, keySelector2, keyType, windowAssigner, trigger, newEvictor); diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/ConnectedStreams.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/ConnectedStreams.java index 0d3064d24b780..b340e6ed89621 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/ConnectedStreams.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/ConnectedStreams.java @@ -17,7 +17,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.Utils; @@ -308,7 +308,7 @@ CoFlatMapFunction.class, false, true, getType1(), getType2(), return transform("Co-Flat Map", outTypeInfo, new CoStreamFlatMap<>(inputStream1.clean(coFlatMapper))); } - @Experimental + @PublicEvolving public SingleOutputStreamOperator transform(String functionName, TypeInformation outTypeInfo, TwoInputStreamOperator operator) { diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java index 3eae2e82fc443..64d0821a50c4b 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java @@ -21,7 +21,7 @@ import java.util.Collection; import java.util.List; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; @@ -406,7 +406,7 @@ public DataStream broadcast() { * * @return The DataStream with shuffle partitioning set. */ - @Experimental + @PublicEvolving public DataStream shuffle() { return setConnectionType(new ShufflePartitioner()); } @@ -452,7 +452,7 @@ public DataStream rebalance() { * * @return The DataStream with rescale partitioning set. */ - @Experimental + @PublicEvolving public DataStream rescale() { return setConnectionType(new RescalePartitioner()); } @@ -465,7 +465,7 @@ public DataStream rescale() { * * @return The DataStream with shuffle partitioning set. */ - @Experimental + @PublicEvolving public DataStream global() { return setConnectionType(new GlobalPartitioner()); } @@ -497,7 +497,7 @@ public DataStream global() { * * @return The iterative data stream created. */ - @Experimental + @PublicEvolving public IterativeStream iterate() { return new IterativeStream(this, 0); } @@ -533,7 +533,7 @@ public IterativeStream iterate() { * * @return The iterative data stream created. */ - @Experimental + @PublicEvolving public IterativeStream iterate(long maxWaitTimeMillis) { return new IterativeStream(this, maxWaitTimeMillis); } @@ -621,7 +621,7 @@ public IterativeStream iterate(long maxWaitTimeMillis) { * @see Tuple * @see DataStream */ - @Experimental + @PublicEvolving public SingleOutputStreamOperator project(int... 
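The trigger and evictor setters in the AllWindowedStream and CoGroupedStreams hunks above customize when a window fires and which elements it retains; as the Javadoc warns, an evictor rules out pre-aggregation. A sketch, assuming TumblingTimeWindows, CountTrigger and CountEvictor exist under these names in this version of the windowing API:

    import org.apache.flink.api.common.functions.ReduceFunction;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.api.windowing.assigners.TumblingTimeWindows;
    import org.apache.flink.streaming.api.windowing.evictors.CountEvictor;
    import org.apache.flink.streaming.api.windowing.time.Time;
    import org.apache.flink.streaming.api.windowing.triggers.CountTrigger;

    public class WindowAllSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.generateSequence(0, 1000)
               .windowAll(TumblingTimeWindows.of(Time.seconds(5)))
               .trigger(CountTrigger.of(100))  // fire once 100 elements have arrived
               .evictor(CountEvictor.of(10))   // keep only the last 10 elements when firing
               .reduce(new ReduceFunction<Long>() {
                   @Override
                   public Long reduce(Long a, Long b) {
                       return a + b;
                   }
               })
               .print();
            env.execute("windowAll sketch");
        }
    }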
fieldIndexes) { return new StreamProjection(this, fieldIndexes).projectTupleX(); } @@ -733,7 +733,7 @@ public AllWindowedStream countWindowAll(long size, long slide) * @param assigner The {@code WindowAssigner} that assigns elements to windows. * @return The trigger windows data stream. */ - @Experimental + @PublicEvolving public AllWindowedStream windowAll(WindowAssigner assigner) { return new AllWindowedStream<>(this, assigner); } @@ -752,7 +752,7 @@ public AllWindowedStream windowAll(WindowAssigner assignTimestamps(TimestampExtractor extractor) { // match parallelism to input, otherwise dop=1 sources could lead to some strange // behaviour: the watermark will creep along very slowly because the elements @@ -772,7 +772,7 @@ public AllWindowedStream windowAll(WindowAssigner print() { PrintSinkFunction printFunction = new PrintSinkFunction(); return addSink(printFunction); @@ -787,7 +787,7 @@ public DataStreamSink print() { * * @return The closed DataStream. */ - @Experimental + @PublicEvolving public DataStreamSink printToErr() { PrintSinkFunction printFunction = new PrintSinkFunction(true); return addSink(printFunction); @@ -805,7 +805,7 @@ public DataStreamSink printToErr() { * * @return The closed DataStream. */ - @Experimental + @PublicEvolving public DataStreamSink writeAsText(String path) { return write(new TextOutputFormat(new Path(path)), 0L); } @@ -825,7 +825,7 @@ public DataStreamSink writeAsText(String path) { * * @return The closed DataStream. */ - @Experimental + @PublicEvolving public DataStreamSink writeAsText(String path, long millis) { TextOutputFormat tof = new TextOutputFormat(new Path(path)); return write(tof, millis); @@ -846,7 +846,7 @@ public DataStreamSink writeAsText(String path, long millis) { * * @return The closed DataStream. */ - @Experimental + @PublicEvolving public DataStreamSink writeAsText(String path, WriteMode writeMode) { TextOutputFormat tof = new TextOutputFormat(new Path(path)); tof.setWriteMode(writeMode); @@ -870,7 +870,7 @@ public DataStreamSink writeAsText(String path, WriteMode writeMode) { * * @return The closed DataStream. 
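shuffle(), rescale(), global() and iterate(), all retagged in the DataStream hunks above, cover explicit repartitioning and streaming loops. A combined sketch; the decrement loop is the usual feedback example, not part of this patch:

    import org.apache.flink.api.common.functions.FilterFunction;
    import org.apache.flink.api.common.functions.MapFunction;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.datastream.IterativeStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class DataStreamSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            // rescale() round-robins to a local subset of downstream tasks;
            // shuffle() picks targets uniformly at random, global() uses a single target.
            DataStream<Long> source = env.generateSequence(1, 10).rescale();

            IterativeStream<Long> loop = source.iterate();
            DataStream<Long> minusOne = loop.map(new MapFunction<Long, Long>() {
                @Override
                public Long map(Long value) {
                    return value - 1;
                }
            });
            // Positive values are fed back; the rest leaves the loop and is printed.
            loop.closeWith(minusOne.filter(new FilterFunction<Long>() {
                @Override
                public boolean filter(Long value) {
                    return value > 0;
                }
            }));
            minusOne.filter(new FilterFunction<Long>() {
                @Override
                public boolean filter(Long value) {
                    return value <= 0;
                }
            }).print();
            env.execute("DataStream sketch");
        }
    }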
*/ - @Experimental + @PublicEvolving public DataStreamSink writeAsText(String path, WriteMode writeMode, long millis) { TextOutputFormat tof = new TextOutputFormat(new Path(path)); tof.setWriteMode(writeMode); @@ -889,7 +889,7 @@ public DataStreamSink writeAsText(String path, WriteMode writeMode, long mill * * @return the closed DataStream */ - @Experimental + @PublicEvolving public DataStreamSink writeAsCsv(String path) { return writeAsCsv(path, null, 0L, CsvOutputFormat.DEFAULT_LINE_DELIMITER, CsvOutputFormat.DEFAULT_FIELD_DELIMITER); } @@ -909,7 +909,7 @@ public DataStreamSink writeAsCsv(String path) { * * @return the closed DataStream */ - @Experimental + @PublicEvolving public DataStreamSink writeAsCsv(String path, long millis) { return writeAsCsv(path, null, millis, CsvOutputFormat.DEFAULT_LINE_DELIMITER, CsvOutputFormat.DEFAULT_FIELD_DELIMITER); } @@ -929,7 +929,7 @@ public DataStreamSink writeAsCsv(String path, long millis) { * * @return the closed DataStream */ - @Experimental + @PublicEvolving public DataStreamSink writeAsCsv(String path, WriteMode writeMode) { return writeAsCsv(path, writeMode, 0L, CsvOutputFormat.DEFAULT_LINE_DELIMITER, CsvOutputFormat.DEFAULT_FIELD_DELIMITER); } @@ -952,7 +952,7 @@ public DataStreamSink writeAsCsv(String path, WriteMode writeMode) { * * @return the closed DataStream */ - @Experimental + @PublicEvolving public DataStreamSink writeAsCsv(String path, WriteMode writeMode, long millis) { return writeAsCsv(path, writeMode, millis, CsvOutputFormat.DEFAULT_LINE_DELIMITER, CsvOutputFormat.DEFAULT_FIELD_DELIMITER); } @@ -980,7 +980,7 @@ public DataStreamSink writeAsCsv(String path, WriteMode writeMode, long milli * @return the closed DataStream */ @SuppressWarnings("unchecked") - @Experimental + @PublicEvolving public DataStreamSink writeAsCsv( String path, WriteMode writeMode, @@ -1015,7 +1015,7 @@ public DataStreamSink writeAsCsv( * schema for serialization * @return the closed DataStream */ - @Experimental + @PublicEvolving public DataStreamSink writeToSocket(String hostName, int port, SerializationSchema schema) { DataStreamSink returnStream = addSink(new SocketClientSink(hostName, port, schema, 0)); returnStream.setParallelism(1); // It would not work if multiple instances would connect to the same port @@ -1029,7 +1029,7 @@ public DataStreamSink writeToSocket(String hostName, int port, SerializationS * @param millis the write frequency * @return The closed DataStream */ - @Experimental + @PublicEvolving public DataStreamSink write(OutputFormat format, long millis) { return addSink(new FileSinkFunctionByMillis(format, millis)); } @@ -1048,7 +1048,7 @@ public DataStreamSink write(OutputFormat format, long millis) { * type of the return stream * @return the data stream constructed */ - @Experimental + @PublicEvolving public SingleOutputStreamOperator transform(String operatorName, TypeInformation outTypeInfo, OneInputStreamOperator operator) { // read the output type of the input Transform to coax out errors about MissingTypeInfo diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStreamSink.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStreamSink.java index fcfe98df16068..d6cdeff32ce01 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStreamSink.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStreamSink.java @@ -17,7 +17,7 @@ package org.apache.flink.streaming.api.datastream; 
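writeAsCsv above returns a DataStreamSink, whose name/uid/disableChaining setters appear in the DataStreamSink hunks that follow. A sketch tying the two together; paths and names are placeholders:

    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.core.fs.FileSystem.WriteMode;
    import org.apache.flink.streaming.api.datastream.DataStreamSink;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class CsvSinkSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            DataStreamSink<Tuple2<String, Integer>> sink = env
                    .fromElements(new Tuple2<>("a", 1), new Tuple2<>("b", 2))
                    .writeAsCsv("file:///tmp/out.csv", WriteMode.OVERWRITE);
            sink.name("csv-sink")
                .uid("csv-sink-1")    // stable ID, e.g. for savepoint compatibility
                .disableChaining();   // run the sink in its own task
            env.execute("csv sink sketch");
        }
    }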
-import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.streaming.api.operators.ChainingStrategy; @@ -70,7 +70,7 @@ public DataStreamSink name(String name) { * @param uid The unique user-specified ID of this transformation. * @return The operator with the specified ID. */ - @Experimental + @PublicEvolving public DataStreamSink uid(String uid) { transformation.setUid(uid); return this; @@ -98,7 +98,7 @@ public DataStreamSink setParallelism(int parallelism) { * * @return The sink with chaining disabled */ - @Experimental + @PublicEvolving public DataStreamSink disableChaining() { this.transformation.setChainingStrategy(ChainingStrategy.NEVER); return this; diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/IterativeStream.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/IterativeStream.java index d03e8e0ad030e..f6b54b7de5ac0 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/IterativeStream.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/IterativeStream.java @@ -17,7 +17,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.functions.KeySelector; @@ -34,7 +34,7 @@ * * @param Type of the elements in this Stream */ -@Experimental +@PublicEvolving public class IterativeStream extends SingleOutputStreamOperator> { // We store these so that we can create a co-iteration if we need to diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/JoinedStreams.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/JoinedStreams.java index aa866eb6a2008..f131b6ec9487f 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/JoinedStreams.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/JoinedStreams.java @@ -17,7 +17,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.functions.CoGroupFunction; import org.apache.flink.api.common.functions.FlatJoinFunction; @@ -137,7 +137,7 @@ public class EqualTo { /** * Specifies the window on which the join operation works. */ - @Experimental + @PublicEvolving public WithWindow window(WindowAssigner, W> assigner) { return new WithWindow<>(input1, input2, keySelector1, keySelector2, keyType, assigner, null, null); } @@ -171,7 +171,7 @@ public static class WithWindow { private final Evictor, ? super W> evictor; - @Experimental + @PublicEvolving protected WithWindow(DataStream input1, DataStream input2, KeySelector keySelector1, @@ -197,7 +197,7 @@ protected WithWindow(DataStream input1, /** * Sets the {@code Trigger} that should be used to trigger window emission. */ - @Experimental + @PublicEvolving public WithWindow trigger(Trigger, ? 
super W> newTrigger) { return new WithWindow<>(input1, input2, keySelector1, keySelector2, keyType, windowAssigner, newTrigger, evictor); @@ -210,7 +210,7 @@ public WithWindow trigger(Trigger, ? * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. */ - @Experimental + @PublicEvolving public WithWindow evictor(Evictor, ? super W> newEvictor) { return new WithWindow<>(input1, input2, keySelector1, keySelector2, keyType, windowAssigner, trigger, newEvictor); diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java index 60773814476dc..9fa454531fecc 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java @@ -17,7 +17,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.functions.FoldFunction; @@ -132,7 +132,7 @@ protected DataStream setConnectionType(StreamPartitioner partitioner) { // ------------------------------------------------------------------------ @Override - @Experimental + @PublicEvolving public SingleOutputStreamOperator transform(String operatorName, TypeInformation outTypeInfo, OneInputStreamOperator operator) { @@ -222,7 +222,7 @@ public WindowedStream countWindow(long size, long slide) { * @param assigner The {@code WindowAssigner} that assigns elements to windows. * @return The trigger windows data stream. */ - @Experimental + @PublicEvolving public WindowedStream window(WindowAssigner assigner) { return new WindowedStream<>(this, assigner); } diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SingleOutputStreamOperator.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SingleOutputStreamOperator.java index 95e6d36ea2900..a11d53bc1c901 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SingleOutputStreamOperator.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SingleOutputStreamOperator.java @@ -17,7 +17,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.functions.InvalidTypesException; import org.apache.flink.api.common.typeinfo.TypeInformation; @@ -77,7 +77,7 @@ public SingleOutputStreamOperator name(String name){ * @param uid The unique user-specified ID of this transformation. * @return The operator with the specified ID. 
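[Editor's illustration, not part of the patch: a hedged sketch of the JoinedStreams builder whose window()/trigger()/evictor() methods appear above. Imports are elided, env is a StreamExecutionEnvironment, and the window assigner name follows later Flink releases; the 1.0-era assigner was named TumblingTimeWindows.]

    DataStream<Tuple2<String, Integer>> orders =
        env.fromElements(Tuple2.of("k1", 10), Tuple2.of("k2", 20));
    DataStream<Tuple2<String, String>> names =
        env.fromElements(Tuple2.of("k1", "alice"), Tuple2.of("k2", "bob"));

    DataStream<String> joined = orders
        .join(names)
        .where(new KeySelector<Tuple2<String, Integer>, String>() {
            @Override
            public String getKey(Tuple2<String, Integer> v) { return v.f0; }
        })
        .equalTo(new KeySelector<Tuple2<String, String>, String>() {
            @Override
            public String getKey(Tuple2<String, String> v) { return v.f0; }
        })
        .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
        .apply(new JoinFunction<Tuple2<String, Integer>, Tuple2<String, String>, String>() {
            @Override
            public String join(Tuple2<String, Integer> o, Tuple2<String, String> n) {
                return n.f1 + ": " + o.f1;  // one output per matching pair per window
            }
        });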
*/ - @Experimental + @PublicEvolving public SingleOutputStreamOperator uid(String uid) { transformation.setUid(uid); return this; @@ -121,7 +121,7 @@ public SingleOutputStreamOperator broadcast() { @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public SingleOutputStreamOperator shuffle() { return (SingleOutputStreamOperator) super.shuffle(); } @@ -140,14 +140,14 @@ public SingleOutputStreamOperator rebalance() { @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public SingleOutputStreamOperator rescale() { return (SingleOutputStreamOperator) super.rescale(); } @SuppressWarnings("unchecked") @Override - @Experimental + @PublicEvolving public SingleOutputStreamOperator global() { return (SingleOutputStreamOperator) super.global(); } @@ -161,7 +161,7 @@ public SingleOutputStreamOperator global() { * The selected {@link ChainingStrategy} * @return The operator with the modified chaining strategy */ - @Experimental + @PublicEvolving private SingleOutputStreamOperator setChainingStrategy(ChainingStrategy strategy) { this.transformation.setChainingStrategy(strategy); return this; @@ -176,7 +176,7 @@ private SingleOutputStreamOperator setChainingStrategy(ChainingStrategy st * * @return The operator with chaining disabled */ - @Experimental + @PublicEvolving public SingleOutputStreamOperator disableChaining() { return setChainingStrategy(ChainingStrategy.NEVER); } @@ -188,7 +188,7 @@ public SingleOutputStreamOperator disableChaining() { * * @return The operator with chaining set. */ - @Experimental + @PublicEvolving public SingleOutputStreamOperator startNewChain() { return setChainingStrategy(ChainingStrategy.HEAD); } @@ -327,7 +327,7 @@ protected DataStream setConnectionType(StreamPartitioner partitioner) { * * @return The operator as a part of a new resource group. */ - @Experimental + @PublicEvolving public SingleOutputStreamOperator startNewResourceGroup() { transformation.setResourceStrategy(ResourceStrategy.NEWGROUP); return this; @@ -343,7 +343,7 @@ public SingleOutputStreamOperator startNewResourceGroup() { * * @return The operator with isolated resource group. 
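[Editor's illustration, not part of the patch: the chaining and naming controls flipped in this hunk, sketched on a hypothetical String stream. Imports are elided and `input` stands in for any DataStream of Strings.]

    DataStream<String> upper = input
        .map(new MapFunction<String, String>() {
            @Override
            public String map(String s) { return s.toUpperCase(); }
        })
        .uid("upper-map")   // stable, user-specified operator ID (e.g. for savepoints)
        .startNewChain();   // chain boundary: this operator heads a new chain

    DataStream<String> nonEmpty = upper
        .filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String s) { return !s.isEmpty(); }
        })
        .disableChaining(); // never chain the filter with its neighbors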
*/ - @Experimental + @PublicEvolving public SingleOutputStreamOperator isolateResources() { transformation.setResourceStrategy(ResourceStrategy.ISOLATE); return this; diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SplitStream.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SplitStream.java index 0f0f301c6c75a..50c3aa9ff4c9e 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SplitStream.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/SplitStream.java @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.datastream; import com.google.common.collect.Lists; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.streaming.api.collector.selector.OutputSelector; import org.apache.flink.streaming.api.transformations.SelectTransformation; import org.apache.flink.streaming.api.transformations.SplitTransformation; @@ -32,7 +32,7 @@ * @param The type of the elements in the Stream */ -@Experimental +@PublicEvolving public class SplitStream extends DataStream { protected SplitStream(DataStream dataStream, OutputSelector outputSelector) { diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/WindowedStream.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/WindowedStream.java index f94539980810b..88e619a091311 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/WindowedStream.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/WindowedStream.java @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.datastream; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.functions.FoldFunction; import org.apache.flink.api.common.functions.Function; @@ -98,7 +98,7 @@ public class WindowedStream { private Evictor evictor; - @Experimental + @PublicEvolving public WindowedStream(KeyedStream input, WindowAssigner windowAssigner) { this.input = input; @@ -109,7 +109,7 @@ public WindowedStream(KeyedStream input, /** * Sets the {@code Trigger} that should be used to trigger window emission. */ - @Experimental + @PublicEvolving public WindowedStream trigger(Trigger trigger) { this.trigger = trigger; return this; @@ -122,7 +122,7 @@ public WindowedStream trigger(Trigger trigger) { * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. 
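[Editor's illustration, not part of the patch: a hedged sketch combining the WindowedStream trigger()/evictor() setters from the hunks above. GlobalWindows, CountTrigger and CountEvictor are existing Flink classes; the keyed Tuple2 input is assumed and imports are elided.]

    DataStream<Tuple2<String, Integer>> counts = input
        .keyBy(0)                        // key by the String field
        .window(GlobalWindows.create())  // window emission driven entirely by the trigger
        .trigger(CountTrigger.of(100))   // fire once 100 elements have arrived
        .evictor(CountEvictor.of(100))   // keep at most the last 100 elements
        .sum(1);                         // aggregate the Integer field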
*/ - @Experimental + @PublicEvolving public WindowedStream evictor(Evictor evictor) { this.evictor = evictor; return this; diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/CheckpointConfig.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/CheckpointConfig.java index 6c2d72c667508..327b524eee987 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/CheckpointConfig.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/CheckpointConfig.java @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.environment; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.streaming.api.CheckpointingMode; @@ -206,7 +206,7 @@ public void setMaxConcurrentCheckpoints(int maxConcurrentCheckpoints) { * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated - @Experimental + @PublicEvolving public boolean isForceCheckpointing() { return forceCheckpointing; } @@ -219,7 +219,7 @@ public boolean isForceCheckpointing() { * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated - @Experimental + @PublicEvolving public void setForceCheckpointing(boolean forceCheckpointing) { this.forceCheckpointing = forceCheckpointing; } diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java index 1e2915586dbb4..8d829c6ec0ac1 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironment.java @@ -20,7 +20,7 @@ import com.esotericsoftware.kryo.Serializer; import com.google.common.base.Preconditions; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.ExecutionConfig; @@ -224,7 +224,7 @@ public long getBufferTimeout() { * * @return StreamExecutionEnvironment with chaining disabled. */ - @Experimental + @PublicEvolving public StreamExecutionEnvironment disableOperatorChaining() { this.isChainingEnabled = false; return this; @@ -235,7 +235,7 @@ public StreamExecutionEnvironment disableOperatorChaining() { * * @return {@code true} if chaining is enabled, false otherwise. */ - @Experimental + @PublicEvolving public boolean isChainingEnabled() { return isChainingEnabled; } @@ -321,7 +321,7 @@ public StreamExecutionEnvironment enableCheckpointing(long interval, Checkpointi */ @Deprecated @SuppressWarnings("deprecation") - @Experimental + @PublicEvolving public StreamExecutionEnvironment enableCheckpointing(long interval, CheckpointingMode mode, boolean force) { checkpointCfg.setCheckpointingMode(mode); checkpointCfg.setCheckpointInterval(interval); @@ -346,7 +346,7 @@ public StreamExecutionEnvironment enableCheckpointing(long interval, Checkpointi * @deprecated Use {@link #enableCheckpointing(long)} instead. 
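[Editor's illustration, not part of the patch: the checkpointing configuration surface touched above, using only methods visible in these hunks plus the standard getCheckpointConfig() accessor. Imports are elided.]

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.enableCheckpointing(10000);  // take a checkpoint every 10 seconds
    env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
    env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);  // no overlapping checkpoints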
*/ @Deprecated - @Experimental + @PublicEvolving public StreamExecutionEnvironment enableCheckpointing() { checkpointCfg.setCheckpointInterval(500); return this; @@ -368,7 +368,7 @@ public long getCheckpointInterval() { */ @Deprecated @SuppressWarnings("deprecation") - @Experimental + @PublicEvolving public boolean isForceCheckpointing() { return checkpointCfg.isForceCheckpointing(); } @@ -406,7 +406,7 @@ public CheckpointingMode getCheckpointingMode() { * * @see #getStateBackend() */ - @Experimental + @PublicEvolving public StreamExecutionEnvironment setStateBackend(AbstractStateBackend backend) { this.defaultStateBackend = requireNonNull(backend); return this; @@ -418,7 +418,7 @@ public StreamExecutionEnvironment setStateBackend(AbstractStateBackend backend) * * @see #setStateBackend(AbstractStateBackend) */ - @Experimental + @PublicEvolving public AbstractStateBackend getStateBackend() { return defaultStateBackend; } @@ -432,7 +432,7 @@ public AbstractStateBackend getStateBackend() { * @param numberOfExecutionRetries * The number of times the system will try to re-execute failed tasks. */ - @Experimental + @PublicEvolving public void setNumberOfExecutionRetries(int numberOfExecutionRetries) { config.setNumberOfExecutionRetries(numberOfExecutionRetries); } @@ -444,7 +444,7 @@ public void setNumberOfExecutionRetries(int numberOfExecutionRetries) { * * @return The number of times the system will try to re-execute failed tasks. */ - @Experimental + @PublicEvolving public int getNumberOfExecutionRetries() { return config.getNumberOfExecutionRetries(); } @@ -456,7 +456,7 @@ public int getNumberOfExecutionRetries() { * @param parallelism * The parallelism to use as the default local parallelism. */ - @Experimental + @PublicEvolving public static void setDefaultLocalParallelism(int parallelism) { defaultLocalParallelism = parallelism; } @@ -562,7 +562,7 @@ public void registerType(Class type) { * * @param characteristic The time characteristic. */ - @Experimental + @PublicEvolving public void setStreamTimeCharacteristic(TimeCharacteristic characteristic) { this.timeCharacteristic = requireNonNull(characteristic); if (characteristic == TimeCharacteristic.ProcessingTime) { @@ -581,7 +581,7 @@ public void setStreamTimeCharacteristic(TimeCharacteristic characteristic) { * * @return The time characteristic. */ - @Experimental + @PublicEvolving public TimeCharacteristic getStreamTimeCharacteristic() { return timeCharacteristic; } @@ -1013,7 +1013,7 @@ public DataStream readFileStream(String filePath, long intervalMillis, * a negative value ensures retrying forever. * @return A data stream containing the strings received from the socket */ - @Experimental + @PublicEvolving public DataStreamSource socketTextStream(String hostname, int port, char delimiter, long maxRetry) { return addSource(new SocketTextStreamFunction(hostname, port, delimiter, maxRetry), "Socket Stream"); @@ -1032,7 +1032,7 @@ public DataStreamSource socketTextStream(String hostname, int port, char * A character which splits received strings into records * @return A data stream containing the strings received from the socket */ - @Experimental + @PublicEvolving public DataStreamSource socketTextStream(String hostname, int port, char delimiter) { return socketTextStream(hostname, port, delimiter, 0); } @@ -1049,7 +1049,7 @@ public DataStreamSource socketTextStream(String hostname, int port, char * allocated. 
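[Editor's illustration, not part of the patch: a companion sketch for the state backend, retry and time-characteristic setters in this hunk. FsStateBackend is a real backend whose String constructor may declare IOException, so the snippet is assumed to run inside a main() that throws Exception; the checkpoint URI is a placeholder.]

    env.setStateBackend(new FsStateBackend("file:///tmp/flink-checkpoints"));
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); // enable event time
    env.setNumberOfExecutionRetries(3);  // re-execute failed tasks up to three times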
* @return A data stream containing the strings received from the socket */ - @Experimental + @PublicEvolving public DataStreamSource socketTextStream(String hostname, int port) { return socketTextStream(hostname, port, '\n'); } @@ -1070,7 +1070,7 @@ public DataStreamSource socketTextStream(String hostname, int port) { * The type of the returned data stream * @return The data stream that represents the data created by the input format */ - @Experimental + @PublicEvolving public DataStreamSource createInput(InputFormat inputFormat) { return createInput(inputFormat, TypeExtractor.getInputFormatTypes(inputFormat), "Custom File source"); } @@ -1089,7 +1089,7 @@ public DataStreamSource createInput(InputFormat inputFormat) * The type of the returned data stream * @return The data stream that represents the data created by the input format */ - @Experimental + @PublicEvolving public DataStreamSource createInput(InputFormat inputFormat, TypeInformation typeInfo) { return createInput(inputFormat, typeInfo, "Custom File source"); } diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/EventTimeSourceFunction.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/EventTimeSourceFunction.java index 8a516f19934c4..9b04ad932902d 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/EventTimeSourceFunction.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/EventTimeSourceFunction.java @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.functions.source; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; /** * A marker interface that must be implemented by {@link SourceFunction}s that emit elements with @@ -38,5 +38,5 @@ * * @param Type of the elements emitted by this source. 
*/ -@Experimental +@PublicEvolving public interface EventTimeSourceFunction extends SourceFunction { } diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/SourceFunction.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/SourceFunction.java index 7ba10fd550704..c4139bdfbea5b 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/SourceFunction.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/SourceFunction.java @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.functions.source; -import org.apache.flink.annotation.Experimental; +import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.Public; import org.apache.flink.api.common.functions.Function; import org.apache.flink.streaming.api.watermark.Watermark; @@ -132,7 +132,7 @@ public static interface SourceContext { * @param element The element to emit * @param timestamp The timestamp in milliseconds */ - @Experimental + @PublicEvolving public void collectWithTimestamp(T element, long timestamp); /** @@ -147,7 +147,7 @@ public static interface SourceContext { * * @param mark The {@link Watermark} to emit */ - @Experimental + @PublicEvolving void emitWatermark(Watermark mark); diff --git a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/AllWindowedStream.scala b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/AllWindowedStream.scala index 904bd89f53736..8f0d785c28874 100644 --- a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/AllWindowedStream.scala +++ b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/AllWindowedStream.scala @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.scala -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.functions.{FoldFunction, ReduceFunction} import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.streaming.api.datastream.{AllWindowedStream => JavaAllWStream} @@ -61,7 +61,7 @@ class AllWindowedStream[T, W <: Window](javaStream: JavaAllWStream[T, W]) { /** * Sets the [[Trigger]] that should be used to trigger window emission. */ - @Experimental + @PublicEvolving def trigger(trigger: Trigger[_ >: T, _ >: W]): AllWindowedStream[T, W] = { javaStream.trigger(trigger) this @@ -73,7 +73,7 @@ class AllWindowedStream[T, W <: Window](javaStream: JavaAllWStream[T, W]) { * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. 
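[Editor's illustration, not part of the patch: a hedged sketch of the EventTimeSourceFunction marker and the re-annotated SourceContext methods from the hunks above. The source and its data are invented.]

    import org.apache.flink.streaming.api.functions.source.EventTimeSourceFunction;
    import org.apache.flink.streaming.api.watermark.Watermark;

    /** Emits 0..999 with event timestamps and a watermark every 100 elements. */
    public class TimedSource implements EventTimeSourceFunction<Long> {

        private volatile boolean running = true;

        @Override
        public void run(SourceContext<Long> ctx) throws Exception {
            for (long ts = 0; running && ts < 1000; ts++) {
                ctx.collectWithTimestamp(ts, ts);         // element plus its event timestamp
                if (ts % 100 == 0) {
                    ctx.emitWatermark(new Watermark(ts)); // promise: nothing earlier than ts
                }
            }
        }

        @Override
        public void cancel() {
            running = false;
        }
    }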
*/ - @Experimental + @PublicEvolving def evictor(evictor: Evictor[_ >: T, _ >: W]): AllWindowedStream[T, W] = { javaStream.evictor(evictor) this diff --git a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/CoGroupedStreams.scala b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/CoGroupedStreams.scala index 27bc4977a9182..ce96e4fc652b4 100644 --- a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/CoGroupedStreams.scala +++ b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/CoGroupedStreams.scala @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.scala -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.functions.CoGroupFunction import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.functions.KeySelector @@ -150,7 +150,7 @@ object CoGroupedStreams { /** * Specifies the window on which the co-group operation works. */ - @Experimental + @PublicEvolving def window[W <: Window]( assigner: WindowAssigner[_ >: JavaCoGroupedStreams.TaggedUnion[T1, T2], W]) : CoGroupedStreams.WithWindow[T1, T2, KEY, W] = { @@ -186,7 +186,7 @@ object CoGroupedStreams { * @tparam KEY Type of the key. This must be the same for both inputs * @tparam W Type of { @link Window} on which the co-group operation works. */ - @Experimental + @PublicEvolving class WithWindow[T1, T2, KEY, W <: Window]( input1: DataStream[T1], input2: DataStream[T2], @@ -200,7 +200,7 @@ object CoGroupedStreams { /** * Sets the [[Trigger]] that should be used to trigger window emission. */ - @Experimental + @PublicEvolving def trigger(newTrigger: Trigger[_ >: JavaCoGroupedStreams.TaggedUnion[T1, T2], _ >: W]) : CoGroupedStreams.WithWindow[T1, T2, KEY, W] = { new WithWindow[T1, T2, KEY, W]( @@ -219,7 +219,7 @@ object CoGroupedStreams { * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. */ - @Experimental + @PublicEvolving def evictor(newEvictor: Evictor[_ >: JavaCoGroupedStreams.TaggedUnion[T1, T2], _ >: W]) : CoGroupedStreams.WithWindow[T1, T2, KEY, W] = { new WithWindow[T1, T2, KEY, W]( diff --git a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/DataStream.scala b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/DataStream.scala index 736c41ba53a7a..04c1980aa7a94 100644 --- a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/DataStream.scala +++ b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/DataStream.scala @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.scala -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.functions.{FilterFunction, FlatMapFunction, MapFunction, Partitioner} import org.apache.flink.api.common.io.OutputFormat import org.apache.flink.api.common.typeinfo.TypeInformation @@ -50,6 +50,7 @@ class DataStream[T](stream: JavaStream[T]) { /** * Returns the [[StreamExecutionEnvironment]] associated with the current [[DataStream]]. 
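[Editor's illustration, not part of the patch: the Scala CoGroupedStreams hunks above mirror the Java builder; a hedged Java sketch is kept here for consistency with the other examples. Imports are elided and the window assigner name follows later releases. Unlike join, coGroup also fires for keys present on only one side.]

    DataStream<String> left  = env.fromElements("a", "b");
    DataStream<String> right = env.fromElements("b", "c");

    DataStream<String> summary = left
        .coGroup(right)
        .where(new KeySelector<String, String>() {
            @Override
            public String getKey(String s) { return s; }  // the element is its own key
        })
        .equalTo(new KeySelector<String, String>() {
            @Override
            public String getKey(String s) { return s; }
        })
        .window(TumblingProcessingTimeWindows.of(Time.minutes(1)))
        .apply(new CoGroupFunction<String, String, String>() {
            @Override
            public void coGroup(Iterable<String> l, Iterable<String> r, Collector<String> out) {
                int nl = 0, nr = 0;
                for (String s : l) nl++;
                for (String s : r) nr++;
                out.collect("left=" + nl + ", right=" + nr);
            }
        });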
+ * * @return associated execution environment */ def getExecutionEnvironment: StreamExecutionEnvironment = @@ -60,7 +61,7 @@ class DataStream[T](stream: JavaStream[T]) { * * @return ID of the DataStream */ - @Experimental + @PublicEvolving def getId = stream.getId /** @@ -128,7 +129,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param uid The unique user-specified ID of this transformation. * @return The operator with the specified ID. */ - @Experimental + @PublicEvolving def uid(uid: String) : DataStream[T] = javaStream match { case stream : SingleOutputStreamOperator[T,_] => stream.uid(uid) case _ => throw new UnsupportedOperationException("Only supported for operators.") @@ -142,7 +143,7 @@ class DataStream[T](stream: JavaStream[T]) { * however it is not advised for performance considerations. * */ - @Experimental + @PublicEvolving def disableChaining(): DataStream[T] = { stream match { case ds: SingleOutputStreamOperator[_, _] => ds.disableChaining(); @@ -158,7 +159,7 @@ class DataStream[T](stream: JavaStream[T]) { * previous tasks even if possible. * */ - @Experimental + @PublicEvolving def startNewChain(): DataStream[T] = { stream match { case ds: SingleOutputStreamOperator[_, _] => ds.startNewChain(); @@ -175,7 +176,7 @@ class DataStream[T](stream: JavaStream[T]) { * All subsequent operators are assigned to the default resource group. * */ - @Experimental + @PublicEvolving def isolateResources(): DataStream[T] = { stream match { case ds: SingleOutputStreamOperator[_, _] => ds.isolateResources(); @@ -196,7 +197,7 @@ class DataStream[T](stream: JavaStream[T]) { * degree of parallelism for the operators must be decreased from the * default. */ - @Experimental + @PublicEvolving def startNewResourceGroup(): DataStream[T] = { stream match { case ds: SingleOutputStreamOperator[_, _] => ds.startNewResourceGroup(); @@ -345,14 +346,14 @@ class DataStream[T](stream: JavaStream[T]) { * the first instance of the next processing operator. Use this setting with care * since it might cause a serious performance bottleneck in the application. */ - @Experimental + @PublicEvolving def global: DataStream[T] = stream.global() /** * Sets the partitioning of the DataStream so that the output tuples * are shuffled to the next component. */ - @Experimental + @PublicEvolving def shuffle: DataStream[T] = stream.shuffle() /** @@ -385,7 +386,7 @@ class DataStream[T](stream: JavaStream[T]) { * In cases where the different parallelisms are not multiples of each other one or several * downstream operations will have a differing number of inputs from upstream operations. */ - @Experimental + @PublicEvolving def rescale: DataStream[T] = stream.rescale() /** @@ -408,7 +409,7 @@ class DataStream[T](stream: JavaStream[T]) { * the keepPartitioning flag to true * */ - @Experimental + @PublicEvolving def iterate[R](stepFunction: DataStream[T] => (DataStream[T], DataStream[R]), maxWaitTimeMillis:Long = 0, keepPartitioning: Boolean = false) : DataStream[R] = { @@ -438,7 +439,7 @@ class DataStream[T](stream: JavaStream[T]) { * to 0 then the iteration sources will indefinitely, so the job must be killed to stop. * */ - @Experimental + @PublicEvolving def iterate[R, F: TypeInformation: ClassTag](stepFunction: ConnectedStreams[T, F] => (DataStream[F], DataStream[R]), maxWaitTimeMillis:Long): DataStream[R] = { val feedbackType: TypeInformation[F] = implicitly[TypeInformation[F]] @@ -625,7 +626,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param assigner The `WindowAssigner` that assigns elements to windows. 
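[Editor's illustration, not part of the patch: the iterate hunks above (Scala here, the Java IterativeStream earlier) follow the same feedback pattern; a hedged Java sketch with imports elided.]

    IterativeStream<Long> loop = env.generateSequence(1, 10).iterate();

    DataStream<Long> minusOne = loop.map(new MapFunction<Long, Long>() {
        @Override
        public Long map(Long v) { return v - 1; }
    });

    // feedback edge: still-positive values re-enter the loop
    loop.closeWith(minusOne.filter(new FilterFunction<Long>() {
        @Override
        public boolean filter(Long v) { return v > 0; }
    }));

    // forward edge: values that reached zero leave the iteration
    DataStream<Long> done = minusOne.filter(new FilterFunction<Long>() {
        @Override
        public boolean filter(Long v) { return v <= 0; }
    });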
* @return The trigger windows data stream. */ - @Experimental + @PublicEvolving def windowAll[W <: Window](assigner: WindowAssigner[_ >: T, W]): AllWindowedStream[T, W] = { new AllWindowedStream[T, W](new JavaAllWindowedStream[T, W](stream, assigner)) } @@ -640,7 +641,7 @@ class DataStream[T](stream: JavaStream[T]) { * * @see org.apache.flink.streaming.api.watermark.Watermark */ - @Experimental + @PublicEvolving def assignTimestamps(extractor: TimestampExtractor[T]): DataStream[T] = { stream.assignTimestamps(clean(extractor)) } @@ -656,7 +657,7 @@ class DataStream[T](stream: JavaStream[T]) { * * @see org.apache.flink.streaming.api.watermark.Watermark */ - @Experimental + @PublicEvolving def assignAscendingTimestamps(extractor: T => Long): DataStream[T] = { val cleanExtractor = clean(extractor) val extractorFunction = new AscendingTimestampExtractor[T] { @@ -714,7 +715,7 @@ class DataStream[T](stream: JavaStream[T]) { * written. * */ - @Experimental + @PublicEvolving def print(): DataStreamSink[T] = stream.print() /** @@ -725,7 +726,7 @@ class DataStream[T](stream: JavaStream[T]) { * * @return The closed DataStream. */ - @Experimental + @PublicEvolving def printToErr() = stream.printToErr() /** @@ -735,7 +736,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param path The path pointing to the location the text file is written to * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsText(path: String): DataStreamSink[T] = stream.writeAsText(path, 0L) @@ -749,7 +750,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param millis The file update frequency * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsText(path: String, millis: Long): DataStreamSink[T] = stream.writeAsText(path, millis) @@ -762,7 +763,7 @@ class DataStream[T](stream: JavaStream[T]) { * OVERWRITE. 
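[Editor's illustration, not part of the patch: assignAscendingTimestamps above wraps the Java AscendingTimestampExtractor; a hedged Java equivalent follows. `events` is a hypothetical DataStream of Longs assumed to carry ascending epoch milliseconds, imports are elided, and the two-argument signature matches the 1.0-era extractor but may differ in later releases.]

    DataStream<Long> withTimestamps = events.assignTimestamps(
        new AscendingTimestampExtractor<Long>() {
            @Override
            public long extractAscendingTimestamp(Long element, long currentTimestamp) {
                return element;  // elements are their own monotonically increasing millis
            }
        });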
* @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsText(path: String, writeMode: FileSystem.WriteMode): DataStreamSink[T] = { if (writeMode != null) { stream.writeAsText(path, writeMode) @@ -782,7 +783,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param millis The file update frequency * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsText( path: String, writeMode: FileSystem.WriteMode, @@ -802,7 +803,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param path Path to the location of the CSV file * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsCsv(path: String): DataStreamSink[T] = { writeAsCsv( path, @@ -820,7 +821,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param millis File update frequency * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsCsv(path: String, millis: Long): DataStreamSink[T] = { writeAsCsv( path, @@ -838,7 +839,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param writeMode Controls whether an existing file is overwritten or not * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsCsv(path: String, writeMode: FileSystem.WriteMode): DataStreamSink[T] = { writeAsCsv( path, @@ -857,7 +858,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param millis File update frequency * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsCsv(path: String, writeMode: FileSystem.WriteMode, millis: Long): DataStreamSink[T] = { writeAsCsv( path, @@ -878,7 +879,7 @@ class DataStream[T](stream: JavaStream[T]) { * @param fieldDelimiter Delimiter for consecutive fields * @return The closed DataStream */ - @Experimental + @PublicEvolving def writeAsCsv( path: String, writeMode: FileSystem.WriteMode, @@ -898,7 +899,7 @@ class DataStream[T](stream: JavaStream[T]) { * Writes a DataStream using the given [[OutputFormat]]. The * writing is performed periodically, in every millis milliseconds. */ - @Experimental + @PublicEvolving def write(format: OutputFormat[T], millis: Long): DataStreamSink[T] = { stream.write(format, millis) } @@ -907,7 +908,7 @@ class DataStream[T](stream: JavaStream[T]) { * Writes the DataStream to a socket as a byte array. The format of the output is * specified by a [[SerializationSchema]]. */ - @Experimental + @PublicEvolving def writeToSocket( hostname: String, port: Integer, diff --git a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/JoinedStreams.scala b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/JoinedStreams.scala index b6fbadf4a3e26..21c5d84d55fe9 100644 --- a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/JoinedStreams.scala +++ b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/JoinedStreams.scala @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.scala -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.functions.{FlatJoinFunction, JoinFunction} import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.functions.KeySelector @@ -148,7 +148,7 @@ object JoinedStreams { /** * Specifies the window on which the join operation works. 
*/ - @Experimental + @PublicEvolving def window[W <: Window]( assigner: WindowAssigner[_ >: JavaCoGroupedStreams.TaggedUnion[T1, T2], W]) : JoinedStreams.WithWindow[T1, T2, KEY, W] = { @@ -197,7 +197,7 @@ object JoinedStreams { /** * Sets the [[Trigger]] that should be used to trigger window emission. */ - @Experimental + @PublicEvolving def trigger(newTrigger: Trigger[_ >: JavaCoGroupedStreams.TaggedUnion[T1, T2], _ >: W]) : JoinedStreams.WithWindow[T1, T2, KEY, W] = { new WithWindow[T1, T2, KEY, W]( @@ -216,7 +216,7 @@ object JoinedStreams { * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. */ - @Experimental + @PublicEvolving def evictor(newEvictor: Evictor[_ >: JavaCoGroupedStreams.TaggedUnion[T1, T2], _ >: W]) : JoinedStreams.WithWindow[T1, T2, KEY, W] = { new WithWindow[T1, T2, KEY, W]( diff --git a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/KeyedStream.scala b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/KeyedStream.scala index 271796bce189d..923aad686ddf7 100644 --- a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/KeyedStream.scala +++ b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/KeyedStream.scala @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.scala -import org.apache.flink.annotation.{Experimental, Internal, Public} +import org.apache.flink.annotation.{PublicEvolving, Internal, Public} import org.apache.flink.api.common.functions._ import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer @@ -112,7 +112,7 @@ class KeyedStream[T, K](javaStream: KeyedJavaStream[T, K]) extends DataStream[T] * @param assigner The `WindowAssigner` that assigns elements to windows. * @return The trigger windows data stream. */ - @Experimental + @PublicEvolving def window[W <: Window](assigner: WindowAssigner[_ >: T, W]): WindowedStream[T, K, W] = { new WindowedStream(new WindowedJavaStream[T, K, W](javaStream, assigner)) } diff --git a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala index 431793197ca34..58b100ed12183 100644 --- a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala +++ b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala @@ -19,7 +19,7 @@ package org.apache.flink.streaming.api.scala import com.esotericsoftware.kryo.Serializer -import org.apache.flink.annotation.{Internal, Experimental, Public} +import org.apache.flink.annotation.{Internal, PublicEvolving, Public} import org.apache.flink.api.common.io.{FileInputFormat, InputFormat} import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.runtime.kryo.KryoSerializer @@ -91,7 +91,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * avoiding serialization and de-serialization. * */ - @Experimental + @PublicEvolving def disableOperatorChaining(): StreamExecutionEnvironment = { javaEnv.disableOperatorChaining() this @@ -127,7 +127,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * If true checkpointing will be enabled for iterative jobs as well. 
*/ @deprecated - @Experimental + @PublicEvolving def enableCheckpointing(interval : Long, mode: CheckpointingMode, force: Boolean) : StreamExecutionEnvironment = { @@ -191,7 +191,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * failure the job will be resubmitted to the cluster indefinitely. */ @deprecated - @Experimental + @PublicEvolving def enableCheckpointing() : StreamExecutionEnvironment = { javaEnv.enableCheckpointing() this @@ -218,7 +218,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * program can be executed highly available and strongly consistent (assuming that Flink * is run in high-availability mode). */ - @Experimental + @PublicEvolving def setStateBackend(backend: AbstractStateBackend): StreamExecutionEnvironment = { javaEnv.setStateBackend(backend) this @@ -227,7 +227,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { /** * Returns the state backend that defines how to store and checkpoint state. */ - @Experimental + @PublicEvolving def getStateBackend: AbstractStateBackend = javaEnv.getStateBackend() /** @@ -235,7 +235,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * effectively disables fault tolerance. A value of "-1" indicates that the system * default value (as defined in the configuration) should be used. */ - @Experimental + @PublicEvolving def setNumberOfExecutionRetries(numRetries: Int): Unit = { javaEnv.setNumberOfExecutionRetries(numRetries) } @@ -245,7 +245,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * of "-1" indicates that the system default value (as defined in the configuration) * should be used. */ - @Experimental + @PublicEvolving def getNumberOfExecutionRetries = javaEnv.getNumberOfExecutionRetries // -------------------------------------------------------------------------------------------- @@ -327,7 +327,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * * @param characteristic The time characteristic. */ - @Experimental + @PublicEvolving def setStreamTimeCharacteristic(characteristic: TimeCharacteristic) : Unit = { javaEnv.setStreamTimeCharacteristic(characteristic) } @@ -336,10 +336,9 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * Gets the time characteristic/ * * @see #setStreamTimeCharacteristic - * * @return The time characteristic. */ - @Experimental + @PublicEvolving def getStreamTimeCharacteristic = javaEnv.getStreamTimeCharacteristic() // -------------------------------------------------------------------------------------------- @@ -474,7 +473,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * character set. The maximum retry interval is specified in seconds, in case * of temporary service outage reconnection is initiated every second. */ - @Experimental + @PublicEvolving def socketTextStream(hostname: String, port: Int, delimiter: Char = '\n', maxRetry: Long = 0): DataStream[String] = javaEnv.socketTextStream(hostname, port) @@ -485,7 +484,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { * determine the type of the data produced by the input format. It will attempt to determine the * data type by reflection, unless the input format implements the ResultTypeQueryable interface. 
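[Editor's illustration, not part of the patch: a hedged sketch of the source-side methods discussed above. Host, port and path are placeholders; createInput's reflective type extraction is assumed to resolve TextInputFormat to String as the doc describes, and imports are elided.]

    // lines split on '\n', retrying the connection up to 3 times
    DataStream<String> socketLines =
        env.socketTextStream("localhost", 9999, '\n', 3);

    // stream type determined by reflection on the input format
    DataStream<String> fileLines =
        env.createInput(new TextInputFormat(new Path("file:///tmp/in.txt")));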
*/ - @Experimental + @PublicEvolving def createInput[T: ClassTag : TypeInformation](inputFormat: InputFormat[T, _]): DataStream[T] = javaEnv.createInput(inputFormat) @@ -562,6 +561,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) { /** * Getter of the wrapped [[org.apache.flink.streaming.api.environment.StreamExecutionEnvironment]] + * * @return The encased ExecutionEnvironment */ @Internal @@ -590,7 +590,7 @@ object StreamExecutionEnvironment { * @param parallelism * The parallelism to use as the default local parallelism. */ - @Experimental + @PublicEvolving def setDefaultLocalParallelism(parallelism: Int) : Unit = StreamExecutionEnvironment.setDefaultLocalParallelism(parallelism) diff --git a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/WindowedStream.scala b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/WindowedStream.scala index 15b9505ef094f..6385831202e31 100644 --- a/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/WindowedStream.scala +++ b/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/WindowedStream.scala @@ -18,7 +18,7 @@ package org.apache.flink.streaming.api.scala -import org.apache.flink.annotation.{Experimental, Public} +import org.apache.flink.annotation.{PublicEvolving, Public} import org.apache.flink.api.common.functions.{FoldFunction, ReduceFunction} import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.streaming.api.datastream.{WindowedStream => JavaWStream} @@ -64,7 +64,7 @@ class WindowedStream[T, K, W <: Window](javaStream: JavaWStream[T, K, W]) { /** * Sets the [[Trigger]] that should be used to trigger window emission. */ - @Experimental + @PublicEvolving def trigger(trigger: Trigger[_ >: T, _ >: W]): WindowedStream[T, K, W] = { javaStream.trigger(trigger) this @@ -76,7 +76,7 @@ class WindowedStream[T, K, W <: Window](javaStream: JavaWStream[T, K, W]) { * Note: When using an evictor window performance will degrade significantly, since * pre-aggregation of window results cannot be used. */ - @Experimental + @PublicEvolving def evictor(evictor: Evictor[_ >: T, _ >: W]): WindowedStream[T, K, W] = { javaStream.evictor(evictor) this From 76eb2ce946754ac6e22f8f838ac3840a2275c843 Mon Sep 17 00:00:00 2001 From: Fabian Hueske Date: Tue, 9 Feb 2016 12:40:10 +0100 Subject: [PATCH 2/2] Updated JavaDocs of PublicEvolving --- .../org/apache/flink/annotation/PublicEvolving.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/flink-annotations/src/main/java/org/apache/flink/annotation/PublicEvolving.java b/flink-annotations/src/main/java/org/apache/flink/annotation/PublicEvolving.java index 925c19d25f558..289d89164a8d0 100644 --- a/flink-annotations/src/main/java/org/apache/flink/annotation/PublicEvolving.java +++ b/flink-annotations/src/main/java/org/apache/flink/annotation/PublicEvolving.java @@ -23,10 +23,15 @@ import java.lang.annotation.Target; /** - * Interface to mark methods within stable, public APIs as experimental. - * It also allows to mark types explicitly as experimental + * Annotation to mark classes and methods for public use but with evolving interfaces. + * + * Classes and methods with this annotation are intended for public use and have stable behavior. + * However, their interfaces and signatures are not considered to be stable and might be changed + * across versions. 
+ * + * This annotation also exempts evolving methods and nested classes within classes + * annotated with {@link Public} from the class-level stability guarantee. * - * An experimental API might change between minor releases. */ @Documented @Target({ ElementType.TYPE, ElementType.METHOD, ElementType.FIELD, ElementType.CONSTRUCTOR })
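[Editor's illustration, not part of the patch: how the renamed annotation reads at a use site. MyConnector and its methods are invented for illustration.]

    import org.apache.flink.annotation.Public;
    import org.apache.flink.annotation.PublicEvolving;

    @Public
    public class MyConnector {

        /** Covered by the class-level @Public stability guarantee. */
        public void connect() { }

        /** Usable today, but its signature may still change between releases. */
        @PublicEvolving
        public int connectWithRetries(int maxRetries) { return maxRetries; }
    }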