PCollectionView<T> view
com.datatorrent.api.DAG dag
com.datatorrent.api.Attribute.AttributeMap launchAttributes
java.util.Properties configProperties
java.util.HashMap<K,V> env
java.lang.String cmd
PCollectionView<T> view
boolean streaming
DataflowPipelineJob job
PCollectionView<T> view
Coder<T> dataCoder
java.util.Map<K,V> properties
PCollectionView<T> view
org.apache.beam.runners.core.metrics.MetricsContainerStepMap metricsContainers
PCollectionView<T> view
boolean isNonMerging
WindowFn<T,W extends BoundedWindow> windowFn
TimestampCombiner timestampCombiner
WindowFn<T,W extends BoundedWindow> windowFn
TimestampCombiner timestampCombiner
WindowFn<T,W extends BoundedWindow> windowFn
DoFnRunnerFactory<InputT,OutputT> doFnRunnerFactory
DoFn<InputT,OutputT> doFn
java.util.Collection<E> sideInputs
java.util.Map<K,V> tagsToSideInputs
TupleTag<V> mainOutput
java.util.List<E> sideOutputs
org.apache.beam.runners.gearpump.translators.functions.DoFnFunction.DoFnOutputManager outputManager
BoundedSource<T> source
org.apache.beam.runners.core.construction.SerializablePipelineOptions serializedOptions
Source.Reader<T> reader
boolean available
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
byte[] values
IterableCoder<T> iterableCoder
DoFn<InputT,OutputT> fn
org.apache.beam.runners.core.construction.SerializablePipelineOptions serializedOptions
java.util.Collection<E> sideInputs
org.apache.beam.runners.core.DoFnRunners.OutputManager outputManager
TupleTag<V> mainOutputTag
java.util.List<E> sideOutputTags
org.apache.beam.runners.core.StepContext stepContext
WindowingStrategy<T,W extends BoundedWindow> windowingStrategy
java.util.Map<K,V> mNamedAggregators
int num
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
Duration maxReadTime
int numInitialSplits
long maxNumRecords
int sourceId
double readerCacheInterval
int splitId
BoundedSource<T> source
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
int numPartitions
java.lang.String stepName
org.apache.spark.Accumulator<T> metricsAccum
MicrobatchSource<T,CheckpointMarkT extends UnboundedSource.CheckpointMark> microbatchSource
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
org.apache.spark.Partitioner partitioner
byte[] value
java.lang.Class<T> type
org.apache.beam.sdk.coders.AvroCoder.SerializableSchemaSupplier schemaSupplier
TypeDescriptor<T> typeDescriptor
java.util.List<E> nonDeterministicReasons
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> decoder
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> encoder
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> writer
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> reader
Supplier<T> reflectData
CannotProvideCoderException.ReasonCode reason
Coder<T> coder
DelegateCoder.CodingFunction<InputT,OutputT> toFn
DelegateCoder.CodingFunction<InputT,OutputT> fromFn
TypeDescriptor<T> typeDescriptor
Schema schema
java.lang.Class<T> type
Coder<T> keyCoder
VarIntCoder shardNumberCoder
DelegateCoder<T,IntermediateT> delegateCoder
java.lang.Class<T> clazz
org.apache.beam.sdk.util.gcsfs.GcsPath gcsPath
java.lang.Class<T> inputClass
ObjectMapper customMapper
java.lang.Class<T> outputClass
ObjectMapper customMapper
java.lang.Class<T> protoMessageClass
The Message type to be coded.
java.util.Set<E> extensionHostClasses
All extension host classes included in this ProtoCoder. The extensions from these classes will be included in the ExtensionRegistry used during encoding and decoding.
double compression
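A minimal sketch (not part of this listing) of how the ProtoCoder fields above are typically populated through the public API; MyMessage and MyExtensions are hypothetical generated protobuf classes:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.extensions.protobuf.ProtoCoder;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;

    public class ProtoCoderSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        // protoMessageClass: the Message type to be coded.
        // extensionHostClasses: classes whose extensions go into the ExtensionRegistry.
        ProtoCoder<MyMessage> coder =
            ProtoCoder.of(MyMessage.class)                // hypothetical generated message class
                .withExtensionsFrom(MyExtensions.class);  // hypothetical extension host class

        p.getCoderRegistry().registerCoderForClass(MyMessage.class, coder);
      }
    }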
java.lang.String tempLocation
int memoryMB
java.lang.String tempLocation
int memoryMB
long memoryMB
BufferedExternalSorter.Options sorterOptions
java.lang.String name
int id
java.lang.String countryOfResidence
int id
int customerId
java.util.List<E> exps
int correlationId
java.util.List<E> operands
org.apache.calcite.sql.type.SqlTypeName outputType
int inputRef
java.lang.Object value
java.lang.String className
java.lang.String methodName
java.util.List<E> paraClassName
Combine.CombineFn<InputT,AccumT,OutputT> combineFn
BeamSqlExpression delegateExpression
org.apache.calcite.sql.type.SqlTypeName intervalType
java.util.Random rand
java.lang.Integer seed
java.util.Random rand
java.lang.Integer seed
BeamSqlPrimitive<T> zero
BeamSqlExpression referenceExpression
int nestedFieldIndex
Schema schema
BeamIOType ioType
VarIntCoder sizeCoder
java.util.List<E> elementCoders
Schema keySchema
java.util.List<E> groupByKeys
Schema outSchema
java.util.List<E> aggFieldNames
int windowStartFieldIdx
int windowFieldIdx
java.util.List<E> joinColumns
BeamSqlSeekableTable seekableTable
Schema lkpSchema
Schema joinSubsetType
java.util.List<E> factJoinIdx
PCollectionView<T> sideInputView
org.apache.calcite.rel.core.JoinRelType joinType
Row rightNullRow
boolean swap
TupleTag<V> leftTag
TupleTag<V> rightTag
BeamSetOperatorRelBase.OpType opType
boolean all
java.lang.String stepName
BeamSqlExpressionExecutor executor
java.lang.String stepName
java.lang.String stepName
BeamSqlExpressionExecutor executor
Schema outputSchema
boolean isSample
SerializableFunction<InputT,OutputT> decimalConverter
boolean isSample
SerializableFunction<InputT,OutputT> decimalConverter
java.lang.String tableSpec
org.apache.commons.csv.CSVFormat csvFormat
Schema schema
org.apache.commons.csv.CSVFormat format
Schema schema
org.apache.commons.csv.CSVFormat format
java.lang.String bootstrapServers
java.util.List<E> topics
java.util.List<E> topicPartitions
java.util.Map<K,V> configUpdates
java.lang.String filePattern
org.apache.commons.csv.CSVFormat csvFormat
java.lang.String filePattern
Schema schema
org.apache.commons.csv.CSVFormat csvFormat
java.lang.String filePattern
Schema schema
org.apache.commons.csv.CSVFormat csvFormat
java.lang.String filePattern
AvroIO.TypedWrite<UserT,DestinationT,OutputT> inner
private java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
org.apache.beam.sdk.io.AvroSource.Mode<T> mode
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
long maxNumRecords
Duration maxReadTime
BoundedSource<T> adaptedSource
FileBasedSource<T> sourceDelegate
CompressedSource.DecompressingChannelFactory channelFactory
DefaultFilenamePolicy.Params params
ValueProvider<T> baseFilename
java.lang.String shardTemplate
boolean explicitTemplate
java.lang.String suffix
FileBasedSink.DynamicDestinations<UserT,DestinationT,OutputT> dynamicDestinations
FileBasedSink.WritableByteChannelFactory writableByteChannelFactory
The FileBasedSink.WritableByteChannelFactory that is used to wrap the raw data output to the underlying channel. The default is to not compress the output using Compression.UNCOMPRESSED.
ValueProvider<T> tempDirectoryProvider
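A minimal sketch of how this channel factory is usually configured indirectly, by requesting compression on a file-based write; the output prefix is a hypothetical path:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.Compression;
    import org.apache.beam.sdk.io.TextIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;

    public class CompressedWriteSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        p.apply(Create.of("one", "two", "three"))
            .apply(TextIO.write()
                .to("/tmp/beam-output/part")  // hypothetical output prefix
                .withSuffix(".txt.gz")
                // Replaces the default UNCOMPRESSED channel factory with one that
                // gzips the raw output channel.
                .withCompression(Compression.GZIP));

        p.run().waitUntilFinish();
      }
    }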
FileBasedSink<UserT,DestinationT,OutputT> sink
ValueProvider<T> tempDirectory
boolean windowedWrites
ValueProvider<T> fileOrPatternSpec
EmptyMatchTreatment emptyMatchTreatment
MatchResult.Metadata singleFileMetadata
FileBasedSource.Mode mode
long startOffset
long endOffset
long minBundleSize
BoundedSource<T> source
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
long desiredBundleSizeBytes
SerializableFunction<InputT,OutputT> createSource
Coder<T> coder
TextIO.TypedWrite<UserT,DestinationT> inner
FileBasedSink.WriteOperation<DestinationT,OutputT> writeOperation
int backendVersion
ElasticsearchIO.Read spec
java.lang.String shardPreference
java.lang.Integer numSlices
java.lang.Integer sliceId
BigQueryIO.TypedRead<T> inner
BigQueryIO.Write.CreateDisposition createDisposition
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices bqServices
DynamicDestinations<T,DestinationT> dynamicDestinations
DynamicDestinations<T,DestinationT> dynamicDestinations
SerializableFunction<InputT,OutputT> formatFunction
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices bigQueryServices
BigQueryIO.Write.CreateDisposition createDisposition
DynamicDestinations<T,DestinationT> dynamicDestinations
InsertRetryPolicy retryPolicy
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices bigQueryServices
InsertRetryPolicy retryPolicy
java.lang.String tableSpec
java.lang.String tableDescription
java.lang.String jsonTimePartitioning
java.lang.String projectId
java.lang.String projectId
java.lang.String subscriptionName
java.lang.String path
org.apache.beam.sdk.io.gcp.pubsub.PubsubIO.PubsubSubscription.Type type
java.lang.String project
java.lang.String subscription
org.apache.beam.sdk.io.gcp.pubsub.PubsubIO.PubsubTopic.Type type
java.lang.String project
java.lang.String topic
PubsubClient.PubsubClientFactory pubsubFactory
ValueProvider<T> topic
java.lang.String timestampAttribute
java.lang.String idAttribute
int numShards
int publishBatchSize
int publishBatchBytes
Duration maxLatency
org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSink.RecordIdMethod recordIdMethod
How record ids are generated (if PubsubUnboundedSink.idAttribute is non-null).
com.google.api.client.util.Clock clock
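A minimal write-side sketch of how the sink's timestampAttribute and idAttribute fields are normally set through PubsubIO rather than directly; the topic and attribute names are hypothetical:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;

    public class PubsubWriteSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        p.apply(Create.of("hello", "world"))
            .apply(PubsubIO.writeStrings()
                .to("projects/my-project/topics/my-topic")  // hypothetical topic
                .withTimestampAttribute("ts")   // becomes timestampAttribute on the sink
                .withIdAttribute("uid"));       // becomes idAttribute, used for record ids

        p.run().waitUntilFinish();
      }
    }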
PubsubClient.PubsubClientFactory pubsubFactory
ValueProvider<T> project
Project under which to create a subscription if only the PubsubUnboundedSource.topic was given.
ValueProvider<T> topic
Topic to read from. If null, then PubsubUnboundedSource.subscription must be given. Otherwise PubsubUnboundedSource.subscription must be null.
ValueProvider<T> subscription
Subscription to read from. If null, then PubsubUnboundedSource.topic must be given. Otherwise PubsubUnboundedSource.topic must be null.
If no subscription is given a random one will be created when the transform is applied. This field will be updated with that subscription's path. The created subscription is never deleted.
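A minimal read-side sketch of the topic-or-subscription choice described above; the resource names are hypothetical:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.values.PCollection;

    public class PubsubReadSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        // Read from an existing subscription (the source's topic field stays null)...
        PCollection<String> viaSubscription =
            p.apply("ReadSub", PubsubIO.readStrings()
                .fromSubscription("projects/my-project/subscriptions/my-sub"));  // hypothetical

        // ...or read from a topic (the subscription field stays null; a subscription
        // is created when the transform is applied and is never deleted).
        PCollection<String> viaTopic =
            p.apply("ReadTopic", PubsubIO.readStrings()
                .fromTopic("projects/my-project/topics/my-topic"));  // hypothetical

        p.run().waitUntilFinish();
      }
    }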
java.lang.String timestampAttribute
java.lang.String idAttribute
boolean needsAttributes
ImmutableList<E> mutations
SpannerIO.Write spec
public void readExternal(java.io.ObjectInput in) throws java.io.IOException, java.lang.ClassNotFoundException
java.io.IOException
java.lang.ClassNotFoundException
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException
java.io.IOException
java.lang.Class<T> type
SerializableConfiguration conf
Coder<T> keyCoder
Coder<T> valueCoder
SimpleFunction<InputT,OutputT> keyTranslationFunction
SimpleFunction<InputT,OutputT> valueTranslationFunction
HadoopInputFormatIO.SerializableSplit inputSplit
long boundedSourceEstimatedSize
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException
java.io.IOException
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
java.io.IOException
org.apache.hadoop.mapreduce.InputSplit inputSplit
SerializableConfiguration serializableConfiguration
java.lang.String tableId
org.apache.beam.sdk.io.hbase.SerializableScan serializableScan
java.lang.String tableId
SerializableConfiguration serializableConfiguration
JmsIO.Read<T> spec
java.lang.String jmsMessageID
long jmsTimestamp
java.lang.String jmsCorrelationID
javax.jms.Destination jmsReplyTo
javax.jms.Destination jmsDestination
int jmsDeliveryMode
boolean jmsRedelivered
java.lang.String jmsType
long jmsExpiration
int jmsPriority
java.util.Map<K,V> properties
java.lang.String text
java.lang.String topic
int partition
long nextOffset
long watermarkMillis
KafkaIO.Read<K,V> read
java.lang.String topic
int partition
long offset
org.apache.kafka.common.header.Headers headers
KV<K,V> kv
long timestamp
KafkaTimestampType timestampType
MongoDbGridFSIO.Read<T> spec
java.util.List<E> objectIds
com.google.protobuf.ByteString value
long from
long to
java.lang.String fileLocation
java.lang.String content
org.apache.tika.metadata.Metadata metadata
java.lang.String[] metadataNames
org.apache.beam.sdk.util.SerializableThrowable error
java.lang.Class<T> jaxbClass
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> jaxbMarshaller
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> jaxbUnmarshaller
org.apache.beam.sdk.io.xml.XmlIO.MappingConfiguration<T> configuration
ValueProvider<T> value
SerializableFunction<InputT,OutputT> translator
java.lang.Class<T> klass
java.lang.String methodName
java.lang.String propertyName
java.lang.Object defaultValue
java.lang.Long optionsId
java.lang.Object value
java.lang.String expectedChecksum
org.apache.beam.sdk.util.ShardedFile shardedFile
java.lang.String actualChecksum
FileChecksumMatcher.getActualChecksum().
SerializableFunction<InputT,OutputT> checkerFn
org.apache.beam.sdk.testing.PAssert.AssertionWindows rewindowingStrategy
SimpleFunction<InputT,OutputT> paneExtractor
PAssert.PAssertionSite site
SerializableFunction<InputT,OutputT> checkerFn
org.apache.beam.sdk.testing.PAssert.AssertionWindows rewindowingStrategy
SimpleFunction<InputT,OutputT> paneExtractor
PAssert.PAssertionSite site
SerializableFunction<InputT,OutputT> checkerFn
PAssert.PAssertionSite site
java.lang.String message
java.lang.StackTraceElement[] creationStackTrace
SerializableMatcher<T> matcher
boolean isSuccess
PAssert.PAssertionSite site
org.apache.beam.sdk.util.SerializableThrowable throwable
java.util.Comparator<T> compareFn
int numQuantiles
int bufferSize
int numBuffers
long maxNumElements
java.util.TreeSet<E> heap
long minHash
long sampleSize
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean insertDefault
int fanout
java.util.List<E> sideInputs
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean insertDefault
int fanout
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
java.util.List<E> sideInputs
SerializableFunction<InputT,OutputT> combiner
int bufferSize
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean fewKeys
java.util.List<E> sideInputs
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
SerializableFunction<InputT,OutputT> hotKeyFanout
java.util.Map<K,V> valuesMap
java.util.List<E> combineFns
java.util.List<E> extractInputFns
java.util.List<E> outputTags
int combineFnCount
java.util.List<E> extractInputFns
java.util.List<E> combineFnWithContexts
java.util.List<E> outputTags
int combineFnCount
java.lang.Object closure
Requirements requirements
ValueProvider<T> provider
Coder<T> coder
SerializableFunction<InputT,OutputT> fn
TypeDescriptor<T> representativeType
SerializableFunction<InputT,OutputT> predicate
java.lang.String predicateDescription
Contextful<ClosureT> fn
boolean fewKeys
long batchSize
Contextful<ClosureT> fn
java.util.List<E> sideInputs
TupleTag<V> mainOutputTag
TupleTagList additionalOutputTags
DisplayData.ItemSpec<T> fnDisplayData
DoFn<InputT,OutputT> fn
java.util.List<E> sideInputs
DoFn<InputT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
private void readObject(java.io.ObjectInputStream oos)
private void writeObject(java.io.ObjectOutputStream oos)
java.util.regex.Pattern pattern
java.util.regex.Pattern pattern
int group
java.util.regex.Pattern pattern
java.util.regex.Pattern pattern
int keyGroup
int valueGroup
java.util.regex.Pattern pattern
java.lang.String groupName
java.util.regex.Pattern pattern
java.lang.String keyGroupName
java.lang.String valueGroupName
java.util.regex.Pattern pattern
int group
java.util.regex.Pattern pattern
int keyGroup
int valueGroup
java.util.regex.Pattern pattern
java.lang.String groupName
java.util.regex.Pattern pattern
java.lang.String keyGroupName
java.lang.String valueGroupName
java.util.regex.Pattern pattern
java.lang.String replacement
java.util.regex.Pattern pattern
java.lang.String replacement
java.util.regex.Pattern pattern
boolean outputEmpty
java.util.Collection<E> sideInputs
int sampleSize
Top.TopCombineFn<T,ComparatorT extends java.util.Comparator<T> & java.io.Serializable> topCombineFn
java.util.Random rand
SerializableFunction<InputT,OutputT> fn
int count
java.util.Comparator<T> compareFn
java.lang.Object defaultValue
boolean hasDefault
PCollectionView<T> view
SerializableFunction<InputT,OutputT> fn
SerializableFunction<InputT,OutputT> fn
Duration allowedTimestampSkew
ImmutableMap<K,V> entries
ImmutableList<E> components
CoGbkResultSchema schema
UnionCoder unionCoder
TupleTagList tupleTagList
java.util.HashMap<K,V> tagMap
java.util.List<E> elementCoders
int countElems
java.util.List<E> timestampTransforms
Trigger.OnceTrigger earlyTrigger
Trigger.OnceTrigger lateTrigger
int number
DateTime startDate
DateTimeZone timeZone
int number
int dayOfMonth
DateTime startDate
DateTimeZone timeZone
int number
int monthOfYear
int dayOfMonth
DateTime startDate
DateTimeZone timeZone
WindowFn<T,W extends BoundedWindow> givenWindowFn
java.lang.String reason
java.lang.String cause
WindowFn<T,W extends BoundedWindow> originalWindowFn
Trigger repeatedTrigger
Duration gapDuration
java.util.List<E> subTriggers
Window<T> original
WindowingStrategy<T,W extends BoundedWindow> updatedStrategy
Duration maximumLookback
java.lang.Object key
java.lang.Object value
TupleTag<V> tag
WindowMappingFn<TargetWindowT extends BoundedWindow> windowMappingFn
WindowingStrategy<T,W extends BoundedWindow> windowingStrategy
Coder<T> coder
ViewFn<PrimitiveViewT,ViewT> viewFn
The ViewFn for this view.
java.lang.Object key
int shardNumber
java.lang.String id
boolean generated
java.util.List<E> tupleTags
Coder<T> valueCoder
ByteArrayCoder idCoder
WindowFn<T,W extends BoundedWindow> windowFn
Trigger trigger
WindowingStrategy.AccumulationMode mode
Duration allowedLateness
Window.ClosingBehavior closingBehavior
Window.OnTimeBehavior onTimeBehavior
TimestampCombiner timestampCombiner
boolean triggerSpecified
boolean modeSpecified
boolean allowedLatenessSpecified
boolean timestampCombinerSpecified
SchemaFactory schemaFactory
java.util.List<E> getterFactories