PCollectionView<T> view
com.datatorrent.api.DAG dag
com.datatorrent.api.Attribute.AttributeMap launchAttributes
java.util.Properties configProperties
java.util.HashMap<K,V> env
java.lang.String cmd
PCollectionView<T> view
boolean streaming
DataflowPipelineJob job
PCollectionView<T> view
Coder<T> dataCoder
java.util.Map<K,V> properties
PCollectionView<T> view
int intTag
org.apache.beam.runners.core.metrics.MetricsContainerStepMap metricsContainers
PCollectionView<T> view
boolean isNonMerging
WindowFn<T,W extends BoundedWindow> windowFn
TimestampCombiner timestampCombiner
WindowFn<T,W extends BoundedWindow> windowFn
TimestampCombiner timestampCombiner
WindowFn<T,W extends BoundedWindow> windowFn
DoFnRunnerFactory<InputT,OutputT> doFnRunnerFactory
DoFn<InputT,OutputT> doFn
java.util.Collection<E> sideInputs
java.util.Map<K,V> tagsToSideInputs
TupleTag<V> mainOutput
java.util.List<E> sideOutputs
org.apache.beam.runners.gearpump.translators.functions.DoFnFunction.DoFnOutputManager outputManager
BoundedSource<T> source
org.apache.beam.runners.core.construction.SerializablePipelineOptions serializedOptions
Source.Reader<T> reader
boolean available
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
byte[] values
IterableCoder<T> iterableCoder
DoFn<InputT,OutputT> fn
org.apache.beam.runners.core.construction.SerializablePipelineOptions serializedOptions
java.util.Collection<E> sideInputs
org.apache.beam.runners.core.DoFnRunners.OutputManager outputManager
TupleTag<V> mainOutputTag
java.util.List<E> sideOutputTags
org.apache.beam.runners.core.StepContext stepContext
DoFnSchemaInformation doFnSchemaInformation
java.util.Map<K,V> outputCoders
WindowingStrategy<T,W extends BoundedWindow> windowingStrategy
java.util.Map<K,V> mNamedAggregators
int num
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
Duration maxReadTime
int numInitialSplits
long maxNumRecords
int sourceId
double readerCacheInterval
int splitId
BoundedSource<T> source
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
int numPartitions
long bundleSize
java.lang.String stepName
org.apache.spark.Accumulator<T> metricsAccum
MicrobatchSource<T,CheckpointMarkT extends UnboundedSource.CheckpointMark> microbatchSource
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
org.apache.spark.Partitioner partitioner
org.apache.beam.vendor.guava.v20_0.com.google.common.collect.Table<R,C,V> state
java.util.Collection<E> serTimers
byte[] value
java.lang.Class<T> type
org.apache.beam.sdk.coders.AvroCoder.SerializableSchemaSupplier schemaSupplier
TypeDescriptor<T> typeDescriptor
java.util.List<E> nonDeterministicReasons
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> decoder
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> encoder
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> writer
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> reader
org.apache.beam.vendor.guava.v20_0.com.google.common.base.Supplier<T> reflectData
CannotProvideCoderException.ReasonCode reason
Coder<T> coder
DelegateCoder.CodingFunction<InputT,OutputT> toFn
DelegateCoder.CodingFunction<InputT,OutputT> fromFn
TypeDescriptor<T> typeDescriptor
Schema schema
java.util.UUID id
java.lang.Class<T> type
Coder<T> keyCoder
VarIntCoder shardNumberCoder
DelegateCoder<T,IntermediateT> delegateCoder
java.lang.Class<T> clazz
org.apache.beam.sdk.util.gcsfs.GcsPath gcsPath
java.lang.Class<T> inputClass
ObjectMapper customMapper
java.lang.Class<T> outputClass
ObjectMapper customMapper
java.lang.Object leftNullValue
java.lang.Object rightNullValue
java.lang.Object nullValue
java.lang.Object nullValue
java.lang.Class<T> protoMessageClass (the Message type to be coded)
java.util.Set<E> extensionHostClasses (ProtoCoder extension hosts; the extensions from these classes are included in the ExtensionRegistry used during encoding and decoding)
double compression
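The extensionHostClasses entry above feeds ProtoCoder's ExtensionRegistry. A minimal sketch of how that registry gets populated, assuming hypothetical protoc-generated classes MyMessage and MyExtensions (only the ProtoCoder calls are actual Beam API):

import org.apache.beam.sdk.extensions.protobuf.ProtoCoder;

// MyMessage and MyExtensions are hypothetical generated protobuf classes,
// used here purely for illustration.
ProtoCoder<MyMessage> coder =
    ProtoCoder.of(MyMessage.class)
        // Extensions declared in MyExtensions are added to the ExtensionRegistry
        // the coder consults while encoding and decoding MyMessage elements.
        .withExtensionsFrom(MyExtensions.class);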
java.lang.String tempLocation
int memoryMB
java.lang.String tempLocation
int memoryMB
long memoryMB
BufferedExternalSorter.Options sorterOptions
java.lang.String name
int id
java.lang.String countryOfResidence
int id
int customerId
Combine.CombineFn<InputT,AccumT,OutputT> combineFn
BeamRelNode beamRelNode
boolean all
BeamSetOperatorRelBase.OpType opType
Schema schema
java.util.List<E> joinColumns
Schema schema
BeamSqlSeekableTable seekableTable
Schema lkpSchema
int factColOffset
Schema joinSubsetType
Schema outputSchema
java.util.List<E> factJoinIdx
Schema schema
PCollectionView<T> sideInputView
org.apache.calcite.rel.core.JoinRelType joinType
Row rightNullRow
boolean swap
Schema schema
TupleTag<V> leftTag
TupleTag<V> rightTag
BeamSetOperatorRelBase.OpType opType
boolean all
java.lang.String stepName
boolean isSample
SerializableFunction<InputT,OutputT> decimalConverter
boolean isSample
SerializableFunction<InputT,OutputT> decimalConverter
java.lang.String tableSpec
java.util.HashMap<K,V> configuration
org.apache.commons.csv.CSVFormat csvFormat
Schema schema
org.apache.commons.csv.CSVFormat format
Schema schema
org.apache.commons.csv.CSVFormat format
java.lang.String bootstrapServers
java.util.List<E> topics
java.util.List<E> topicPartitions
java.util.Map<K,V> configUpdates
java.util.List<E> rows
Table table
java.util.List<E> rows
long tableProviderInstanceId
java.util.List<E> timestampedRows
int timestampField
PTransform<InputT extends PInput,OutputT extends POutput> readConverter
PTransform<InputT extends PInput,OutputT extends POutput> writeConverter
java.lang.String filePattern
Schema schema
org.apache.commons.csv.CSVFormat csvFormat
AvroIO.TypedWrite<UserT,DestinationT,OutputT> inner
private java.lang.Object readResolve() throws java.io.ObjectStreamException
org.apache.beam.sdk.io.AvroSource.Mode<T> mode
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
long maxNumRecords
Duration maxReadTime
FileBasedSource<T> sourceDelegate
CompressedSource.DecompressingChannelFactory channelFactory
DefaultFilenamePolicy.Params params
ValueProvider<T> baseFilename
java.lang.String shardTemplate
boolean explicitTemplate
java.lang.String suffix
FileBasedSink.DynamicDestinations<UserT,DestinationT,OutputT> dynamicDestinations
FileBasedSink.WritableByteChannelFactory writableByteChannelFactory (used to wrap the raw data output to the underlying channel; the default, Compression.UNCOMPRESSED, leaves the output uncompressed)
ValueProvider<T> tempDirectoryProvider
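The default channel factory described above is overridden through the sink's compression setting. A rough sketch using TextIO, assuming a placeholder output path (the TextIO and Compression calls themselves are actual Beam API):

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.Compression;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;

public class CompressedWriteSketch {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
    p.apply(Create.of("a", "b", "c"))
        // Replaces the default UNCOMPRESSED channel factory so the sink wraps
        // the underlying output channel with gzip compression.
        .apply(TextIO.write().to("/tmp/output").withCompression(Compression.GZIP));
    p.run().waitUntilFinish();
  }
}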
FileBasedSink<UserT,DestinationT,OutputT> sink
ValueProvider<T> tempDirectory
boolean windowedWrites
ValueProvider<T> fileOrPatternSpec
EmptyMatchTreatment emptyMatchTreatment
MatchResult.Metadata singleFileMetadata
FileBasedSource.Mode mode
long startOffset
long endOffset
long minBundleSize
BoundedSource<T> source
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
long desiredBundleSizeBytes
SerializableFunction<InputT,OutputT> createSource
Coder<T> coder
TextIO.TypedWrite<UserT,DestinationT> inner
FileBasedSink.WriteOperation<DestinationT,OutputT> writeOperation
int backendVersion
ElasticsearchIO.Read spec
java.lang.String shardPreference
java.lang.Integer numSlices
java.lang.Integer sliceId
BigQueryIO.TypedRead<T> inner
com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession readSession
com.google.cloud.bigquery.storage.v1beta1.Storage.Stream stream
java.lang.String jsonTableSchema
SerializableFunction<InputT,OutputT> parseFn
Coder<T> outputCoder
BigQueryServices bqServices
ValueProvider<T> tableRefProtoProvider
com.google.cloud.bigquery.storage.v1beta1.ReadOptions.TableReadOptions readOptions
SerializableFunction<InputT,OutputT> parseFn
Coder<T> outputCoder
BigQueryServices bqServices
java.util.concurrent.atomic.AtomicReference<V> tableSizeBytes
BigQueryIO.Write.CreateDisposition createDisposition
BigQueryServices bqServices
DynamicDestinations<T,DestinationT> dynamicDestinations
java.lang.String kmsKey
DynamicDestinations<T,DestinationT> dynamicDestinations
SerializableFunction<InputT,OutputT> formatFunction
BigQueryServices bigQueryServices
BigQueryIO.Write.CreateDisposition createDisposition
DynamicDestinations<T,DestinationT> dynamicDestinations
InsertRetryPolicy retryPolicy
boolean extendedErrorInfo
boolean skipInvalidRows
boolean ignoreUnknownValues
java.lang.String kmsKey
Coder<T> elementCoder
SerializableFunction<InputT,OutputT> toTableRow
BigQueryServices bigQueryServices
InsertRetryPolicy retryPolicy
boolean extendedErrorInfo
boolean skipInvalidRows
boolean ignoreUnknownValues
Coder<T> elementCoder
SerializableFunction<InputT,OutputT> toTableRow
java.lang.String tableSpec
java.lang.String tableDescription
java.lang.String jsonTimePartitioning
byte[] elementBytes
java.util.Map<K,V> attributes
long timestampMsSinceEpoch
java.lang.String recordId
java.lang.String projectId
java.lang.String projectId
java.lang.String subscriptionName
java.lang.String path
org.apache.beam.sdk.io.gcp.pubsub.PubsubIO.PubsubSubscription.Type type
java.lang.String project
java.lang.String subscription
org.apache.beam.sdk.io.gcp.pubsub.PubsubIO.PubsubTopic.Type type
java.lang.String project
java.lang.String topic
int maxPublishBatchByteSize
int maxPublishBatchSize
PubsubClient.PubsubClientFactory pubsubFactory
ValueProvider<T> topic
java.lang.String timestampAttribute
java.lang.String idAttribute
int numShards
int publishBatchSize
int publishBatchBytes
Duration maxLatency
org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSink.RecordIdMethod recordIdMethod (how record ids are generated when PubsubUnboundedSink.idAttribute is non-null)
com.google.api.client.util.Clock clock
PubsubClient.PubsubClientFactory pubsubFactory
ValueProvider<T> project (project under which to create a subscription when only PubsubUnboundedSource.topic was given)
ValueProvider<T> topic (topic to read from; if null, PubsubUnboundedSource.subscription must be given, otherwise PubsubUnboundedSource.subscription must be null)
ValueProvider<T> subscription (subscription to read from; if null, PubsubUnboundedSource.topic must be given, otherwise PubsubUnboundedSource.topic must be null. If no subscription is given, a random one is created when the transform is applied; this field is updated with that subscription's path, and the created subscription is never deleted)
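The topic/subscription rules above surface through PubsubIO. A minimal sketch, assuming placeholder project, topic, and subscription names (the PubsubIO calls themselves are actual Beam API):

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.values.PCollection;

Pipeline p = Pipeline.create(PipelineOptionsFactory.create());

// Reading from a topic: the source creates its own subscription when the
// transform is applied (and, per the note above, never deletes it).
PCollection<String> fromTopic =
    p.apply("ReadTopic",
        PubsubIO.readStrings().fromTopic("projects/my-project/topics/my-topic"));

// Reading from an existing subscription instead; a topic must not also be set.
PCollection<String> fromSubscription =
    p.apply("ReadSubscription",
        PubsubIO.readStrings()
            .fromSubscription("projects/my-project/subscriptions/my-subscription")
            // Corresponds to idAttribute below: deduplicate on a message attribute.
            .withIdAttribute("uniqueId"));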
java.lang.String timestampAttribute
java.lang.String idAttribute
boolean needsAttributes
org.apache.beam.vendor.guava.v20_0.com.google.common.collect.ImmutableList<E> mutations
java.lang.String projectId
java.lang.String query
java.lang.String expectedChecksum
java.lang.String actualChecksum
BigqueryClient bigqueryClient
public void readExternal(java.io.ObjectInput in) throws java.io.IOException, java.lang.ClassNotFoundException
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException
java.lang.Class<T> type
SerializableConfiguration conf
Coder<T> keyCoder
Coder<T> valueCoder
SimpleFunction<InputT,OutputT> keyTranslationFunction
SimpleFunction<InputT,OutputT> valueTranslationFunction
HadoopFormatIO.SerializableSplit inputSplit
long boundedSourceEstimatedSize
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
org.apache.hadoop.mapreduce.InputSplit inputSplit
PTransform<InputT extends PInput,OutputT extends POutput> configTransform
ExternalSynchronization externalSynchronization
boolean withPartitioning
java.lang.String locksDir
HDFSSynchronization.ThrowingFunction<T1,T2,X extends java.lang.Exception> fileSystemFactory
SerializableConfiguration serializableConfiguration
java.lang.String tableId
org.apache.beam.sdk.io.hbase.SerializableScan serializableScan
SerializableConfiguration serializableConfiguration
java.lang.String tableId
SerializableConfiguration serializableConfiguration
JmsIO.Read<T> spec
java.lang.String jmsMessageID
long jmsTimestamp
java.lang.String jmsCorrelationID
javax.jms.Destination jmsReplyTo
javax.jms.Destination jmsDestination
int jmsDeliveryMode
boolean jmsRedelivered
java.lang.String jmsType
long jmsExpiration
int jmsPriority
java.util.Map<K,V> properties
java.lang.String text
java.lang.String topic
int partition
long nextOffset
long watermarkMillis
KafkaIO.Read<K,V> read
MongoDbGridFSIO.Read<T> spec
java.util.List<E> objectIds
com.google.protobuf.ByteString value
long from
long to
java.lang.String fileLocation
java.lang.String content
org.apache.tika.metadata.Metadata metadata
java.lang.String[] metadataNames
org.apache.beam.sdk.util.SerializableThrowable error
java.lang.Class<T> jaxbClass
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> jaxbMarshaller
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> jaxbUnmarshaller
org.apache.beam.sdk.io.xml.XmlIO.MappingConfiguration<T> configuration
MetricName name
ValueProvider<T> value
SerializableFunction<InputT,OutputT> translator
java.lang.Class<T> klass
java.lang.String methodName
java.lang.String propertyName
java.lang.Object defaultValue
java.lang.Long optionsId
java.lang.Object value
int byteArraySize
java.lang.String identifier
java.lang.String argument
Schema.FieldType fieldType
org.apache.beam.vendor.guava.v20_0.com.google.common.collect.BiMap<K,V> fieldIndices
java.util.List<E> fields
int hashCode
java.util.UUID uuid
RowCoder rowCoder
SerializableFunction<InputT,OutputT> toRowFunction
SerializableFunction<InputT,OutputT> fromRowFunction
java.lang.Class<T> clazz
java.util.Map<K,V> cachedProviders
org.apache.beam.sdk.schemas.transforms.Cast.Narrowing.Fold fold
org.apache.beam.sdk.schemas.transforms.Cast.Widening.Fold fold
org.apache.beam.sdk.schemas.transforms.CoGroup.JoinArguments joinArgs
org.apache.beam.sdk.schemas.transforms.CoGroup.JoinArguments joinArgs
java.util.Map<K,V> fieldNameFilters
java.util.Map<K,V> fieldIdFilters
java.util.Map<K,V> fieldNamesFilters
java.util.Map<K,V> fieldIdsFilters
FieldAccessDescriptor fieldAccessDescriptor
Schema keySchema
Group.ByFields<InputT> byFields
Combine.CombineFn<InputT,AccumT,OutputT> combineFn
Group.ByFields<InputT> byFields
org.apache.beam.sdk.schemas.transforms.SchemaAggregateFn.Inner<T> schemaAggregateFn
org.apache.beam.sdk.schemas.transforms.SchemaAggregateFn.Inner<T> schemaAggregateFn
Combine.CombineFn<InputT,AccumT,OutputT> combineFn
FieldAccessDescriptor fieldAccessDescriptor
java.lang.String expectedChecksum
org.apache.beam.sdk.util.ShardedFile shardedFile
java.lang.String actualChecksum (see FileChecksumMatcher.getActualChecksum())
SerializableFunction<InputT,OutputT> checkerFn
org.apache.beam.sdk.testing.PAssert.AssertionWindows rewindowingStrategy
SimpleFunction<InputT,OutputT> paneExtractor
PAssert.PAssertionSite site
SerializableFunction<InputT,OutputT> checkerFn
org.apache.beam.sdk.testing.PAssert.AssertionWindows rewindowingStrategy
SimpleFunction<InputT,OutputT> paneExtractor
PAssert.PAssertionSite site
SerializableFunction<InputT,OutputT> checkerFn
PAssert.PAssertionSite site
java.lang.String message
java.lang.StackTraceElement[] creationStackTrace
SerializableMatcher<T> matcher
boolean isSuccess
PAssert.PAssertionSite site
org.apache.beam.sdk.util.SerializableThrowable throwable
java.util.Comparator<T> compareFn
int numQuantiles
int bufferSize
int numBuffers
long maxNumElements
java.util.TreeSet<E> heap
long minHash
long sampleSize
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean insertDefault
int fanout
java.util.List<E> sideInputs
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean insertDefault
int fanout
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
java.util.List<E> sideInputs
SerializableFunction<InputT,OutputT> combiner
int bufferSize
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean fewKeys
java.util.List<E> sideInputs
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
SerializableFunction<InputT,OutputT> hotKeyFanout
java.util.Map<K,V> valuesMap
java.util.List<E> combineFns
java.util.List<E> combineInputCoders
java.util.List<E> extractInputFns
java.util.List<E> outputTags
int combineFnCount
java.util.List<E> extractInputFns
java.util.List<E> combineInputCoders
java.util.List<E> combineFnWithContexts
java.util.List<E> outputTags
int combineFnCount
java.lang.Object closure
Requirements requirements
ValueProvider<T> provider
Coder<T> coder
SerializableFunction<InputT,OutputT> fn
TypeDescriptor<T> representativeType
ProcessFunction<InputT,OutputT> predicate
java.lang.String predicateDescription
Contextful<ClosureT> fn
Contextful<ClosureT> fn
ProcessFunction<InputT,OutputT> exceptionHandler
boolean fewKeys
long batchSize
ProcessFunction<InputT,OutputT> fn
Contextful<ClosureT> fn
Contextful<ClosureT> fn
ProcessFunction<InputT,OutputT> exceptionHandler
java.util.List<E> sideInputs
TupleTag<V> mainOutputTag
TupleTagList additionalOutputTags
DisplayData.ItemSpec<T> fnDisplayData
DoFn<InputT,OutputT> fn
java.util.List<E> sideInputs
DoFn<InputT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
private void readObject(java.io.ObjectInputStream oos)
private void writeObject(java.io.ObjectOutputStream oos)
java.util.regex.Pattern pattern
java.util.regex.Pattern pattern
int group
java.util.regex.Pattern pattern
java.util.regex.Pattern pattern
int keyGroup
int valueGroup
java.util.regex.Pattern pattern
java.lang.String groupName
java.util.regex.Pattern pattern
java.lang.String keyGroupName
java.lang.String valueGroupName
java.util.regex.Pattern pattern
int group
java.util.regex.Pattern pattern
int keyGroup
int valueGroup
java.util.regex.Pattern pattern
java.lang.String groupName
java.util.regex.Pattern pattern
java.lang.String keyGroupName
java.lang.String valueGroupName
java.util.regex.Pattern pattern
java.lang.String replacement
java.util.regex.Pattern pattern
java.lang.String replacement
java.util.regex.Pattern pattern
boolean outputEmpty
java.util.Collection<E> sideInputs
int sampleSize
Top.TopCombineFn<T,ComparatorT extends java.util.Comparator<T> & java.io.Serializable> topCombineFn
java.util.Random rand
SerializableFunction<InputT,OutputT> fn
int count
java.util.Comparator<T> compareFn
java.lang.Object defaultValue
boolean hasDefault
PCollectionView<T> view
SerializableFunction<InputT,OutputT> fn
SerializableFunction<InputT,OutputT> fn
Duration allowedTimestampSkew
org.apache.beam.vendor.guava.v20_0.com.google.common.collect.ImmutableMap<K,V> entries
org.apache.beam.vendor.guava.v20_0.com.google.common.collect.ImmutableList<E> components
CoGbkResultSchema schema
UnionCoder unionCoder
TupleTagList tupleTagList
java.util.HashMap<K,V> tagMap
java.util.List<E> elementCoders
int countElems
java.util.List<E> timestampTransforms
Trigger.OnceTrigger earlyTrigger
Trigger.OnceTrigger lateTrigger
int number
DateTime startDate
DateTimeZone timeZone
int number
int dayOfMonth
DateTime startDate
DateTimeZone timeZone
int number
int monthOfYear
int dayOfMonth
DateTime startDate
DateTimeZone timeZone
WindowFn<T,W extends BoundedWindow> givenWindowFn
java.lang.String reason
java.lang.String cause
WindowFn<T,W extends BoundedWindow> originalWindowFn
Trigger repeatedTrigger
Duration gapDuration
java.util.List<E> subTriggers
Window<T> original
WindowingStrategy<T,W extends BoundedWindow> updatedStrategy
Duration maximumLookback
java.lang.Object key
java.lang.Object value
TupleTag<V> tag
WindowMappingFn<TargetWindowT extends BoundedWindow> windowMappingFn
WindowingStrategy<T,W extends BoundedWindow> windowingStrategy
Coder<T> coder
ViewFn<PrimitiveViewT,ViewT> viewFn (the ViewFn for this view)
Schema schema
java.util.List<E> values
java.lang.Object key
int shardNumber
java.lang.String id
boolean generated
java.util.List<E> tupleTags
org.apache.beam.vendor.guava.v20_0.com.google.common.reflect.TypeToken<T> token
Coder<T> valueCoder
ByteArrayCoder idCoder
WindowFn<T,W extends BoundedWindow> windowFn
Trigger trigger
WindowingStrategy.AccumulationMode mode
Duration allowedLateness
Window.ClosingBehavior closingBehavior
Window.OnTimeBehavior onTimeBehavior
TimestampCombiner timestampCombiner
boolean triggerSpecified
boolean modeSpecified
boolean allowedLatenessSpecified
boolean timestampCombinerSpecified